language stringclasses 1 value | repo stringclasses 60 values | path stringlengths 22 294 | class_span dict | source stringlengths 13 1.16M | target stringlengths 1 113 |
|---|---|---|---|---|---|
java | qos-ch__slf4j | slf4j-api/src/test/java/org/slf4j/basicTests/FluentAPIUsage.java | {
"start": 194,
"end": 633
} | class ____ {
@Test
public void smoke() {
String name = "smoke";
Logger logger = LoggerFactory.getLogger(name);
logger.atTrace().addKeyValue("a", "n").setCause(new Throwable()).log("hello");
}
@Test
public void smokxce() {
String name = "smoke";
Logger logger = LoggerFactory.getLogger(name);
assertFalse(logger.isEnabledForLevel(Level.DEBUG));
}
}
| FluentAPIUsage |
java | google__guice | extensions/assistedinject/src/com/google/inject/assistedinject/FactoryModuleBuilder.java | {
"start": 2664,
"end": 2808
} | interface ____ {
* Payment create(Date startDate, Money amount);
* Payment createWithoutDate(Money amount);
* }
*
* public | PaymentFactory |
java | google__guice | core/test/com/google/inject/util/TypesTest.java | {
"start": 1387,
"end": 7190
} | class ____ extends TestCase {
// generic types for comparison
Map<String, Integer> a;
Inner<Float, Double> b;
List<Set<String>[][]> c;
List<String> d;
Set<String> e;
Outer<String>.Inner f;
private ParameterizedType mapStringInteger;
private ParameterizedType innerFloatDouble;
private ParameterizedType listSetStringArray;
private ParameterizedType listString;
private ParameterizedType setString;
private ParameterizedType outerInner;
private GenericArrayType setStringArray;
@Override
protected void setUp() throws Exception {
super.setUp();
mapStringInteger = (ParameterizedType) getClass().getDeclaredField("a").getGenericType();
innerFloatDouble = (ParameterizedType) getClass().getDeclaredField("b").getGenericType();
listSetStringArray = (ParameterizedType) getClass().getDeclaredField("c").getGenericType();
listString = (ParameterizedType) getClass().getDeclaredField("d").getGenericType();
setString = (ParameterizedType) getClass().getDeclaredField("e").getGenericType();
outerInner = (ParameterizedType) getClass().getDeclaredField("f").getGenericType();
setStringArray = (GenericArrayType) listSetStringArray.getActualTypeArguments()[0];
}
public void testListSetMap() {
assertEqualsBothWays(mapStringInteger, Types.mapOf(String.class, Integer.class));
assertEqualsBothWays(listString, Types.listOf(String.class));
assertEqualsBothWays(setString, Types.setOf(String.class));
}
public void testDefensiveCopies() {
Type[] arguments = new Type[] {String.class, Integer.class};
ParameterizedType parameterizedType = Types.newParameterizedType(Map.class, arguments);
arguments[0] = null;
assertEquals(String.class, parameterizedType.getActualTypeArguments()[0]);
parameterizedType.getActualTypeArguments()[1] = null;
assertEquals(Integer.class, parameterizedType.getActualTypeArguments()[1]);
}
public void testTypeWithOwnerType() {
ParameterizedType actual =
Types.newParameterizedTypeWithOwner(
TypesTest.class, Inner.class, Float.class, Double.class);
assertEquals(TypesTest.class, actual.getOwnerType());
assertEqualsBothWays(innerFloatDouble, actual);
// The JDK prints this out as:
// com.google.inject.util.TypesTest.com.google.inject.util.TypesTest$Inner<java.lang.Float, java.lang.Double>
// and we think that's wrong, so the assertEquals comparison is worthless. :-(
// assertEquals(innerFloatDouble.toString(), actual.toString());
// We think the correct comparison is:
assertEquals(
"com.google.inject.util.TypesTest$Inner<java.lang.Float, java.lang.Double>",
actual.toString());
}
public void testTypeParametersMustNotBePrimitives() {
try {
Types.newParameterizedType(Map.class, String.class, int.class);
fail();
} catch (IllegalArgumentException expected) {
assertContains(
expected.getMessage(), "Primitive types are not allowed in type parameters: int");
}
}
public List<? extends CharSequence> wildcardExtends;
public List<? super CharSequence> wildcardSuper;
public List<?> wildcardObject;
public void testWildcardTypes() throws NoSuchFieldException, IOException {
assertEqualsBothWays(getWildcard("wildcardSuper"), supertypeOf(CharSequence.class));
assertEqualsBothWays(getWildcard("wildcardExtends"), subtypeOf(CharSequence.class));
assertEqualsBothWays(getWildcard("wildcardObject"), subtypeOf(Object.class));
assertEquals("? super java.lang.CharSequence", supertypeOf(CharSequence.class).toString());
assertEquals("? extends java.lang.CharSequence", subtypeOf(CharSequence.class).toString());
assertEquals("?", subtypeOf(Object.class).toString());
assertEqualWhenReserialized(supertypeOf(CharSequence.class));
assertEqualWhenReserialized(subtypeOf(CharSequence.class));
}
public void testWildcardBoundsMustNotBePrimitives() {
try {
supertypeOf(int.class);
fail();
} catch (IllegalArgumentException expected) {
assertContains(
expected.getMessage(), "Primitive types are not allowed in wildcard bounds: int");
}
try {
subtypeOf(int.class);
fail();
} catch (IllegalArgumentException expected) {
assertContains(
expected.getMessage(), "Primitive types are not allowed in wildcard bounds: int");
}
}
private WildcardType getWildcard(String fieldName) throws NoSuchFieldException {
ParameterizedType type = (ParameterizedType) getClass().getField(fieldName).getGenericType();
return (WildcardType) type.getActualTypeArguments()[0];
}
public void testEqualsAndHashcode() {
ParameterizedType parameterizedType =
Types.newParameterizedType(Map.class, String.class, Integer.class);
assertEqualsBothWays(mapStringInteger, parameterizedType);
assertEquals(mapStringInteger.toString(), parameterizedType.toString());
GenericArrayType genericArrayType =
Types.arrayOf(Types.arrayOf(Types.newParameterizedType(Set.class, String.class)));
assertEqualsBothWays(setStringArray, genericArrayType);
assertEquals(setStringArray.toString(), genericArrayType.toString());
}
public void testToString() {
Assert.assertEquals("java.lang.String", MoreTypes.typeToString(String.class));
assertEquals("java.util.Set<java.lang.String>[][]", MoreTypes.typeToString(setStringArray));
assertEquals(
"java.util.Map<java.lang.String, java.lang.Integer>",
MoreTypes.typeToString(mapStringInteger));
assertEquals(
"java.util.List<java.util.Set<java.lang.String>[][]>",
MoreTypes.typeToString(listSetStringArray));
assertEquals(innerFloatDouble.toString(), MoreTypes.typeToString(innerFloatDouble));
}
static | TypesTest |
java | FasterXML__jackson-databind | src/test/java/tools/jackson/databind/exc/TestExceptionsDuringWriting.java | {
"start": 796,
"end": 1009
} | class ____
{
/*
/**********************************************************
/* Helper classes
/**********************************************************
*/
static | TestExceptionsDuringWriting |
java | google__guice | core/src/com/google/inject/internal/Messages.java | {
"start": 5690,
"end": 7045
} | enum ____ for the error
* @param cause The exception that caused the error
* @param sources The binding sources for the source stack
* @param messageFormat Format string
* @param arguments format string arguments
*/
public static Message create(
ErrorId errorId,
Throwable cause,
List<Object> sources,
String messageFormat,
Object... arguments) {
String message = format(messageFormat, arguments);
return new Message(errorId, sources, message, cause);
}
/** Formats an object in a user friendly way. */
static Object convert(Object o) {
ElementSource source = null;
if (o instanceof ElementSource) {
source = (ElementSource) o;
o = source.getDeclaringSource();
}
return convert(o, source);
}
static Object convert(Object o, ElementSource source) {
for (Converter<?> converter : converters) {
if (converter.appliesTo(o)) {
return appendModules(converter.convert(o), source);
}
}
return appendModules(o, source);
}
private static Object appendModules(Object source, ElementSource elementSource) {
String modules = SourceFormatter.getModuleStack(elementSource);
if (modules.length() == 0) {
return source;
} else {
return source + " (installed by: " + modules + ")";
}
}
private abstract static | id |
java | alibaba__nacos | core/src/test/java/com/alibaba/nacos/core/remote/core/ServerReloaderRequestHandlerTest.java | {
"start": 1456,
"end": 2641
} | class ____ {
@InjectMocks
private ServerReloaderRequestHandler handler;
@Mock
private ConnectionManager connectionManager;
@Test
void testHandle() {
Mockito.when(connectionManager.currentClientsCount(Mockito.any())).thenReturn(2);
ServerReloadRequest reloadRequest = new ServerReloadRequest();
reloadRequest.setReloadCount(2);
reloadRequest.setReloadServer("test");
RequestMeta meta = new RequestMeta();
meta.setClientIp("1.1.1.1");
try {
ServerReloadResponse reloadResponse = handler.handle(reloadRequest, meta);
assertEquals("ignore", reloadResponse.getMessage());
} catch (NacosException e) {
e.printStackTrace();
fail(e.getMessage());
}
reloadRequest.setReloadCount(1);
try {
ServerReloadResponse reloadResponse = handler.handle(reloadRequest, meta);
assertEquals("ok", reloadResponse.getMessage());
} catch (NacosException e) {
e.printStackTrace();
fail(e.getMessage());
}
}
}
| ServerReloaderRequestHandlerTest |
java | micronaut-projects__micronaut-core | inject-java/src/main/java/io/micronaut/annotation/processing/visitor/JavaClassElement.java | {
"start": 3581,
"end": 4023
} | class ____ extends AbstractTypeAwareJavaElement implements ArrayableClassElement {
private static final String KOTLIN_METADATA = "kotlin.Metadata";
private static final String PREFIX_IS = "is";
protected final TypeElement classElement;
protected final int arrayDimensions;
@Nullable
protected String doc;
@Nullable
// Not null means raw type definition: "List myMethod()"
// Null value means a | JavaClassElement |
java | apache__camel | core/camel-core-processor/src/main/java/org/apache/camel/processor/transformer/ProcessorTransformer.java | {
"start": 1417,
"end": 4758
} | class ____ extends Transformer {
private static final Logger LOG = LoggerFactory.getLogger(ProcessorTransformer.class);
private Processor processor;
private String transformerString;
public ProcessorTransformer() {
}
public ProcessorTransformer(CamelContext context) {
setCamelContext(context);
}
/**
* Perform data transformation with specified from/to type using Processor.
*
* @param message message to apply transformation
* @param from 'from' data type
* @param to 'to' data type
*/
@Override
public void transform(Message message, DataType from, DataType to) throws Exception {
Exchange exchange = message.getExchange();
CamelContext context = exchange.getContext();
if (from.isJavaType()) {
Object input = message.getBody();
Class<?> fromClass = context.getClassResolver().resolveClass(from.getName());
if (!fromClass.isAssignableFrom(input.getClass())) {
LOG.debug("Converting to: {}", fromClass.getName());
input = context.getTypeConverter().mandatoryConvertTo(fromClass, input);
message.setBody(input);
}
}
LOG.debug("Sending to transform processor: {}", processor);
// must create a copy in this way
Exchange transformExchange = new DefaultExchange(exchange);
transformExchange.setIn(message);
transformExchange.getExchangeExtension().setProperties(exchange.getProperties());
processor.process(transformExchange);
Message answer = transformExchange.getMessage();
if (to.isJavaType()) {
Object answerBody = answer.getBody();
Class<?> toClass = context.getClassResolver().resolveClass(to.getName());
if (!toClass.isAssignableFrom(answerBody.getClass())) {
LOG.debug("Converting to: {}", toClass.getName());
answerBody = context.getTypeConverter().mandatoryConvertTo(toClass, answerBody);
answer.setBody(answerBody);
}
}
message.copyFrom(answer);
}
/**
* Set processor to use
*
* @param processor Processor
* @return this ProcessorTransformer instance
*/
public ProcessorTransformer setProcessor(Processor processor) {
this.processor = processor;
this.transformerString = null;
return this;
}
@Override
public String toString() {
if (transformerString == null) {
transformerString = String.format("ProcessorTransformer[name='%s', from='%s', to='%s', processor='%s']",
getName(), getFrom(), getTo(), processor);
}
return transformerString;
}
@Override
protected void doBuild() throws Exception {
ServiceHelper.buildService(processor);
}
@Override
protected void doInit() throws Exception {
ServiceHelper.initService(processor);
}
@Override
protected void doStart() throws Exception {
ObjectHelper.notNull(processor, "processor", this);
ServiceHelper.startService(this.processor);
}
@Override
protected void doStop() throws Exception {
ServiceHelper.stopService(this.processor);
}
}
| ProcessorTransformer |
java | spring-projects__spring-framework | spring-web/src/main/java/org/springframework/web/util/WebUtils.java | {
"start": 1969,
"end": 34412
} | class ____ {
/**
* Standard Servlet 2.3+ spec request attribute for include request URI.
* <p>If included via a {@code RequestDispatcher}, the current resource will see the
* originating request. Its own request URI is exposed as a request attribute.
*/
public static final String INCLUDE_REQUEST_URI_ATTRIBUTE = "jakarta.servlet.include.request_uri";
/**
* Standard Servlet 2.3+ spec request attribute for include context path.
* <p>If included via a {@code RequestDispatcher}, the current resource will see the
* originating context path. Its own context path is exposed as a request attribute.
*/
public static final String INCLUDE_CONTEXT_PATH_ATTRIBUTE = "jakarta.servlet.include.context_path";
/**
* Standard Servlet 2.3+ spec request attribute for include servlet path.
* <p>If included via a {@code RequestDispatcher}, the current resource will see the
* originating servlet path. Its own servlet path is exposed as a request attribute.
*/
public static final String INCLUDE_SERVLET_PATH_ATTRIBUTE = "jakarta.servlet.include.servlet_path";
/**
* Standard Servlet 2.3+ spec request attribute for include path info.
* <p>If included via a {@code RequestDispatcher}, the current resource will see the
* originating path info. Its own path info is exposed as a request attribute.
*/
public static final String INCLUDE_PATH_INFO_ATTRIBUTE = "jakarta.servlet.include.path_info";
/**
* Standard Servlet 2.3+ spec request attribute for include query string.
* <p>If included via a {@code RequestDispatcher}, the current resource will see the
* originating query string. Its own query string is exposed as a request attribute.
*/
public static final String INCLUDE_QUERY_STRING_ATTRIBUTE = "jakarta.servlet.include.query_string";
/**
* Standard Servlet 2.4+ spec request attribute for forward request URI.
* <p>If forwarded to via a RequestDispatcher, the current resource will see its
* own request URI. The originating request URI is exposed as a request attribute.
*/
public static final String FORWARD_REQUEST_URI_ATTRIBUTE = "jakarta.servlet.forward.request_uri";
/**
* Standard Servlet 2.4+ spec request attribute for forward context path.
* <p>If forwarded to via a RequestDispatcher, the current resource will see its
* own context path. The originating context path is exposed as a request attribute.
*/
public static final String FORWARD_CONTEXT_PATH_ATTRIBUTE = "jakarta.servlet.forward.context_path";
/**
* Standard Servlet 2.4+ spec request attribute for forward servlet path.
* <p>If forwarded to via a RequestDispatcher, the current resource will see its
* own servlet path. The originating servlet path is exposed as a request attribute.
*/
public static final String FORWARD_SERVLET_PATH_ATTRIBUTE = "jakarta.servlet.forward.servlet_path";
/**
* Standard Servlet 2.4+ spec request attribute for forward path info.
* <p>If forwarded to via a RequestDispatcher, the current resource will see its
* own path ingo. The originating path info is exposed as a request attribute.
*/
public static final String FORWARD_PATH_INFO_ATTRIBUTE = "jakarta.servlet.forward.path_info";
/**
* Standard Servlet 2.4+ spec request attribute for forward query string.
* <p>If forwarded to via a RequestDispatcher, the current resource will see its
* own query string. The originating query string is exposed as a request attribute.
*/
public static final String FORWARD_QUERY_STRING_ATTRIBUTE = "jakarta.servlet.forward.query_string";
/**
* Standard Servlet 2.3+ spec request attribute for error page status code.
* <p>To be exposed to JSPs that are marked as error pages, when forwarding
* to them directly rather than through the servlet container's error page
* resolution mechanism.
*/
public static final String ERROR_STATUS_CODE_ATTRIBUTE = "jakarta.servlet.error.status_code";
/**
* Standard Servlet 2.3+ spec request attribute for error page exception type.
* <p>To be exposed to JSPs that are marked as error pages, when forwarding
* to them directly rather than through the servlet container's error page
* resolution mechanism.
*/
public static final String ERROR_EXCEPTION_TYPE_ATTRIBUTE = "jakarta.servlet.error.exception_type";
/**
* Standard Servlet 2.3+ spec request attribute for error page message.
* <p>To be exposed to JSPs that are marked as error pages, when forwarding
* to them directly rather than through the servlet container's error page
* resolution mechanism.
*/
public static final String ERROR_MESSAGE_ATTRIBUTE = "jakarta.servlet.error.message";
/**
* Standard Servlet 2.3+ spec request attribute for error page exception.
* <p>To be exposed to JSPs that are marked as error pages, when forwarding
* to them directly rather than through the servlet container's error page
* resolution mechanism.
*/
public static final String ERROR_EXCEPTION_ATTRIBUTE = "jakarta.servlet.error.exception";
/**
* Standard Servlet 2.3+ spec request attribute for error page request URI.
* <p>To be exposed to JSPs that are marked as error pages, when forwarding
* to them directly rather than through the servlet container's error page
* resolution mechanism.
*/
public static final String ERROR_REQUEST_URI_ATTRIBUTE = "jakarta.servlet.error.request_uri";
/**
* Standard Servlet 2.3+ spec request attribute for error page servlet name.
* <p>To be exposed to JSPs that are marked as error pages, when forwarding
* to them directly rather than through the servlet container's error page
* resolution mechanism.
*/
public static final String ERROR_SERVLET_NAME_ATTRIBUTE = "jakarta.servlet.error.servlet_name";
/**
* Prefix of the charset clause in a content type String: ";charset=".
*/
public static final String CONTENT_TYPE_CHARSET_PREFIX = ";charset=";
/**
* Default character encoding to use when {@code request.getCharacterEncoding}
* returns {@code null}, according to the Servlet spec.
* @see ServletRequest#getCharacterEncoding
*/
public static final String DEFAULT_CHARACTER_ENCODING = "ISO-8859-1";
/**
* Standard Servlet spec context attribute that specifies a temporary
* directory for the current web application, of type {@code java.io.File}.
*/
public static final String TEMP_DIR_CONTEXT_ATTRIBUTE = "jakarta.servlet.context.tempdir";
/**
* HTML escape parameter at the servlet context level
* (i.e. a context-param in {@code web.xml}): "defaultHtmlEscape".
*/
public static final String HTML_ESCAPE_CONTEXT_PARAM = "defaultHtmlEscape";
/**
* Use of response encoding for HTML escaping parameter at the servlet context level
* (i.e. a context-param in {@code web.xml}): "responseEncodedHtmlEscape".
* @since 4.1.2
*/
public static final String RESPONSE_ENCODED_HTML_ESCAPE_CONTEXT_PARAM = "responseEncodedHtmlEscape";
/**
* Web app root key parameter at the servlet context level
* (i.e. a context-param in {@code web.xml}): "webAppRootKey".
*/
public static final String WEB_APP_ROOT_KEY_PARAM = "webAppRootKey";
/** Default web app root key: "webapp.root". */
public static final String DEFAULT_WEB_APP_ROOT_KEY = "webapp.root";
/** Name suffixes in case of image buttons. */
public static final String[] SUBMIT_IMAGE_SUFFIXES = {".x", ".y"};
/** Key for the mutex session attribute. */
public static final String SESSION_MUTEX_ATTRIBUTE = WebUtils.class.getName() + ".MUTEX";
/**
* Set a system property to the web application root directory.
* The key of the system property can be defined with the "webAppRootKey"
* context-param in {@code web.xml}. Default is "webapp.root".
* <p>Can be used for tools that support substitution with {@code System.getProperty}
* values, like log4j's "${key}" syntax within log file locations.
* @param servletContext the servlet context of the web application
* @throws IllegalStateException if the system property is already set,
* or if the WAR file is not expanded
* @see #WEB_APP_ROOT_KEY_PARAM
* @see #DEFAULT_WEB_APP_ROOT_KEY
* @see WebAppRootListener
*/
public static void setWebAppRootSystemProperty(ServletContext servletContext) throws IllegalStateException {
Assert.notNull(servletContext, "ServletContext must not be null");
String root = servletContext.getRealPath("/");
if (root == null) {
throw new IllegalStateException(
"Cannot set web app root system property when WAR file is not expanded");
}
String param = servletContext.getInitParameter(WEB_APP_ROOT_KEY_PARAM);
String key = (param != null ? param : DEFAULT_WEB_APP_ROOT_KEY);
String oldValue = System.getProperty(key);
if (oldValue != null && !StringUtils.pathEquals(oldValue, root)) {
throw new IllegalStateException("Web app root system property already set to different value: '" +
key + "' = [" + oldValue + "] instead of [" + root + "] - " +
"Choose unique values for the 'webAppRootKey' context-param in your web.xml files!");
}
System.setProperty(key, root);
servletContext.log("Set web app root system property: '" + key + "' = [" + root + "]");
}
/**
* Remove the system property that points to the web app root directory.
* To be called on shutdown of the web application.
* @param servletContext the servlet context of the web application
* @see #setWebAppRootSystemProperty
*/
public static void removeWebAppRootSystemProperty(ServletContext servletContext) {
Assert.notNull(servletContext, "ServletContext must not be null");
String param = servletContext.getInitParameter(WEB_APP_ROOT_KEY_PARAM);
String key = (param != null ? param : DEFAULT_WEB_APP_ROOT_KEY);
System.clearProperty(key);
}
/**
* Return whether default HTML escaping is enabled for the web application,
* i.e. the value of the "defaultHtmlEscape" context-param in {@code web.xml}
* (if any).
* <p>This method differentiates between no param specified at all and
* an actual boolean value specified, allowing to have a context-specific
* default in case of no setting at the global level.
* @param servletContext the servlet context of the web application
* @return whether default HTML escaping is enabled for the given application
* ({@code null} = no explicit default)
*/
public static @Nullable Boolean getDefaultHtmlEscape(@Nullable ServletContext servletContext) {
if (servletContext == null) {
return null;
}
String param = servletContext.getInitParameter(HTML_ESCAPE_CONTEXT_PARAM);
return (StringUtils.hasText(param) ? Boolean.valueOf(param) : null);
}
/**
* Return whether response encoding should be used when HTML escaping characters,
* thus only escaping XML markup significant characters with UTF-* encodings.
* This option is enabled for the web application with a ServletContext param,
* i.e. the value of the "responseEncodedHtmlEscape" context-param in {@code web.xml}
* (if any).
* <p>This method differentiates between no param specified at all and
* an actual boolean value specified, allowing to have a context-specific
* default in case of no setting at the global level.
* @param servletContext the servlet context of the web application
* @return whether response encoding is to be used for HTML escaping
* ({@code null} = no explicit default)
* @since 4.1.2
*/
public static @Nullable Boolean getResponseEncodedHtmlEscape(@Nullable ServletContext servletContext) {
if (servletContext == null) {
return null;
}
String param = servletContext.getInitParameter(RESPONSE_ENCODED_HTML_ESCAPE_CONTEXT_PARAM);
return (StringUtils.hasText(param) ? Boolean.valueOf(param) : null);
}
/**
* Return the temporary directory for the current web application,
* as provided by the servlet container.
* @param servletContext the servlet context of the web application
* @return the File representing the temporary directory
*/
public static File getTempDir(ServletContext servletContext) {
Assert.notNull(servletContext, "ServletContext must not be null");
return (File) servletContext.getAttribute(TEMP_DIR_CONTEXT_ATTRIBUTE);
}
/**
* Return the real path of the given path within the web application,
* as provided by the servlet container.
* <p>Prepends a slash if the path does not already start with a slash,
* and throws a FileNotFoundException if the path cannot be resolved to
* a resource (in contrast to ServletContext's {@code getRealPath},
* which returns null).
* @param servletContext the servlet context of the web application
* @param path the path within the web application
* @return the corresponding real path
* @throws FileNotFoundException if the path cannot be resolved to a resource
* @see jakarta.servlet.ServletContext#getRealPath
*/
public static String getRealPath(ServletContext servletContext, String path) throws FileNotFoundException {
Assert.notNull(servletContext, "ServletContext must not be null");
// Interpret location as relative to the web application root directory.
if (!path.startsWith("/")) {
path = "/" + path;
}
String realPath = servletContext.getRealPath(path);
if (realPath == null) {
throw new FileNotFoundException(
"ServletContext resource [" + path + "] cannot be resolved to absolute file path - " +
"web application archive not expanded?");
}
return realPath;
}
/**
* Determine the session id of the given request, if any.
* @param request current HTTP request
* @return the session id, or {@code null} if none
*/
public static @Nullable String getSessionId(HttpServletRequest request) {
Assert.notNull(request, "Request must not be null");
HttpSession session = request.getSession(false);
return (session != null ? session.getId() : null);
}
/**
* Check the given request for a session attribute of the given name.
* Returns null if there is no session or if the session has no such attribute.
* Does not create a new session if none has existed before!
* @param request current HTTP request
* @param name the name of the session attribute
* @return the value of the session attribute, or {@code null} if not found
*/
public static @Nullable Object getSessionAttribute(HttpServletRequest request, String name) {
Assert.notNull(request, "Request must not be null");
HttpSession session = request.getSession(false);
return (session != null ? session.getAttribute(name) : null);
}
/**
* Check the given request for a session attribute of the given name.
* Throws an exception if there is no session or if the session has no such
* attribute. Does not create a new session if none has existed before!
* @param request current HTTP request
* @param name the name of the session attribute
* @return the value of the session attribute, or {@code null} if not found
* @throws IllegalStateException if the session attribute could not be found
*/
public static Object getRequiredSessionAttribute(HttpServletRequest request, String name)
throws IllegalStateException {
Object attr = getSessionAttribute(request, name);
if (attr == null) {
throw new IllegalStateException("No session attribute '" + name + "' found");
}
return attr;
}
/**
* Set the session attribute with the given name to the given value.
* Removes the session attribute if value is null, if a session existed at all.
* Does not create a new session if not necessary!
* @param request current HTTP request
* @param name the name of the session attribute
* @param value the value of the session attribute
*/
public static void setSessionAttribute(HttpServletRequest request, String name, @Nullable Object value) {
Assert.notNull(request, "Request must not be null");
if (value != null) {
request.getSession().setAttribute(name, value);
}
else {
HttpSession session = request.getSession(false);
if (session != null) {
session.removeAttribute(name);
}
}
}
/**
* Return the best available mutex for the given session:
* that is, an object to synchronize on for the given session.
* <p>Returns the session mutex attribute if available; usually,
* this means that the HttpSessionMutexListener needs to be defined
* in {@code web.xml}. Falls back to the HttpSession itself
* if no mutex attribute found.
* <p>The session mutex is guaranteed to be the same object during
* the entire lifetime of the session, available under the key defined
* by the {@code SESSION_MUTEX_ATTRIBUTE} constant. It serves as a
* safe reference to synchronize on for locking on the current session.
* <p>In many cases, the HttpSession reference itself is a safe mutex
* as well, since it will always be the same object reference for the
* same active logical session. However, this is not guaranteed across
* different servlet containers; the only 100% safe way is a session mutex.
* @param session the HttpSession to find a mutex for
* @return the mutex object (never {@code null})
* @see #SESSION_MUTEX_ATTRIBUTE
* @see HttpSessionMutexListener
*/
public static Object getSessionMutex(HttpSession session) {
Assert.notNull(session, "Session must not be null");
Object mutex = session.getAttribute(SESSION_MUTEX_ATTRIBUTE);
if (mutex == null) {
mutex = session;
}
return mutex;
}
/**
* Return an appropriate request object of the specified type, if available,
* unwrapping the given request as far as necessary.
* @param request the servlet request to introspect
* @param requiredType the desired type of request object
* @return the matching request object, or {@code null} if none
* of that type is available
*/
@SuppressWarnings("unchecked")
public static <T> @Nullable T getNativeRequest(ServletRequest request, @Nullable Class<T> requiredType) {
if (requiredType != null) {
if (requiredType.isInstance(request)) {
return (T) request;
}
else if (request instanceof ServletRequestWrapper wrapper) {
return getNativeRequest(wrapper.getRequest(), requiredType);
}
}
return null;
}
/**
* Return an appropriate response object of the specified type, if available,
* unwrapping the given response as far as necessary.
* @param response the servlet response to introspect
* @param requiredType the desired type of response object
* @return the matching response object, or {@code null} if none
* of that type is available
*/
@SuppressWarnings("unchecked")
public static <T> @Nullable T getNativeResponse(ServletResponse response, @Nullable Class<T> requiredType) {
if (requiredType != null) {
if (requiredType.isInstance(response)) {
return (T) response;
}
else if (response instanceof ServletResponseWrapper wrapper) {
return getNativeResponse(wrapper.getResponse(), requiredType);
}
}
return null;
}
/**
* Determine whether the given request is an include request,
* that is, not a top-level HTTP request coming in from the outside.
* <p>Checks the presence of the "jakarta.servlet.include.request_uri"
* request attribute. Could check any request attribute that is only
* present in an include request.
* @param request current servlet request
* @return whether the given request is an include request
*/
public static boolean isIncludeRequest(ServletRequest request) {
return (request.getAttribute(INCLUDE_REQUEST_URI_ATTRIBUTE) != null);
}
/**
* Expose the Servlet spec's error attributes as {@link jakarta.servlet.http.HttpServletRequest}
* attributes under the keys defined in the Servlet 2.3 specification, for error pages that
* are rendered directly rather than through the Servlet container's error page resolution:
* {@code jakarta.servlet.error.status_code},
* {@code jakarta.servlet.error.exception_type},
* {@code jakarta.servlet.error.message},
* {@code jakarta.servlet.error.exception},
* {@code jakarta.servlet.error.request_uri},
* {@code jakarta.servlet.error.servlet_name}.
* <p>Does not override values if already present, to respect attribute values
* that have been exposed explicitly before.
* <p>Exposes status code 200 by default. Set the "jakarta.servlet.error.status_code"
* attribute explicitly (before or after) in order to expose a different status code.
* @param request current servlet request
* @param ex the exception encountered
* @param servletName the name of the offending servlet
*/
public static void exposeErrorRequestAttributes(HttpServletRequest request, Throwable ex,
		@Nullable String servletName) {

	// Status 200 is exposed by default; callers can set a more specific code before or after
	// (existing values are never overridden).
	exposeRequestAttributeIfNotPresent(request, ERROR_STATUS_CODE_ATTRIBUTE, HttpServletResponse.SC_OK);
	exposeRequestAttributeIfNotPresent(request, ERROR_EXCEPTION_TYPE_ATTRIBUTE, ex.getClass());
	exposeRequestAttributeIfNotPresent(request, ERROR_MESSAGE_ATTRIBUTE, ex.getMessage());
	exposeRequestAttributeIfNotPresent(request, ERROR_EXCEPTION_ATTRIBUTE, ex);
	exposeRequestAttributeIfNotPresent(request, ERROR_REQUEST_URI_ATTRIBUTE, request.getRequestURI());
	// The offending servlet's name is optional: only expose it when known.
	if (servletName != null) {
		exposeRequestAttributeIfNotPresent(request, ERROR_SERVLET_NAME_ATTRIBUTE, servletName);
	}
}
/**
* Expose the specified request attribute if not already present.
* @param request current servlet request
* @param name the name of the attribute
* @param value the suggested value of the attribute
*/
private static void exposeRequestAttributeIfNotPresent(ServletRequest request, String name, @Nullable Object value) {
	// Respect any attribute value that has already been exposed under this name.
	Object existing = request.getAttribute(name);
	if (existing == null) {
		request.setAttribute(name, value);
	}
}
/**
* Clear the Servlet spec's error attributes as {@link jakarta.servlet.http.HttpServletRequest}
* attributes under the keys defined in the Servlet 2.3 specification:
* {@code jakarta.servlet.error.status_code},
* {@code jakarta.servlet.error.exception_type},
* {@code jakarta.servlet.error.message},
* {@code jakarta.servlet.error.exception},
* {@code jakarta.servlet.error.request_uri},
* {@code jakarta.servlet.error.servlet_name}.
* @param request current servlet request
*/
public static void clearErrorRequestAttributes(HttpServletRequest request) {
	// Remove every Servlet spec error attribute in one pass, in the same order
	// they are exposed by exposeErrorRequestAttributes.
	String[] errorAttributeNames = {
			ERROR_STATUS_CODE_ATTRIBUTE, ERROR_EXCEPTION_TYPE_ATTRIBUTE, ERROR_MESSAGE_ATTRIBUTE,
			ERROR_EXCEPTION_ATTRIBUTE, ERROR_REQUEST_URI_ATTRIBUTE, ERROR_SERVLET_NAME_ATTRIBUTE};
	for (String attributeName : errorAttributeNames) {
		request.removeAttribute(attributeName);
	}
}
/**
* Retrieve the first cookie with the given name. Note that multiple
* cookies can have the same name but different paths or domains.
* @param request current servlet request
* @param name cookie name
* @return the first cookie with the given name, or {@code null} if none is found
*/
public static @Nullable Cookie getCookie(HttpServletRequest request, String name) {
	Assert.notNull(request, "Request must not be null");
	Cookie[] cookies = request.getCookies();
	if (cookies == null) {
		return null;
	}
	// Multiple cookies may share a name (different path/domain); return the first match.
	for (int i = 0; i < cookies.length; i++) {
		Cookie candidate = cookies[i];
		if (name.equals(candidate.getName())) {
			return candidate;
		}
	}
	return null;
}
/**
* Check if a specific input type="submit" parameter was sent in the request,
* either via a button (directly with name) or via an image (name + ".x" or
* name + ".y").
* @param request current HTTP request
* @param name the name of the parameter
* @return if the parameter was sent
* @see #SUBMIT_IMAGE_SUFFIXES
*/
public static boolean hasSubmitParameter(ServletRequest request, String name) {
	Assert.notNull(request, "Request must not be null");
	// Plain submit button: the parameter is present under its own name.
	if (request.getParameter(name) != null) {
		return true;
	}
	// Image button: the browser submits click coordinates as "name.x" / "name.y" instead.
	for (String suffix : SUBMIT_IMAGE_SUFFIXES) {
		if (request.getParameter(name + suffix) != null) {
			return true;
		}
	}
	return false;
}
/**
* Obtain a named parameter from the given request parameters.
* <p>See {@link #findParameterValue(java.util.Map, String)}
* for a description of the lookup algorithm.
* @param request current HTTP request
* @param name the <i>logical</i> name of the request parameter
* @return the value of the parameter, or {@code null}
* if the parameter does not exist in given request
*/
public static @Nullable String findParameterValue(ServletRequest request, String name) {
	// Delegate to the Map-based variant, using the request's complete parameter map.
	return findParameterValue(request.getParameterMap(), name);
}
/**
* Obtain a named parameter from the given request parameters.
* <p>This method will try to obtain a parameter value using the
* following algorithm:
* <ol>
* <li>Try to get the parameter value using just the given <i>logical</i> name.
* This handles parameters of the form {@code logicalName = value}. For normal
* parameters, for example, submitted using a hidden HTML form field, this will return
* the requested value.</li>
* <li>Try to obtain the parameter value from the parameter name, where the
* parameter name in the request is of the form {@code logicalName_value = xyz}
* with "_" being the configured delimiter. This deals with parameter values
* submitted using an HTML form submit button.</li>
* <li>If the value obtained in the previous step has a ".x" or ".y" suffix,
* remove that. This handles cases where the value was submitted using an
* HTML form image button. In this case the parameter in the request would
* actually be of the form {@code logicalName_value.x = 123}.</li>
* </ol>
* @param parameters the available parameter map
* @param name the <i>logical</i> name of the request parameter
* @return the value of the parameter, or {@code null}
* if the parameter does not exist in given request
*/
public static @Nullable String findParameterValue(Map<String, ?> parameters, String name) {
	// 1) Plain "name=value" parameter (hidden field or regular input).
	Object direct = parameters.get(name);
	if (direct instanceof String[] values) {
		return (values.length > 0 ? values[0] : null);
	}
	if (direct != null) {
		return direct.toString();
	}
	// 2) "name_value=xyz" style parameter, as submitted by HTML form submit buttons.
	String prefix = name + "_";
	for (String paramName : parameters.keySet()) {
		if (!paramName.startsWith(prefix)) {
			continue;
		}
		// 3) Strip an image-button coordinate suffix ("name_value.x=123") if present.
		for (String suffix : SUBMIT_IMAGE_SUFFIXES) {
			if (paramName.endsWith(suffix)) {
				return paramName.substring(prefix.length(), paramName.length() - suffix.length());
			}
		}
		return paramName.substring(prefix.length());
	}
	// Nothing found under any of the supported conventions.
	return null;
}
/**
* Return a map containing all parameters with the given prefix.
* Maps single values to String and multiple values to String array.
* <p>For example, with a prefix of "spring_", "spring_param1" and
* "spring_param2" result in a Map with "param1" and "param2" as keys.
* @param request the HTTP request in which to look for parameters
* @param prefix the beginning of parameter names
* (if this is null or the empty string, all parameters will match)
* @return map containing request parameters <b>without the prefix</b>,
* containing either a String or a String array as values
* @see jakarta.servlet.ServletRequest#getParameterNames
* @see jakarta.servlet.ServletRequest#getParameterValues
* @see jakarta.servlet.ServletRequest#getParameterMap
*/
public static Map<String, Object> getParametersStartingWith(ServletRequest request, @Nullable String prefix) {
	Assert.notNull(request, "Request must not be null");
	// A missing prefix means "match every parameter".
	String effectivePrefix = (prefix != null ? prefix : "");
	Map<String, Object> params = new TreeMap<>();
	Enumeration<String> paramNames = request.getParameterNames();
	while (paramNames != null && paramNames.hasMoreElements()) {
		String paramName = paramNames.nextElement();
		if (!effectivePrefix.isEmpty() && !paramName.startsWith(effectivePrefix)) {
			continue;
		}
		String unprefixed = paramName.substring(effectivePrefix.length());
		String[] values = request.getParameterValues(paramName);
		if (values == null || values.length == 0) {
			// No values found at all: skip this parameter entirely.
		}
		else if (values.length == 1) {
			// Single value -> plain String entry.
			params.put(unprefixed, values[0]);
		}
		else {
			// Multiple values -> String[] entry.
			params.put(unprefixed, values);
		}
	}
	return params;
}
/**
* Parse the given string with matrix variables. An example string would look
* like this {@code "q1=a;q1=b;q2=a,b,c"}. The resulting map would contain
* keys {@code "q1"} and {@code "q2"} with values {@code ["a","b"]} and
* {@code ["a","b","c"]} respectively.
* @param matrixVariables the unparsed matrix variables string
* @return a map with matrix variable names and values (never {@code null})
* @since 3.2
*/
public static MultiValueMap<String, String> parseMatrixVariables(String matrixVariables) {
	MultiValueMap<String, String> result = new LinkedMultiValueMap<>();
	if (!StringUtils.hasText(matrixVariables)) {
		return result;
	}
	// Note: StringTokenizer silently skips empty segments, so ";;" does not produce entries.
	StringTokenizer pairs = new StringTokenizer(matrixVariables, ";");
	while (pairs.hasMoreTokens()) {
		String pair = pairs.nextToken();
		int index = pair.indexOf('=');
		if (index != -1) {
			String name = pair.substring(0, index);
			// Session ids leaking into the path are deliberately dropped (case-insensitively).
			if (name.equalsIgnoreCase("jsessionid")) {
				continue;
			}
			String rawValue = pair.substring(index + 1);
			// A comma-separated value list expands to multiple entries under the same name.
			for (String value : StringUtils.commaDelimitedListToStringArray(rawValue)) {
				result.add(name, value);
			}
		}
		else {
			// A name without '=' is recorded with an empty value.
			result.add(pair, "");
		}
	}
	return result;
}
/**
* Check the given request origin against a list of allowed origins.
* A list containing "*" means that all origins are allowed.
* An empty list means only same origin is allowed.
*
* <p><strong>Note:</strong> as of 5.1 this method ignores
* {@code "Forwarded"} and {@code "X-Forwarded-*"} headers that specify the
* client-originated address. Consider using the {@code ForwardedHeaderFilter}
* to extract and use, or to discard such headers.
* @return {@code true} if the request origin is valid, {@code false} otherwise
* @since 4.1.5
* @see <a href="https://tools.ietf.org/html/rfc6454">RFC 6454: The Web Origin Concept</a>
*/
public static boolean isValidOrigin(HttpRequest request, Collection<String> allowedOrigins) {
	Assert.notNull(request, "Request must not be null");
	Assert.notNull(allowedOrigins, "Allowed origins must not be null");
	String origin = request.getHeaders().getOrigin();
	// No Origin header, or an explicit "*" wildcard, accepts the request outright.
	// Note: the wildcard check deliberately precedes the empty-list check below.
	if (origin == null || allowedOrigins.contains("*")) {
		return true;
	}
	// An empty list means "same origin only".
	else if (CollectionUtils.isEmpty(allowedOrigins)) {
		return isSameOrigin(request);
	}
	// Otherwise the Origin header value must match one of the configured origins exactly.
	else {
		return allowedOrigins.contains(origin);
	}
}
/**
* Check if the request is a same-origin one, based on {@code Origin}, {@code Host},
* {@code Forwarded}, {@code X-Forwarded-Proto}, {@code X-Forwarded-Host} and
* {@code X-Forwarded-Port} headers.
*
* <p><strong>Note:</strong> as of 5.1 this method ignores
* {@code "Forwarded"} and {@code "X-Forwarded-*"} headers that specify the
* client-originated address. Consider using the {@code ForwardedHeaderFilter}
* to extract and use, or to discard such headers.
* @return {@code true} if the request is a same-origin one, {@code false} in case
* of cross-origin request
* @since 4.2
*/
public static boolean isSameOrigin(HttpRequest request) {
	HttpHeaders headers = request.getHeaders();
	String origin = headers.getOrigin();
	// Absent Origin header: treated as same-origin (nothing to compare against).
	if (origin == null) {
		return true;
	}
	String scheme;
	String host;
	int port;
	if (request instanceof ServletServerHttpRequest servletServerHttpRequest) {
		// Build more efficiently if we can: we only need scheme, host, port for origin comparison
		HttpServletRequest servletRequest = servletServerHttpRequest.getServletRequest();
		scheme = servletRequest.getScheme();
		host = servletRequest.getServerName();
		port = servletRequest.getServerPort();
	}
	else {
		URI uri = request.getURI();
		scheme = uri.getScheme();
		host = uri.getHost();
		port = uri.getPort();
	}
	UriComponents originUrl = UriComponentsBuilder.fromUriString(origin).build();
	// Compare scheme, host and port; getPort() normalizes unspecified ports (-1)
	// to the scheme's default so "http://host" and "http://host:80" compare equal.
	return (ObjectUtils.nullSafeEquals(scheme, originUrl.getScheme()) &&
			ObjectUtils.nullSafeEquals(host, originUrl.getHost()) &&
			getPort(scheme, port) == getPort(originUrl.getScheme(), originUrl.getPort()));
}
private static int getPort(@Nullable String scheme, int port) {
	// A port of -1 means "not specified": substitute the scheme's well-known default.
	if (port != -1) {
		return port;
	}
	if ("http".equals(scheme) || "ws".equals(scheme)) {
		return 80;
	}
	if ("https".equals(scheme) || "wss".equals(scheme)) {
		return 443;
	}
	// Unknown scheme: leave the port unspecified.
	return port;
}
}
| WebUtils |
java | quarkusio__quarkus | extensions/resteasy-reactive/rest/deployment/src/test/java/io/quarkus/resteasy/reactive/server/test/resource/basic/resource/ScanSubresource.java | {
"start": 167,
"end": 315
} | class ____ {
@Path("doit")
@GET
@Produces("text/plain")
public String get() {
	// Fixed plain-text marker returned by this subresource endpoint.
	return "subresource-doit";
}
}
| ScanSubresource |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/fetchprofiles/join/Course.java | {
"start": 720,
"end": 1656
} | class ____ {
private Department department;
private int number;
public Code() {
}
public Code(Department department, int number) {
this.department = department;
this.number = number;
}
public Department getDepartment() {
return department;
}
public void setDepartment(Department department) {
this.department = department;
}
public int getNumber() {
return number;
}
public void setNumber(int number) {
this.number = number;
}
/**
 * Two codes are equal when they carry the same number for the same department.
 * Note: throws NullPointerException if {@code department} is null and the
 * numbers match, mirroring the original field-by-field comparison order.
 */
@Override
public boolean equals(Object o) {
	if ( this == o ) {
		return true;
	}
	if ( !( o instanceof Code ) ) {
		return false;
	}
	Code other = ( Code ) o;
	// Compare the cheap numeric component first, then the department.
	return number == other.number && department.equals( other.department );
}

@Override
public int hashCode() {
	// Combine both identity components, consistent with equals().
	return 31 * department.hashCode() + number;
}
}
}
| Code |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/query/sql/internal/NativeSelectQueryPlanImpl.java | {
"start": 1561,
"end": 6728
} | class ____<R> implements NativeSelectQueryPlan<R> {
// Final SQL string, after mapping-driven expansion of the original native query.
private final String sql;
// Tables the query touches; drives auto-flush decisions before execution.
private final Set<String> affectedTableNames;
// Occurrences of query parameters in the SQL text, in encounter order (may be null or empty).
private final List<ParameterOccurrence> parameterList;
// Producer of the JDBC result-set to domain-result mapping.
private final JdbcValuesMappingProducer resultSetMapping;

/**
 * Builds the plan: processes the declared result-set mapping, expands the
 * native SQL accordingly and collects the affected table names.
 */
public NativeSelectQueryPlanImpl(
		String sql,
		Set<String> affectedTableNames,
		List<ParameterOccurrence> parameterList,
		ResultSetMapping resultSetMapping,
		SessionFactoryImplementor sessionFactory) {
	final ResultSetMappingProcessor processor = new ResultSetMappingProcessor( resultSetMapping, sessionFactory );
	final SQLQueryParser parser = new SQLQueryParser( sql, processor.process(), sessionFactory );
	this.sql = parser.process();
	this.parameterList = parameterList;
	this.resultSetMapping = processor.generateResultMapping( parser.queryHasAliases() );
	if ( affectedTableNames == null ) {
		affectedTableNames = new HashSet<>();
	}
	if ( resultSetMapping != null ) {
		// Let the mapping contribute the tables it reads from as well.
		resultSetMapping.addAffectedTableNames( affectedTableNames, sessionFactory );
	}
	this.affectedTableNames = affectedTableNames;
}

/**
 * Executes the query, handing rows to the given results consumer.
 */
@Override
public <T> T executeQuery(DomainQueryExecutionContext executionContext, ResultsConsumer<T, R> resultsConsumer) {
	final List<JdbcParameterBinder> jdbcParameterBinders;
	final JdbcParameterBindings jdbcParameterBindings;
	final QueryParameterBindings queryParameterBindings = executionContext.getQueryParameterBindings();
	// With no parameters, avoid allocating binder/binding structures entirely.
	if ( parameterList == null || parameterList.isEmpty() ) {
		jdbcParameterBinders = emptyList();
		jdbcParameterBindings = JdbcParameterBindings.NO_BINDINGS;
	}
	else {
		jdbcParameterBinders = new ArrayList<>( parameterList.size() );
		jdbcParameterBindings = new JdbcParameterBindingsImpl(
				queryParameterBindings,
				parameterList,
				jdbcParameterBinders,
				executionContext.getSession().getFactory()
		);
	}
	final JdbcOperationQuerySelect jdbcSelect = new JdbcOperationQuerySelect(
			sql,
			jdbcParameterBinders,
			resultSetMapping,
			affectedTableNames
	);
	// NOTE(review): unlike performList/performScroll, no autoFlushIfRequired call
	// happens here — confirm this is intentional.
	return executionContext.getSession().getJdbcServices().getJdbcSelectExecutor().executeQuery(
			jdbcSelect,
			jdbcParameterBindings,
			SqmJdbcExecutionContextAdapter.usingLockingAndPaging( executionContext ),
			null,
			null,
			-1,
			resultsConsumer
	);
}

/**
 * Executes the query and materializes all results as a List.
 */
@Override
public List<R> performList(DomainQueryExecutionContext executionContext) {
	final QueryOptions queryOptions = executionContext.getQueryOptions();
	// JPA semantics: maxResults == 0 short-circuits to an empty result.
	if ( queryOptions.getEffectiveLimit().getMaxRowsJpa() == 0 ) {
		return emptyList();
	}
	else {
		final List<JdbcParameterBinder> jdbcParameterBinders;
		final JdbcParameterBindings jdbcParameterBindings;
		// Same binder/binding setup as executeQuery (duplicated here).
		if ( parameterList == null || parameterList.isEmpty() ) {
			jdbcParameterBinders = emptyList();
			jdbcParameterBindings = JdbcParameterBindings.NO_BINDINGS;
		}
		else {
			jdbcParameterBinders = new ArrayList<>( parameterList.size() );
			jdbcParameterBindings = new JdbcParameterBindingsImpl(
					executionContext.getQueryParameterBindings(),
					parameterList,
					jdbcParameterBinders,
					executionContext.getSession().getFactory()
			);
		}
		final JdbcOperationQuerySelect jdbcSelect = new JdbcOperationQuerySelect(
				sql,
				jdbcParameterBinders,
				resultSetMapping,
				affectedTableNames
		);
		// Flush pending changes for the affected tables before reading.
		executionContext.getSession().autoFlushIfRequired( jdbcSelect.getAffectedTableNames() );
		return executionContext.getSession().getJdbcServices().getJdbcSelectExecutor().list(
				jdbcSelect,
				jdbcParameterBindings,
				SqmJdbcExecutionContextAdapter.usingLockingAndPaging( executionContext ),
				null,
				queryOptions.getUniqueSemantic() == null
						? ListResultsConsumer.UniqueSemantic.NEVER
						: queryOptions.getUniqueSemantic()
		);
	}
}

/**
 * Executes the query and exposes the results as a scrollable cursor.
 */
@Override
public ScrollableResultsImplementor<R> performScroll(ScrollMode scrollMode, DomainQueryExecutionContext executionContext) {
	// JPA semantics: maxResults == 0 short-circuits to an empty cursor.
	if ( executionContext.getQueryOptions().getEffectiveLimit().getMaxRowsJpa() == 0 ) {
		return EmptyScrollableResults.instance();
	}
	else {
		final List<JdbcParameterBinder> jdbcParameterBinders;
		final JdbcParameterBindings jdbcParameterBindings;
		// Same binder/binding setup as executeQuery (duplicated here).
		if ( parameterList == null || parameterList.isEmpty() ) {
			jdbcParameterBinders = emptyList();
			jdbcParameterBindings = JdbcParameterBindings.NO_BINDINGS;
		}
		else {
			jdbcParameterBinders = new ArrayList<>( parameterList.size() );
			jdbcParameterBindings = new JdbcParameterBindingsImpl(
					executionContext.getQueryParameterBindings(),
					parameterList,
					jdbcParameterBinders,
					executionContext.getSession().getFactory()
			);
		}
		final JdbcOperationQuerySelect jdbcSelect = new JdbcOperationQuerySelect(
				sql,
				jdbcParameterBinders,
				resultSetMapping,
				affectedTableNames
		);
		// Flush pending changes for the affected tables before opening the cursor.
		executionContext.getSession().autoFlushIfRequired( jdbcSelect.getAffectedTableNames() );
		return executionContext.getSession().getJdbcServices().getJdbcSelectExecutor().scroll(
				jdbcSelect,
				scrollMode,
				jdbcParameterBindings,
				SqmJdbcExecutionContextAdapter.usingLockingAndPaging( executionContext ),
				null,
				-1
		);
	}
}
}
| NativeSelectQueryPlanImpl |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/annotations/sharedfk/AbstractChild.java | {
"start": 527,
"end": 608
} | class ____ {
@Id
@GeneratedValue
@Column(name = "ID")
Integer id;
}
| AbstractChild |
java | google__dagger | javatests/dagger/internal/codegen/IgnoreProvisionKeyWildcardsTest.java | {
"start": 15062,
"end": 15715
} | interface ____ {",
" fun set(): Set<Foo<Bar>>",
"}",
"@Module",
"object MyModule {",
" @Provides @IntoSet fun set1(): Foo<Bar> = TODO()",
" @Provides @IntoSet fun set2(): Foo<Bar> = TODO()",
" @Provides @ElementsIntoSet fun set3(): Set<Foo<Bar>> = TODO()",
"}"),
subject -> subject.hasErrorCount(0));
}
@Test
public void testProvidesMultibindsMapDeclarationValuesWithDifferentTypeVariances() {
compile(
/* javaComponentClass = */
NEW_LINES.join(
"@Component(modules = MyModule.class)",
" | MyComponent |
java | mapstruct__mapstruct | core/src/main/java/org/mapstruct/BeanMapping.java | {
"start": 1610,
"end": 1936
} | class ____ implements FruitMapper {
* @Override
* public Fruit toFruit(FruitDto fruitDto) {
* Apple fruit = fruitFactory.createApple();
* // ...
* }
* }
* </code></pre>
*
* @author Sjaak Derksen
*/
@Target(ElementType.METHOD)
@Retention(RetentionPolicy.CLASS)
public @ | FruitMapperImpl |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/deletion/task/DeletionTask.java | {
"start": 1483,
"end": 8532
} | class ____ implements Runnable {
static final Logger LOG =
LoggerFactory.getLogger(DeletionTask.class);
public static final int INVALID_TASK_ID = -1;
private int taskId;
private String user;
private DeletionTaskType deletionTaskType;
private DeletionService deletionService;
private final AtomicInteger numberOfPendingPredecessorTasks;
private final Set<DeletionTask> successorTaskSet;
// By default all tasks will start as success=true; however if any of
// the dependent task fails then it will be marked as false in
// deletionTaskFinished().
private boolean success;
/**
* Deletion task with taskId and default values.
*
* @param taskId the ID of the task, if previously set.
* @param deletionService the {@link DeletionService}.
* @param user the user associated with the delete.
* @param deletionTaskType the {@link DeletionTaskType}.
*/
public DeletionTask(int taskId, DeletionService deletionService, String user,
DeletionTaskType deletionTaskType) {
this(taskId, deletionService, user, new AtomicInteger(0),
new HashSet<DeletionTask>(), deletionTaskType);
}
/**
* Deletion task with taskId and user supplied values.
*
* @param taskId the ID of the task, if previously set.
* @param deletionService the {@link DeletionService}.
* @param user the user associated with the delete.
* @param numberOfPendingPredecessorTasks Number of pending tasks.
* @param successorTaskSet the list of successor DeletionTasks
* @param deletionTaskType the {@link DeletionTaskType}.
*/
public DeletionTask(int taskId, DeletionService deletionService, String user,
AtomicInteger numberOfPendingPredecessorTasks,
Set<DeletionTask> successorTaskSet, DeletionTaskType deletionTaskType) {
this.taskId = taskId;
this.deletionService = deletionService;
this.user = user;
this.numberOfPendingPredecessorTasks = numberOfPendingPredecessorTasks;
this.successorTaskSet = successorTaskSet;
this.deletionTaskType = deletionTaskType;
success = true;
}
/**
* Get the taskId for the DeletionTask.
*
* @return the taskId.
*/
public int getTaskId() {
return taskId;
}
/**
* Set the taskId for the DeletionTask.
*
* @param taskId the taskId.
*/
public void setTaskId(int taskId) {
this.taskId = taskId;
}
/**
 * Get the user associated with the DeletionTask.
 *
 * @return the user name.
 */
public String getUser() {
	return user;
}
/**
* Get the {@link DeletionService} for this DeletionTask.
*
* @return the {@link DeletionService}.
*/
public DeletionService getDeletionService() {
return deletionService;
}
/**
* Get the {@link DeletionTaskType} for this DeletionTask.
*
* @return the {@link DeletionTaskType}.
*/
public DeletionTaskType getDeletionTaskType() {
return deletionTaskType;
}
/**
* Set the DeletionTask run status.
*
* @param success the status of the running DeletionTask.
*/
public synchronized void setSuccess(boolean success) {
this.success = success;
}
/**
 * Return the DeletionTask run status.
 *
 * @return the status of the running DeletionTask.
 */
public synchronized boolean getSuccess() {
	return this.success;
}

/**
 * Return the DeletionTask run status.
 *
 * @return the status of the running DeletionTask.
 * @deprecated misspelled accessor kept for compatibility with existing
 *             callers; use {@link #getSuccess()} instead.
 */
@Deprecated
public synchronized boolean getSucess() {
	return getSuccess();
}
/**
* Return the list of successor tasks for the DeletionTask.
*
* @return the list of successor tasks.
*/
public synchronized DeletionTask[] getSuccessorTasks() {
DeletionTask[] successors = new DeletionTask[successorTaskSet.size()];
return successorTaskSet.toArray(successors);
}
/**
* Convert the DeletionTask to the Protobuf representation for storing in the
* state store and recovery.
*
* @return the protobuf representation of the DeletionTask.
*/
public abstract DeletionServiceDeleteTaskProto convertDeletionTaskToProto();
/**
* Add a dependent DeletionTask.
*
* If there is a task dependency between say tasks 1,2,3 such that
* task2 and task3 can be started only after task1 then we should define
* task2 and task3 as successor tasks for task1.
* Note:- Task dependency should be defined prior to calling delete.
*
* @param successorTask the DeletionTask the depends on this DeletionTask.
*/
public synchronized void addDeletionTaskDependency(
DeletionTask successorTask) {
if (successorTaskSet.add(successorTask)) {
successorTask.incrementAndGetPendingPredecessorTasks();
}
}
/**
* Increments and returns pending predecessor task count.
*
* @return the number of pending predecessor DeletionTasks.
*/
public int incrementAndGetPendingPredecessorTasks() {
return numberOfPendingPredecessorTasks.incrementAndGet();
}
/**
* Decrements and returns pending predecessor task count.
*
* @return the number of pending predecessor DeletionTasks.
*/
public int decrementAndGetPendingPredecessorTasks() {
return numberOfPendingPredecessorTasks.decrementAndGet();
}
/**
 * Removes the DeletionTask from the state store and validates that successor
 * tasks have been scheduled and completed.
 *
 * This is called when:
 * 1) Current deletion task ran and finished.
 * 2) When directly called by predecessor task if one of the
 * dependent tasks of it has failed marking its success = false.
 */
synchronized void deletionTaskFinished() {
	try {
		NMStateStoreService stateStore = deletionService.getStateStore();
		stateStore.removeDeletionTask(taskId);
	} catch (IOException e) {
		// State-store cleanup is best-effort: log and still notify successors.
		LOG.error("Unable to remove deletion task " + taskId
			+ " from state store", e);
	}
	Iterator<DeletionTask> successorTaskI = this.successorTaskSet.iterator();
	while (successorTaskI.hasNext()) {
		DeletionTask successorTask = successorTaskI.next();
		if (!success) {
			// Propagate this task's failure so dependents see success == false.
			successorTask.setSuccess(success);
		}
		int count = successorTask.decrementAndGetPendingPredecessorTasks();
		if (count == 0) {
			// All predecessors finished: schedule the successor if everything
			// succeeded, otherwise cascade completion without running it.
			if (successorTask.getSucess()) {
				successorTask.deletionService.delete(successorTask);
			} else {
				successorTask.deletionTaskFinished();
			}
		}
	}
}
/**
 * Return the Protobuf builder with the base DeletionTask attributes.
 *
 * @return pre-populated Builder with the base attributes.
 */
DeletionServiceDeleteTaskProto.Builder getBaseDeletionTaskProtoBuilder() {
	DeletionServiceDeleteTaskProto.Builder builder =
		DeletionServiceDeleteTaskProto.newBuilder();
	builder.setId(getTaskId());
	if (getUser() != null) {
		builder.setUser(getUser());
	}
	// Persist an absolute deletion time so the configured debug delay is
	// honored even across a restart/recovery.
	builder.setDeletionTime(System.currentTimeMillis() +
		TimeUnit.MILLISECONDS.convert(getDeletionService().getDebugDelay(),
			TimeUnit.SECONDS));
	for (DeletionTask successor : getSuccessorTasks()) {
		builder.addSuccessorIds(successor.getTaskId());
	}
	return builder;
}
}
| DeletionTask |
java | apache__logging-log4j2 | log4j-core/src/main/java/org/apache/logging/log4j/core/tools/picocli/CommandLine.java | {
"start": 267702,
"end": 270398
} | enum ____ implements IStyle {
reset(0, 0),
bold(1, 21),
faint(2, 22),
italic(3, 23),
underline(4, 24),
blink(5, 25),
reverse(7, 27),
fg_black(30, 39),
fg_red(31, 39),
fg_green(32, 39),
fg_yellow(33, 39),
fg_blue(34, 39),
fg_magenta(35, 39),
fg_cyan(36, 39),
fg_white(37, 39),
bg_black(40, 49),
bg_red(41, 49),
bg_green(42, 49),
bg_yellow(43, 49),
bg_blue(44, 49),
bg_magenta(45, 49),
bg_cyan(46, 49),
bg_white(47, 49),
;
private final int startCode;
private final int endCode;
Style(final int startCode, final int endCode) {
this.startCode = startCode;
this.endCode = endCode;
}
@Override
public String on() {
return CSI + startCode + "m";
}
@Override
public String off() {
return CSI + endCode + "m";
}
/** Returns the concatenated ANSI escape codes for turning all specified styles on.
* @param styles the styles to generate ANSI escape codes for
* @return the concatenated ANSI escape codes for turning all specified styles on */
public static String on(final IStyle... styles) {
final StringBuilder result = new StringBuilder();
for (final IStyle style : styles) {
result.append(style.on());
}
return result.toString();
}
/** Returns the concatenated ANSI escape codes for turning all specified styles off.
* @param styles the styles to generate ANSI escape codes for
* @return the concatenated ANSI escape codes for turning all specified styles off */
public static String off(final IStyle... styles) {
final StringBuilder result = new StringBuilder();
for (final IStyle style : styles) {
result.append(style.off());
}
return result.toString();
}
/** Parses the specified style markup and returns the associated style.
* The markup may be one of the Style | Style |
java | spring-projects__spring-framework | spring-messaging/src/main/java/org/springframework/messaging/converter/AbstractMessageConverter.java | {
"start": 9422,
"end": 11026
} | class ____ the conversion
* @param conversionHint an extra object passed to the {@link MessageConverter},
* for example, the associated {@code MethodParameter} (may be {@code null})
* @return the result of the conversion, or {@code null} if the converter cannot
* perform the conversion
* @since 4.2
*/
protected @Nullable Object convertFromInternal(
Message<?> message, Class<?> targetClass, @Nullable Object conversionHint) {
return null;
}
/**
* Convert the payload object to serialized form.
* @param payload the Object to convert
* @param headers optional headers for the message (may be {@code null})
* @param conversionHint an extra object passed to the {@link MessageConverter},
* for example, the associated {@code MethodParameter} (may be {@code null})
* @return the resulting payload for the message, or {@code null} if the converter
* cannot perform the conversion
* @since 4.2
*/
protected @Nullable Object convertToInternal(
Object payload, @Nullable MessageHeaders headers, @Nullable Object conversionHint) {
return null;
}
static Type getResolvedType(Class<?> targetClass, @Nullable Object conversionHint) {
if (conversionHint instanceof MethodParameter param) {
param = param.nestedIfOptional();
if (Message.class.isAssignableFrom(param.getParameterType())) {
param = param.nested();
}
Type genericParameterType = param.getNestedGenericParameterType();
Class<?> contextClass = param.getContainingClass();
return GenericTypeResolver.resolveType(genericParameterType, contextClass);
}
return targetClass;
}
}
| for |
java | elastic__elasticsearch | libs/exponential-histogram/src/main/java/org/elasticsearch/exponentialhistogram/ExponentialScaleUtils.java | {
"start": 1573,
"end": 12952
} | class ____ {
private static final double LN_2 = Math.log(2);
/**
* This table is visible for testing to ensure it is up-to-date.
* <br>
* For each scale from {@link ExponentialHistogram#MIN_SCALE} to {@link ExponentialHistogram#MAX_SCALE},
* the table contains a pre-computed constant for up-scaling bucket indices.
* The constant is computed using the following formula:
* {@code 2^63 * (1 + 2^scale * (1 - log2(1 + 2^(2^-scale))))}
*/
static final long[] SCALE_UP_CONSTANT_TABLE = new long[] {
4503599627370495L,
9007199254740991L,
18014398509481983L,
36028797018963967L,
72057594037927935L,
144115188075855871L,
288230376054894118L,
576448062320457790L,
1146436840887505800L,
2104167428150631728L,
3127054724296373505L,
3828045265094622256L,
4214097751025163417L,
4412149414858430624L,
4511824212543271281L,
4561743405547877994L,
4586713247558758689L,
4599199449917992829L,
4605442711287634239L,
4608564361996858084L,
4610125189854540715L,
4610905604096266504L,
4611295811256239977L,
4611490914841115537L,
4611588466634164420L,
4611637242530765249L,
4611661630479075212L,
4611673824453231387L,
4611679921440309624L,
4611682969933848761L,
4611684494180618332L,
4611685256304003118L,
4611685637365695511L,
4611685827896541707L,
4611685923161964805L,
4611685970794676354L,
4611685994611032129L,
4611686006519210016L,
4611686012473298960L,
4611686015450343432L,
4611686016938865668L,
4611686017683126786L,
4611686018055257345L,
4611686018241322624L,
4611686018334355264L,
4611686018380871584L,
4611686018404129744L,
4611686018415758824L,
4611686018421573364L,
4611686018424480634L };
/**
 * Computes the new index for a bucket when adjusting the scale of the histogram.
 * This method supports both down-scaling (reducing the scale) and up-scaling.
 * When up-scaling, it returns the bucket containing the point of least error of the original bucket.
 *
 * @param index the current bucket index to be adjusted
 * @param currentScale the current scale
 * @param scaleAdjustment the adjustment to make; the new scale will be {@code currentScale + scaleAdjustment}
 * @return the index of the bucket in the new scale
 */
static long adjustScale(long index, int currentScale, int scaleAdjustment) {
	checkIndexAndScaleBounds(index, currentScale);
	int newScale = currentScale + scaleAdjustment;
	assert newScale >= MIN_SCALE && newScale <= MAX_SCALE
		: "adjusted scale must be in the range [" + MIN_SCALE + ", " + MAX_SCALE + "]";
	if (scaleAdjustment <= 0) {
		// Down-scaling merges 2^(-scaleAdjustment) neighboring buckets; the arithmetic
		// right shift maps the index to the containing coarser bucket (works for
		// negative indices too, since it rounds toward negative infinity).
		return index >> -scaleAdjustment;
	} else {
		assert scaleAdjustment <= MAX_INDEX_BITS : "Scaling up more than " + MAX_INDEX_BITS + " does not make sense";
		// When scaling up, we want to return the bucket containing the point of least relative error.
		// This bucket index can be computed as (index << adjustment) + offset.
		// The offset is a constant that depends only on the scale and adjustment, not the index.
		// The mathematically correct formula for the offset is:
		// 2^adjustment * (1 + 2^currentScale * (1 - log2(1 + 2^(2^-currentScale))))
		// This is hard to compute with double-precision floating-point numbers due to rounding errors and is also expensive.
		// Therefore, we precompute 2^63 * (1 + 2^currentScale * (1 - log2(1 + 2^(2^-currentScale)))) and store it
		// in SCALE_UP_CONSTANT_TABLE for each scale.
		// This can then be converted to the correct offset by dividing with (2^(63-adjustment)),
		// which is equivalent to a right shift with (63-adjustment)
		long offset = SCALE_UP_CONSTANT_TABLE[currentScale - MIN_SCALE] >> (63 - scaleAdjustment);
		return (index << scaleAdjustment) + offset;
	}
}
/**
* Compares the lower boundaries of two buckets, which may have different scales.
* This is equivalent to a mathematically correct comparison of the lower bucket boundaries.
* Note that this method allows for scales and indices of the full numeric range of the types.
*
* @param idxA the index of the first bucket
* @param scaleA the scale of the first bucket
* @param idxB the index of the second bucket
* @param scaleB the scale of the second bucket
* @return a negative integer, zero, or a positive integer as the first bucket's lower boundary is
* less than, equal to, or greater than the second bucket's lower boundary
*/
public static int compareExponentiallyScaledValues(long idxA, int scaleA, long idxB, int scaleB) {
if (scaleA > scaleB) {
return -compareExponentiallyScaledValues(idxB, scaleB, idxA, scaleA);
}
// scaleA <= scaleB
int shifts = scaleB - scaleA;
long scaledDownB = idxB >> shifts;
int result = Long.compare(idxA, scaledDownB);
if (result == 0) {
// the scaled down values are equal
// this means that b is bigger if it has a "fractional" part, which corresponds to the bits that were removed on the right-shift
assert (1L << shifts) > 0;
long shiftedAway = idxB & ((1L << shifts) - 1);
if (shiftedAway > 0) {
return -1;
} else {
return 0;
}
}
return result;
}
/**
* Returns the maximum permissible scale increase that does not cause the index to grow out
* of the [{@link ExponentialHistogram#MIN_INDEX}, {@link ExponentialHistogram#MIN_INDEX}] range.
*
* @param index the index to check
* @return the maximum permissible scale increase
*/
public static int getMaximumScaleIncrease(long index) {
checkIndexBounds(index);
// Scale increase by one corresponds to a left shift, which in turn is the same as multiplying by two.
// Because we know that MIN_INDEX = -MAX_INDEX, we can just compute the maximum increase of the absolute index.
// This allows us to reason only about non-negative indices further below.
index = Math.abs(index);
// the maximum scale increase is defined by how many left-shifts we can do without growing beyond MAX_INDEX
// MAX_INDEX is defined as a number where the left MAX_INDEX_BITS are all ones.
// So in other words, we must ensure that the leftmost (64 - MAX_INDEX_BITS) remain zero,
// which is exactly what the formula below does.
return Long.numberOfLeadingZeros(index) - (64 - MAX_INDEX_BITS);
}
/**
* Returns a scale at to which the given index can be scaled down without changing the exponentially scaled number it represents.
* @param index the index of the number
* @param scale the current scale of the number
* @return the new scale
*/
static int normalizeScale(long index, int scale) {
return Math.max(MIN_SCALE, scale - Long.numberOfTrailingZeros(index));
}
/**
* Returns the upper boundary of the bucket with the given index and scale.
*
* @param index the index of the bucket
* @param scale the scale of the bucket
* @return the upper boundary of the bucket
*/
public static double getUpperBucketBoundary(long index, int scale) {
checkIndexAndScaleBounds(index, scale);
return exponentiallyScaledToDoubleValue(index + 1, scale);
}
/**
* Returns the lower boundary of the bucket with the given index and scale.
*
* @param index the index of the bucket in the [{@link ExponentialHistogram#MIN_INDEX}, {@link ExponentialHistogram#MAX_INDEX}] range.
* @param scale the scale of the bucket
* @return the lower boundary of the bucket
*/
public static double getLowerBucketBoundary(long index, int scale) {
checkIndexAndScaleBounds(index, scale);
return exponentiallyScaledToDoubleValue(index, scale);
}
/**
* Computes (2^(2^-scale))^index,
* allowing also indices outside of the [{@link ExponentialHistogram#MIN_INDEX}, {@link ExponentialHistogram#MAX_INDEX}] range.
*/
static double exponentiallyScaledToDoubleValue(long index, int scale) {
// Math.exp is expected to be faster and more accurate than Math.pow
// For that reason we use (2^(2^-scale))^index = 2^( (2^-scale) * index) = (e^ln(2))^( (2^-scale) * index)
// = e^( ln(2) * (2^-scale) * index)
double inverseFactor = Math.scalb(LN_2, -scale);
return Math.exp(inverseFactor * index);
}
/**
* For a bucket with the given index, computes the point {@code x} in the bucket such that
* {@code (x - l) / l} equals {@code (u - x) / u}, where {@code l} is the lower bucket boundary and {@code u}
* is the upper bucket boundary.
* <br>
* In other words, we select the point in the bucket that has the least relative error with respect to any other point in the bucket.
*
* @param bucketIndex the index of the bucket
* @param scale the scale of the bucket
* @return the point of least relative error
*/
public static double getPointOfLeastRelativeError(long bucketIndex, int scale) {
checkIndexAndScaleBounds(bucketIndex, scale);
double histogramBase = Math.pow(2, Math.scalb(1, -scale));
if (Double.isFinite(histogramBase)) {
double upperBound = getUpperBucketBoundary(bucketIndex, scale);
return 2 / (histogramBase + 1) * upperBound;
} else {
if (bucketIndex >= 0) {
// the bucket is (1, +inf), approximate point of least error as inf
return Double.POSITIVE_INFINITY;
} else {
// the bucket is (1/(Inf), 1), approximate point of least error as 0
return 0;
}
}
}
/**
* Provides the index of the bucket of the exponential histogram with the given scale that contains the provided value.
*
* @param value the value to find the bucket for
* @param scale the scale of the histogram
* @return the index of the bucket
*/
public static long computeIndex(double value, int scale) {
checkScaleBounds(scale);
return Base2ExponentialHistogramIndexer.computeIndex(value, scale);
}
private static void checkIndexAndScaleBounds(long index, int scale) {
checkIndexBounds(index);
checkScaleBounds(scale);
}
private static void checkScaleBounds(int scale) {
assert scale >= MIN_SCALE && scale <= MAX_SCALE : "scale must be in range [" + MIN_SCALE + ".." + MAX_SCALE + "]";
}
private static void checkIndexBounds(long index) {
assert index >= MIN_INDEX && index <= MAX_INDEX : "index must be in range [" + MIN_INDEX + ".." + MAX_INDEX + "]";
}
}
| ExponentialScaleUtils |
java | google__error-prone | core/src/main/java/com/google/errorprone/bugpatterns/checkreturnvalue/NoCanIgnoreReturnValueOnClasses.java | {
"start": 5123,
"end": 5897
} | class ____'s marked @CRV
return hasAnnotation(classTree, CRV, state) ? null : super.visitClass(classTree, null);
}
@Override
public Void visitMethod(MethodTree methodTree, Void unused) {
if (shouldAddCirv(methodTree, state)) {
String trailingComment = null;
if (methodTree.getReturnType() == null) { // constructor
trailingComment = CTOR_COMMENT;
} else if (alwaysReturnsThis()) {
trailingComment = "";
} else {
trailingComment = METHOD_COMMENT;
}
fix.prefixWith(methodTree, "@" + cirvName + trailingComment + "\n");
}
// TODO(kak): we could also consider removing CRV from individual methods (since the
// enclosing | that |
java | apache__camel | core/camel-health/src/main/java/org/apache/camel/impl/health/ProducersHealthCheckRepository.java | {
"start": 1504,
"end": 3006
} | class ____ extends ServiceSupport
implements CamelContextAware, WritableHealthCheckRepository, StaticService, NonManagedService {
public static final String REPOSITORY_ID = "producers";
public static final String REPOSITORY_NAME = "producers-repository";
private final List<HealthCheck> checks;
private volatile CamelContext context;
private boolean enabled; // default disabled
public ProducersHealthCheckRepository() {
this.checks = new CopyOnWriteArrayList<>();
}
@Override
public void setCamelContext(CamelContext camelContext) {
this.context = camelContext;
}
@Override
public String getId() {
return REPOSITORY_ID;
}
@Override
public CamelContext getCamelContext() {
return context;
}
@Override
public boolean isEnabled() {
return enabled;
}
@Override
public void setEnabled(boolean enabled) {
this.enabled = enabled;
}
@Override
public Stream<HealthCheck> stream() {
return this.context != null && enabled
? checks.stream()
: Stream.empty();
}
@Override
public void addHealthCheck(HealthCheck healthCheck) {
CamelContextAware.trySetCamelContext(healthCheck, getCamelContext());
this.checks.add(healthCheck);
}
@Override
public void removeHealthCheck(HealthCheck healthCheck) {
this.checks.remove(healthCheck);
}
}
| ProducersHealthCheckRepository |
java | FasterXML__jackson-databind | src/test/java/tools/jackson/databind/deser/bean/BeanPropertyMapTest.java | {
"start": 602,
"end": 1560
} | class ____ extends ObjectIdReader
{
public MyObjectIdReader(String name) {
super(BOGUS_TYPE, new PropertyName(name), null,
null, null, null);
}
}
// Highly specialized test in which we get couple of hash collisions for
// small (16) hash map
@Test
public void testArrayOutOfBounds884() throws Exception
{
List<SettableBeanProperty> props = new ArrayList<SettableBeanProperty>();
PropertyMetadata md = PropertyMetadata.STD_REQUIRED;
props.add(new ObjectIdValueProperty(new MyObjectIdReader("pk"), md));
props.add(new ObjectIdValueProperty(new MyObjectIdReader("firstName"), md));
BeanPropertyMap propMap = new BeanPropertyMap(props,
null, Locale.getDefault(), false, true);
propMap = propMap.withProperty(new ObjectIdValueProperty(new MyObjectIdReader("@id"), md));
assertNotNull(propMap);
}
}
| MyObjectIdReader |
java | spring-projects__spring-boot | module/spring-boot-actuator/src/test/java/org/springframework/boot/actuate/endpoint/annotation/DiscoveredOperationsFactoryTests.java | {
"start": 5555,
"end": 5646
} | class ____ {
@ReadOperation
String read() {
return "read";
}
}
static | ExampleRead |
java | qos-ch__slf4j | jcl-over-slf4j/src/main/java/org/apache/commons/logging/impl/SimpleLog.java | {
"start": 19405,
"end": 19787
} | class ____ cannot be identified.
*/
private static ClassLoader getContextClassLoader() {
ClassLoader classLoader = null;
if (classLoader == null) {
try {
// Are we running on a JDK 1.2 or later system?
Method method = Thread.class.getMethod("getContextClassLoader");
// Get the thread context | loader |
java | mybatis__mybatis-3 | src/test/java/org/apache/ibatis/submitted/repeatable/RepeatableInsertTest.java | {
"start": 1092,
"end": 6962
} | class ____ {
@Test
void hsql() throws IOException, SQLException {
SqlSessionFactory sqlSessionFactory;
try (Reader reader = Resources.getResourceAsReader("org/apache/ibatis/submitted/repeatable/mybatis-config.xml")) {
sqlSessionFactory = new SqlSessionFactoryBuilder().build(reader, "development-hsql");
}
BaseDataTest.runScript(sqlSessionFactory.getConfiguration().getEnvironment().getDataSource(),
"org/apache/ibatis/submitted/repeatable/CreateDB.sql");
try (SqlSession sqlSession = sqlSessionFactory.openSession()) {
Mapper mapper = sqlSession.getMapper(Mapper.class);
int count = mapper.count();
User newUser = new User();
newUser.setName("Test");
mapper.insertUser(newUser);
User user = mapper.getUser(newUser.getId());
Assertions.assertEquals(Integer.valueOf(count + 1), user.getId());
Assertions.assertEquals("Test HSQL", user.getName());
}
}
@Test
void hsqlUsingProvider() throws IOException, SQLException {
SqlSessionFactory sqlSessionFactory;
try (Reader reader = Resources.getResourceAsReader("org/apache/ibatis/submitted/repeatable/mybatis-config.xml")) {
sqlSessionFactory = new SqlSessionFactoryBuilder().build(reader, "development-hsql");
}
BaseDataTest.runScript(sqlSessionFactory.getConfiguration().getEnvironment().getDataSource(),
"org/apache/ibatis/submitted/repeatable/CreateDB.sql");
try (SqlSession sqlSession = sqlSessionFactory.openSession()) {
Mapper mapper = sqlSession.getMapper(Mapper.class);
int count = mapper.count();
User newUser = new User();
newUser.setName("Test");
mapper.insertUserUsingProvider(newUser);
User user = mapper.getUser(newUser.getId());
Assertions.assertEquals(Integer.valueOf(count + 1), user.getId());
Assertions.assertEquals("Test HSQL", user.getName());
}
}
@Test
void derby() throws IOException, SQLException {
SqlSessionFactory sqlSessionFactory;
try (Reader reader = Resources.getResourceAsReader("org/apache/ibatis/submitted/repeatable/mybatis-config.xml")) {
sqlSessionFactory = new SqlSessionFactoryBuilder().build(reader, "development-derby");
}
// populate in-memory database
BaseDataTest.runScript(sqlSessionFactory.getConfiguration().getEnvironment().getDataSource(),
"org/apache/ibatis/submitted/repeatable/CreateDB.sql");
try (SqlSession sqlSession = sqlSessionFactory.openSession()) {
Mapper mapper = sqlSession.getMapper(Mapper.class);
int count = mapper.count();
User newUser = new User();
newUser.setName("Test");
mapper.insertUser(newUser);
User user = mapper.getUser(newUser.getId());
Assertions.assertEquals(Integer.valueOf(count + 1001), user.getId());
Assertions.assertEquals("Test DERBY", user.getName());
}
}
@Test
void derbyUsingProvider() throws IOException, SQLException {
SqlSessionFactory sqlSessionFactory;
try (Reader reader = Resources.getResourceAsReader("org/apache/ibatis/submitted/repeatable/mybatis-config.xml")) {
sqlSessionFactory = new SqlSessionFactoryBuilder().build(reader, "development-derby");
}
// populate in-memory database
BaseDataTest.runScript(sqlSessionFactory.getConfiguration().getEnvironment().getDataSource(),
"org/apache/ibatis/submitted/repeatable/CreateDB.sql");
try (SqlSession sqlSession = sqlSessionFactory.openSession()) {
Mapper mapper = sqlSession.getMapper(Mapper.class);
int count = mapper.count();
User newUser = new User();
newUser.setName("Test");
mapper.insertUserUsingProvider(newUser);
User user = mapper.getUser(newUser.getId());
Assertions.assertEquals(Integer.valueOf(count + 1001), user.getId());
Assertions.assertEquals("Test DERBY", user.getName());
}
}
@Test
void h2() throws IOException, SQLException {
SqlSessionFactory sqlSessionFactory;
try (Reader reader = Resources.getResourceAsReader("org/apache/ibatis/submitted/repeatable/mybatis-config.xml")) {
sqlSessionFactory = new SqlSessionFactoryBuilder().build(reader, "development-h2");
}
// populate in-memory database
BaseDataTest.runScript(sqlSessionFactory.getConfiguration().getEnvironment().getDataSource(),
"org/apache/ibatis/submitted/repeatable/CreateDB.sql");
try (SqlSession sqlSession = sqlSessionFactory.openSession()) {
Mapper mapper = sqlSession.getMapper(Mapper.class);
int count = mapper.count();
User newUser = new User();
newUser.setName("Test");
mapper.insertUser(newUser);
User user = mapper.getUser(newUser.getId());
Assertions.assertEquals(Integer.valueOf(count + 10001), user.getId());
Assertions.assertEquals("Test DEFAULT", user.getName());
}
}
@Test
void h2UsingProvider() throws IOException, SQLException {
SqlSessionFactory sqlSessionFactory;
try (Reader reader = Resources.getResourceAsReader("org/apache/ibatis/submitted/repeatable/mybatis-config.xml")) {
sqlSessionFactory = new SqlSessionFactoryBuilder().build(reader, "development-h2");
}
// populate in-memory database
BaseDataTest.runScript(sqlSessionFactory.getConfiguration().getEnvironment().getDataSource(),
"org/apache/ibatis/submitted/repeatable/CreateDB.sql");
try (SqlSession sqlSession = sqlSessionFactory.openSession()) {
Mapper mapper = sqlSession.getMapper(Mapper.class);
int count = mapper.count();
User newUser = new User();
newUser.setName("Test");
mapper.insertUserUsingProvider(newUser);
User user = mapper.getUser(newUser.getId());
Assertions.assertEquals(Integer.valueOf(count + 10001), user.getId());
Assertions.assertEquals("Test DEFAULT", user.getName());
}
}
}
| RepeatableInsertTest |
java | quarkusio__quarkus | docs/src/main/java/io/quarkus/docs/generation/QuarkusMavenPluginDocsGenerator.java | {
"start": 590,
"end": 5401
} | class ____ {
private static final String QUARKUS_MAVEN_PLUGIN = "quarkus-maven-plugin-";
private static final String GOAL_PARAMETER_ANCHOR_FORMAT = QUARKUS_MAVEN_PLUGIN + "goal-%s-%s";
public static void main(String[] args) throws Exception {
if (args.length != 2) {
throw new IllegalArgumentException("Path for input and output were not provided");
}
Path pluginXmlDescriptorPath = Path.of(args[0]);
Path mavenPluginAdocPath = Path.of(args[1]);
if (!Files.exists(pluginXmlDescriptorPath) || !Files.isRegularFile(pluginXmlDescriptorPath)) {
throw new IllegalArgumentException(pluginXmlDescriptorPath + " does not exist or is not a regular file");
}
// Deserialize plugin.xml to PluginDescriptor
PluginDescriptor pluginDescriptor = null;
try (Reader input = new XmlStreamReader(new FileInputStream(pluginXmlDescriptorPath.toFile()))) {
pluginDescriptor = new PluginDescriptorBuilder().build(input);
} catch (IOException e) {
throw new IllegalStateException("Failed to deserialize PluginDescriptor", e);
}
// Don't generate documentation if there are no goals (shouldn't happen if correct descriptor is available)
if (pluginDescriptor != null && (pluginDescriptor.getMojos() == null || pluginDescriptor.getMojos().isEmpty())) {
return;
}
StringBuilder asciidoc = new StringBuilder();
// Build Goals documentation
for (MojoDescriptor mojo : pluginDescriptor.getMojos()) {
asciidoc.append("= ").append(mojo.getFullGoalName()).append("\n\n");
// Add Goal Description
if (mojo.getDescription() != null && !mojo.getDescription().isBlank()) {
asciidoc.append(mojo.getDescription()).append("\n\n");
}
if (mojo.getParameters() != null && !mojo.getParameters().isEmpty()) {
asciidoc.append("[.configuration-reference, cols=\"70,15,15\"]\n");
asciidoc.append("|===\n\n");
asciidoc.append("h|[[").append(String.format(GOAL_PARAMETER_ANCHOR_FORMAT, mojo.getGoal(), "parameter-table"))
.append("]] Parameter\n");
asciidoc.append("h|Type\n");
asciidoc.append("h|Default value\n\n");
for (Parameter parameter : mojo.getParameters()) {
String property = getPropertyFromExpression(parameter.getExpression());
String name = Optional.ofNullable(property).orElseGet(parameter::getName);
asciidoc.append("a| [[").append(String.format(GOAL_PARAMETER_ANCHOR_FORMAT, mojo.getGoal(), name))
.append("]] ").append(name).append("\n");
if (parameter.getDescription() != null && !parameter.getDescription().isBlank()) {
asciidoc.append("\n[.description]\n--\n").append(escapeCellContent(parameter.getDescription()))
.append("\n--\n");
}
asciidoc.append("|").append("`" + simplifyType(parameter.getType()) + "`")
.append(parameter.isRequired() ? " (required)" : "")
.append("\n");
asciidoc.append("|")
.append(parameter.getDefaultValue() != null && !parameter.getDefaultValue().isEmpty()
? "`" + escapeCellContent(parameter.getDefaultValue()) + "`"
: "")
.append("\n\n");
}
asciidoc.append("|===\n\n");
}
}
Files.createDirectories(mavenPluginAdocPath.getParent());
Files.writeString(mavenPluginAdocPath, asciidoc.toString());
}
private static String getPropertyFromExpression(String expression) {
if ((expression != null && !expression.isEmpty())
&& expression.startsWith("${")
&& expression.endsWith("}")
&& !expression.substring(2).contains("${")) {
// expression="${xxx}" -> property="xxx"
return expression.substring(2, expression.length() - 1);
}
// no property can be extracted
return null;
}
private static String simplifyType(String type) {
if (type == null || type.isBlank() || type.indexOf('.') == -1) {
return type;
}
return type.substring(type.lastIndexOf('.') + 1);
}
private static String escapeCellContent(String value) {
if (value == null) {
return null;
}
return value.replace("|", "\\|");
}
}
| QuarkusMavenPluginDocsGenerator |
java | elastic__elasticsearch | x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseParser.java | {
"start": 99181,
"end": 100957
} | class ____ extends GroupingElementContext {
public GroupingExpressionsContext groupingExpressions() {
return getRuleContext(GroupingExpressionsContext.class, 0);
}
public SingleGroupingSetContext(GroupingElementContext ctx) {
copyFrom(ctx);
}
@Override
public void enterRule(ParseTreeListener listener) {
if (listener instanceof SqlBaseListener) ((SqlBaseListener) listener).enterSingleGroupingSet(this);
}
@Override
public void exitRule(ParseTreeListener listener) {
if (listener instanceof SqlBaseListener) ((SqlBaseListener) listener).exitSingleGroupingSet(this);
}
@Override
public <T> T accept(ParseTreeVisitor<? extends T> visitor) {
if (visitor instanceof SqlBaseVisitor) return ((SqlBaseVisitor<? extends T>) visitor).visitSingleGroupingSet(this);
else return visitor.visitChildren(this);
}
}
public final GroupingElementContext groupingElement() throws RecognitionException {
GroupingElementContext _localctx = new GroupingElementContext(_ctx, getState());
enterRule(_localctx, 22, RULE_groupingElement);
try {
_localctx = new SingleGroupingSetContext(_localctx);
enterOuterAlt(_localctx, 1);
{
setState(356);
groupingExpressions();
}
} catch (RecognitionException re) {
_localctx.exception = re;
_errHandler.reportError(this, re);
_errHandler.recover(this, re);
} finally {
exitRule();
}
return _localctx;
}
@SuppressWarnings("CheckReturnValue")
public static | SingleGroupingSetContext |
java | alibaba__fastjson | src/main/java/com/alibaba/fastjson/serializer/ListSerializer.java | {
"start": 835,
"end": 5862
} | class ____ implements ObjectSerializer {
public static final ListSerializer instance = new ListSerializer();
public final void write(JSONSerializer serializer, Object object, Object fieldName, Type fieldType, int features)
throws IOException {
boolean writeClassName = serializer.out.isEnabled(SerializerFeature.WriteClassName)
|| SerializerFeature.isEnabled(features, SerializerFeature.WriteClassName);
SerializeWriter out = serializer.out;
Type elementType = null;
if (writeClassName) {
elementType = TypeUtils.getCollectionItemType(fieldType);
}
if (object == null) {
out.writeNull(SerializerFeature.WriteNullListAsEmpty);
return;
}
List<?> list = (List<?>) object;
if (list.size() == 0) {
out.append("[]");
return;
}
SerialContext context = serializer.context;
serializer.setContext(context, object, fieldName, 0);
ObjectSerializer itemSerializer = null;
try {
if (out.isEnabled(SerializerFeature.PrettyFormat)) {
out.append('[');
serializer.incrementIndent();
int i = 0;
for (Object item : list) {
if (i != 0) {
out.append(',');
}
serializer.println();
if (item != null) {
if (serializer.containsReference(item)) {
serializer.writeReference(item);
} else {
itemSerializer = serializer.getObjectWriter(item.getClass());
SerialContext itemContext = new SerialContext(context, object, fieldName, 0, 0);
serializer.context = itemContext;
itemSerializer.write(serializer, item, i, elementType, features);
}
} else {
serializer.out.writeNull();
}
i++;
}
serializer.decrementIdent();
serializer.println();
out.append(']');
return;
}
out.append('[');
for (int i = 0, size = list.size(); i < size; ++i) {
Object item = list.get(i);
if (i != 0) {
out.append(',');
}
if (item == null) {
out.append("null");
} else {
Class<?> clazz = item.getClass();
if (clazz == Integer.class) {
out.writeInt(((Integer) item).intValue());
} else if (clazz == Long.class) {
long val = ((Long) item).longValue();
if (writeClassName) {
out.writeLong(val);
out.write('L');
} else {
out.writeLong(val);
}
} else {
if ((SerializerFeature.DisableCircularReferenceDetect.mask & features) != 0){
itemSerializer = serializer.getObjectWriter(item.getClass());
itemSerializer.write(serializer, item, i, elementType, features);
}else {
if (!out.disableCircularReferenceDetect) {
SerialContext itemContext = new SerialContext(context, object, fieldName, 0, 0);
serializer.context = itemContext;
}
if (serializer.containsReference(item)) {
serializer.writeReference(item);
} else {
itemSerializer = serializer.getObjectWriter(item.getClass());
if ((SerializerFeature.WriteClassName.mask & features) != 0
&& itemSerializer instanceof JavaBeanSerializer)
{
JavaBeanSerializer javaBeanSerializer = (JavaBeanSerializer) itemSerializer;
javaBeanSerializer.writeNoneASM(serializer, item, i, elementType, features);
} else {
itemSerializer.write(serializer, item, i, elementType, features);
}
}
}
}
}
}
out.append(']');
} finally {
serializer.context = context;
}
}
}
| ListSerializer |
java | FasterXML__jackson-databind | src/main/java/tools/jackson/databind/ser/BeanSerializerFactory.java | {
"start": 3120,
"end": 7087
} | class ____
extends BasicSerializerFactory
implements java.io.Serializable
{
private static final long serialVersionUID = 3;
/**
* Like {@link BasicSerializerFactory}, this factory is stateless, and
* thus a single shared global (== singleton) instance can be used
* without thread-safety issues.
*/
public final static BeanSerializerFactory instance = new BeanSerializerFactory(null);
/*
/**********************************************************************
/* Life-cycle: creation, configuration
/**********************************************************************
*/
/**
* Constructor for creating instances with specified configuration.
*/
protected BeanSerializerFactory(SerializerFactoryConfig config)
{
super(config);
}
/**
* Method used by module registration functionality, to attach additional
* serializer providers into this serializer factory. This is typically
* handled by constructing a new instance with additional serializers,
* to ensure thread-safe access.
*/
@Override
public SerializerFactory withConfig(SerializerFactoryConfig config)
{
if (_factoryConfig == config) {
return this;
}
/* 22-Nov-2010, tatu: Handling of subtypes is tricky if we do immutable-with-copy-ctor;
* and we pretty much have to here either choose between losing subtype instance
* when registering additional serializers, or losing serializers.
* Instead, let's actually just throw an error if this method is called when subtype
* has not properly overridden this method; this to indicate problem as soon as possible.
*/
ClassUtil.verifyMustOverride(BeanSerializerFactory.class, this, "withConfig");
return new BeanSerializerFactory(config);
}
/*
/**********************************************************************
/* SerializerFactory impl
/**********************************************************************
*/
/**
* Main serializer constructor method. We will have to be careful
* with respect to ordering of various method calls: essentially
* we want to reliably figure out which classes are standard types,
* and which are beans. The problem is that some bean Classes may
* implement standard interfaces (say, {@link java.lang.Iterable}.
*<p>
* Note: sub-classes may choose to complete replace implementation,
* if they want to alter priority of serializer lookups.
*/
@Override
@SuppressWarnings("unchecked")
public ValueSerializer<Object> createSerializer(SerializationContext ctxt, JavaType origType,
BeanDescription.Supplier beanDescRef, JsonFormat.Value formatOverrides)
{
// Very first thing, let's check if there is explicit serializer annotation:
ValueSerializer<?> ser = findSerializerFromAnnotation(ctxt,
beanDescRef.getClassInfo());
if (ser != null) {
return (ValueSerializer<Object>) ser;
}
final SerializationConfig config = ctxt.getConfig();
boolean staticTyping;
// Next: we may have annotations that further indicate actual type to use (a super type)
final AnnotationIntrospector intr = config.getAnnotationIntrospector();
JavaType type;
if (intr == null) {
type = origType;
} else {
try {
type = intr.refineSerializationType(config, beanDescRef.getClassInfo(), origType);
} catch (JacksonException e) {
return ctxt.reportBadTypeDefinition(beanDescRef, e.getMessage());
}
}
if (type == origType) { // no changes, won't force static typing
staticTyping = false;
} else { // changes; assume static typing; plus, need to re-introspect if | BeanSerializerFactory |
java | assertj__assertj-core | assertj-core/src/test/java/org/assertj/core/api/abstract_/AbstractAssert_isEqualTo_Test.java | {
"start": 1081,
"end": 2146
} | class ____ extends AbstractAssertBaseTest {
@Override
protected ConcreteAssert invoke_api_method() {
return assertions.isEqualTo(Long.valueOf(8l));
}
@Test
void should_fail_because_called_on_assertion_directly() {
// WHEN
Throwable thrown = catchThrowable(() -> assertThat(assertions).isEqualTo(assertions));
// THEN
then(thrown).isInstanceOf(UnsupportedOperationException.class)
.hasMessage("Attempted to compare an assertion object to another object using 'isEqualTo'. "
+ "This is not supported. Perhaps you meant 'isSameAs' instead?");
}
@Test
void should_not_fail_when_equals_exceptions_is_deactivated() {
AbstractAssert.throwUnsupportedExceptionOnEquals = false;
try {
assertions.isEqualTo(assertions);
} finally {
AbstractAssert.throwUnsupportedExceptionOnEquals = true;
}
}
@Override
protected void verify_internal_effects() {
verify(objects).assertEqual(getInfo(assertions), getActual(assertions), 8L);
}
}
| AbstractAssert_isEqualTo_Test |
java | apache__logging-log4j2 | log4j-1.2-api/src/main/java/org/apache/log4j/Hierarchy.java | {
"start": 2911,
"end": 3392
} | class ____ extends AbstractLoggerAdapter<Logger> {
@Override
protected org.apache.logging.log4j.spi.LoggerContext getContext() {
return PrivateLogManager.getContext();
}
@Override
protected Logger newLogger(final String name, final org.apache.logging.log4j.spi.LoggerContext context) {
return new Logger(context, name);
}
}
/**
* Private LogManager.
*/
private static | PrivateLoggerAdapter |
java | apache__camel | core/camel-core/src/test/java/org/apache/camel/support/cache/SimpleSoftCacheTest.java | {
"start": 1349,
"end": 8959
} | class ____ {
private final SimpleSoftCache<Integer, Object> cache = new SimpleSoftCache<>(new ConcurrentHashMap<>());
@Test
void testSoftCacheGetAndPut() {
cache.put(1, "foo");
cache.put(2, "bar");
assertEquals("foo", cache.get(1));
assertEquals("bar", cache.get(2));
assertNull(cache.get(3));
assertEquals(2, cache.size());
cache.getInnerCache().get(1).clear();
assertEquals(2, cache.size());
assertNull(cache.get(1));
assertEquals(1, cache.size());
}
@Test
void testSoftCacheContainsValue() {
cache.put(1, "foo");
assertTrue(cache.containsValue("foo"));
assertFalse(cache.containsValue("bar"));
assertFalse(cache.isEmpty());
cache.getInnerCache().get(1).clear();
assertFalse(cache.containsValue("foo"));
assertTrue(cache.isEmpty());
}
@Test
void testSoftCacheForEach() {
cache.put(1, "foo");
cache.put(2, "bar");
Map<Integer, Object> tmp = new HashMap<>(cache);
assertEquals("foo", tmp.get(1));
assertEquals("bar", tmp.get(2));
assertNull(tmp.get(3));
assertEquals(2, tmp.size());
cache.getInnerCache().get(1).clear();
tmp = new HashMap<>(cache);
assertNull(tmp.get(1));
assertEquals("bar", tmp.get(2));
assertNull(tmp.get(3));
assertEquals(1, tmp.size());
}
@Test
void testSoftCacheReplaceAll() {
cache.put(1, "foo");
cache.put(2, "bar");
cache.replaceAll((k, v) -> v + "2");
assertEquals("foo2", cache.get(1));
assertEquals("bar2", cache.get(2));
assertEquals(2, cache.size());
}
@Test
void testSoftCachePutIfAbsent() {
cache.put(1, "foo");
assertEquals("foo", cache.putIfAbsent(1, "bar"));
assertEquals("foo", cache.get(1));
assertNull(cache.putIfAbsent(2, "bar"));
assertEquals("bar", cache.get(2));
}
@Test
void testSoftCacheRemove() {
cache.put(1, "foo");
assertFalse(cache.remove(2, "foo"));
assertFalse(cache.remove(1, "bar"));
assertEquals("foo", cache.get(1));
assertFalse(cache.isEmpty());
assertTrue(cache.remove(1, "foo"));
assertNull(cache.get(1));
assertTrue(cache.isEmpty());
}
@Test
void testSoftCacheReplaceSpecific() {
cache.put(1, "foo");
assertFalse(cache.replace(2, "foo", "bar"));
assertFalse(cache.replace(1, "bar", "foo"));
assertEquals("foo", cache.get(1));
assertTrue(cache.replace(1, "foo", "bar"));
assertEquals("bar", cache.get(1));
}
@Test
void testSoftCacheReplace() {
cache.put(1, "foo");
assertNull(cache.replace(2, "bar"));
assertEquals("foo", cache.get(1));
assertEquals("foo", cache.replace(1, "bar"));
assertEquals("bar", cache.get(1));
}
@Test
void testSoftCacheComputeIfAbsent() {
cache.put(1, "foo");
assertEquals("foo", cache.computeIfAbsent(1, k -> "bar"));
assertEquals("foo", cache.get(1));
assertEquals("bar", cache.computeIfAbsent(2, k -> "bar"));
assertEquals("bar", cache.get(2));
}
@Test
void testSoftCacheComputeIfPresent() {
cache.put(1, "foo");
assertEquals("bar", cache.computeIfPresent(1, (k, v) -> "bar"));
assertEquals("bar", cache.get(1));
assertNull(cache.computeIfPresent(1, (k, v) -> null));
assertNull(cache.get(1));
assertNull(cache.computeIfPresent(1, (k, v) -> "bar"));
}
@Test
void testSoftCacheCompute() {
cache.put(1, "foo");
assertEquals("bar", cache.compute(1, (k, v) -> "bar"));
assertEquals("bar", cache.get(1));
assertNull(cache.compute(1, (k, v) -> null));
assertNull(cache.get(1));
assertEquals("bar", cache.compute(1, (k, v) -> "bar"));
assertEquals("bar", cache.get(1));
assertNull(cache.compute(2, (k, v) -> null));
assertNull(cache.get(2));
}
@Test
void testSoftCacheMerge() {
cache.put(1, "foo");
assertEquals("foo-2", cache.merge(1, "2", (v1, v2) -> v1 + "-" + v2));
assertEquals("foo-2", cache.get(1));
assertNull(cache.merge(1, "2", (v1, v2) -> null));
assertNull(cache.get(1));
assertEquals("2", cache.merge(1, "2", (v1, v2) -> "bar"));
assertEquals("2", cache.get(1));
assertEquals("2", cache.merge(2, "2", (v1, v2) -> null));
assertEquals("2", cache.get(2));
}
@Test
void testSimpleSoftCachePutOverride() {
Object old = cache.put(1, "foo");
assertNull(old);
old = cache.put(2, "bar");
assertNull(old);
assertEquals("foo", cache.get(1));
assertEquals("bar", cache.get(2));
old = cache.put(1, "changed");
assertEquals("foo", old);
assertEquals("changed", cache.get(1));
assertEquals(2, cache.size());
}
@Test
void testSimpleSoftCachePutAll() {
Map<Integer, Object> map = new HashMap<>();
map.put(1, "foo");
map.put(2, "bar");
cache.putAll(map);
assertEquals("foo", cache.get(1));
assertEquals("bar", cache.get(2));
assertNull(cache.get(3));
assertEquals(2, cache.size());
}
@Test
void testSimpleSoftCacheRemove() {
cache.put(1, "foo");
cache.put(2, "bar");
assertEquals("bar", cache.get(2));
cache.remove(2);
assertNull(cache.get(2));
}
@Test
void testSimpleSoftCacheValues() {
cache.put(1, "foo");
cache.put(2, "bar");
Collection<Object> col = cache.values();
assertEquals(2, col.size());
Iterator<Object> it = col.iterator();
assertEquals("foo", it.next());
assertEquals("bar", it.next());
}
@Test
void testSimpleSoftCacheEmpty() {
assertTrue(cache.isEmpty());
cache.put(1, "foo");
assertFalse(cache.isEmpty());
cache.put(2, "bar");
assertFalse(cache.isEmpty());
cache.remove(2);
assertFalse(cache.isEmpty());
cache.clear();
assertTrue(cache.isEmpty());
}
@Test
void testSimpleSoftCacheContainsKey() {
assertFalse(cache.containsKey(1));
cache.put(1, "foo");
assertTrue(cache.containsKey(1));
assertFalse(cache.containsKey(2));
cache.put(2, "foo");
assertTrue(cache.containsKey(2));
}
@Test
void testSimpleSoftCacheKeySet() {
cache.put(1, "foo");
cache.put(2, "foo");
Set<Integer> keys = cache.keySet();
assertEquals(2, keys.size());
Iterator<Integer> it = keys.iterator();
assertEquals(1, it.next().intValue());
assertEquals(2, it.next().intValue());
}
@Test
void testSimpleSoftCacheNotRunOutOfMemory() {
// we should not run out of memory using the soft cache
// if you run this test with a regular cache then you will run out of memory
int maximumCacheSize = 1024;
for (int i = 0; i < maximumCacheSize; i++) {
Object data = new LargeObject();
Integer key = Integer.valueOf(i);
cache.put(key, data);
}
Map<Integer, Object> tmp = new HashMap<>(cache);
int size = tmp.size();
assertTrue(size < maximumCacheSize, "Cache size should not be max, was: " + size);
}
public static | SimpleSoftCacheTest |
java | spring-projects__spring-framework | spring-test/src/test/java/org/springframework/test/context/support/ContextLoaderUtilsContextHierarchyTests.java | {
"start": 25185,
"end": 25477
} | class ____ extends
TestClass2WithSingleLevelContextHierarchyFromMetaAnnotation {
}
// -------------------------------------------------------------------------
@ContextConfiguration
@Retention(RetentionPolicy.RUNTIME)
private @ | TestClass3WithSingleLevelContextHierarchyFromMetaAnnotation |
java | quarkusio__quarkus | extensions/tls-registry/deployment/src/test/java/io/quarkus/tls/DefaultTrustStoreProviderTest.java | {
"start": 1075,
"end": 2182
} | class ____ {
private static final String configuration = """
# no configuration by default
""";
@RegisterExtension
static final QuarkusUnitTest config = new QuarkusUnitTest().setArchiveProducer(
() -> ShrinkWrap.create(JavaArchive.class)
.add(new StringAsset(configuration), "application.properties"));
@Inject
TlsConfigurationRegistry certificates;
@Test
void test() throws KeyStoreException, CertificateParsingException {
TlsConfiguration def = certificates.getDefault().orElseThrow();
assertThat(def.getTrustStoreOptions()).isNotNull();
assertThat(def.getTrustStore()).isNotNull();
X509Certificate certificate = (X509Certificate) def.getTrustStore().getCertificate("cert-0");
assertThat(certificate).isNotNull();
assertThat(certificate.getSubjectAlternativeNames()).anySatisfy(l -> {
assertThat(l.get(0)).isEqualTo(2);
assertThat(l.get(1)).isEqualTo("localhost");
});
}
@ApplicationScoped
static | DefaultTrustStoreProviderTest |
java | alibaba__nacos | api/src/test/java/com/alibaba/nacos/api/remote/DefaultRequestFutureTest.java | {
"start": 14509,
"end": 14991
} | class ____ implements DefaultRequestFuture.FutureTrigger {
boolean isTimeout;
boolean isCancel;
@Override
public void defaultTrigger() {
// do nothing
}
@Override
public void triggerOnTimeout() {
isTimeout = true;
}
@Override
public void triggerOnCancel() {
isCancel = true;
}
}
private | MockFutureTrigger |
java | alibaba__nacos | common/src/test/java/com/alibaba/nacos/common/executor/ThreadPoolManagerTest.java | {
"start": 924,
"end": 3535
} | class ____ {
@Test
void test() {
ThreadPoolManager manager = ThreadPoolManager.getInstance();
ExecutorService executor = ExecutorFactory.newSingleExecutorService();
String namespace = "test";
String group = "test";
manager.register(namespace, group, executor);
assertTrue(manager.getResourcesManager().containsKey(namespace));
assertEquals(1, manager.getResourcesManager().get(namespace).get(group).size());
manager.register(namespace, group, ExecutorFactory.newSingleExecutorService());
assertEquals(2, manager.getResourcesManager().get(namespace).get(group).size());
manager.destroy(namespace, group);
assertFalse(manager.getResourcesManager().get(namespace).containsKey(group));
manager.register(namespace, group, executor);
manager.destroy(namespace);
assertFalse(manager.getResourcesManager().containsKey(namespace));
manager.register(namespace, group, executor);
manager.deregister(namespace, group, ExecutorFactory.newSingleExecutorService());
assertEquals(1, manager.getResourcesManager().get(namespace).get(group).size());
manager.deregister(namespace, group, executor);
assertEquals(0, manager.getResourcesManager().get(namespace).get(group).size());
manager.register(namespace, group, executor);
manager.deregister(namespace, group);
assertFalse(manager.getResourcesManager().get(namespace).containsKey(group));
manager.register(namespace, group, executor);
manager.register(namespace, group, ExecutorFactory.newSingleExecutorService());
ThreadPoolManager.shutdown();
assertFalse(manager.getResourcesManager().containsKey(namespace));
manager.destroy(namespace);
manager.destroy(namespace, group);
assertFalse(manager.getResourcesManager().containsKey(namespace));
}
@Test
void testDestroyWithNull() {
ThreadPoolManager.getInstance().register("t", "g", ExecutorFactory.newFixedExecutorService(1));
try {
ThreadPoolManager.getInstance().destroy("null");
assertTrue(ThreadPoolManager.getInstance().getResourcesManager().containsKey("t"));
ThreadPoolManager.getInstance().destroy("null", "g");
assertTrue(ThreadPoolManager.getInstance().getResourcesManager().containsKey("t"));
} finally {
ThreadPoolManager.getInstance().destroy("t", "g");
}
}
}
| ThreadPoolManagerTest |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/ServletUtils.java | {
"start": 1219,
"end": 2111
} | class ____ {
/**
* Extract a query string parameter without triggering http parameters
* processing by the servlet container.
*
* @param request the request
* @param name the parameter to get the value.
* @return the parameter value, or <code>NULL</code> if the parameter is not
* defined.
* @throws IOException thrown if there was an error parsing the query string.
*/
public static String getParameter(HttpServletRequest request, String name)
throws IOException {
String queryString = request.getQueryString();
if (queryString == null) {
return null;
}
List<NameValuePair> list = URLEncodedUtils.parse(queryString, StandardCharsets.UTF_8);
if (list != null) {
for (NameValuePair nv : list) {
if (name.equals(nv.getName())) {
return nv.getValue();
}
}
}
return null;
}
}
| ServletUtils |
java | elastic__elasticsearch | x-pack/plugin/eql/qa/common/src/main/java/org/elasticsearch/test/eql/stats/FeatureMetric.java | {
"start": 330,
"end": 904
} | enum ____ {
SEQUENCE,
JOIN,
EVENT,
SEQUENCE_MAXSPAN,
SEQUENCE_UNTIL,
SEQUENCE_QUERIES_TWO,
SEQUENCE_QUERIES_THREE,
SEQUENCE_QUERIES_FOUR,
SEQUENCE_QUERIES_FIVE_OR_MORE,
JOIN_QUERIES_TWO,
JOIN_QUERIES_THREE,
JOIN_QUERIES_FOUR,
JOIN_QUERIES_FIVE_OR_MORE,
JOIN_UNTIL,
JOIN_KEYS_ONE,
JOIN_KEYS_TWO,
JOIN_KEYS_THREE,
JOIN_KEYS_FOUR,
JOIN_KEYS_FIVE_OR_MORE,
PIPE_HEAD,
PIPE_TAIL;
@Override
public String toString() {
return this.name().toLowerCase(Locale.ROOT);
}
}
| FeatureMetric |
java | quarkusio__quarkus | extensions/grpc/deployment/src/main/java/io/quarkus/grpc/deployment/GrpcServerProcessor.java | {
"start": 13806,
"end": 15229
} | enum ____ {
UNDEFINED(false),
BLOCKING(true),
VIRTUAL_THREAD(true),
NON_BLOCKING(false),
// @Transactional on a method
IMPLICIT(true);
final boolean blocking;
BlockingMode(boolean blocking) {
this.blocking = blocking;
}
}
private static BlockingMode nonInheritedBlockingMode(Predicate<DotName> checker,
Supplier<String> exceptionMsgSupplier) {
boolean blocking = checker.test(BLOCKING);
boolean nonBlocking = checker.test(NON_BLOCKING);
boolean vt = checker.test(RUN_ON_VIRTUAL_THREAD);
if (blocking && nonBlocking) {
throw new DeploymentException(exceptionMsgSupplier.get());
}
if (nonBlocking && vt) {
throw new DeploymentException(exceptionMsgSupplier.get());
}
if (blocking && !vt) {
return BlockingMode.BLOCKING;
}
if (vt) {
return BlockingMode.VIRTUAL_THREAD;
}
if (nonBlocking) {
return BlockingMode.NON_BLOCKING;
}
boolean transactional = checker.test(TRANSACTIONAL);
if (transactional) { // Cannot be on a virtual thread here.
return BlockingMode.IMPLICIT;
}
return BlockingMode.UNDEFINED;
}
/**
* Retrieve the blocking mode determined by inheritable annotations declared on a | BlockingMode |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/streaming/api/functions/source/legacy/FileReadFunction.java | {
"start": 1650,
"end": 2397
} | class ____ implements FlatMapFunction<Tuple3<String, Long, Long>, String> {
private static final long serialVersionUID = 1L;
@Override
public void flatMap(Tuple3<String, Long, Long> value, Collector<String> out) throws Exception {
FSDataInputStream stream = FileSystem.get(new URI(value.f0)).open(new Path(value.f0));
stream.seek(value.f1);
BufferedReader reader = new BufferedReader(new InputStreamReader(stream));
String line;
try {
while ((line = reader.readLine()) != null
&& (value.f2 == -1L || stream.getPos() <= value.f2)) {
out.collect(line);
}
} finally {
reader.close();
}
}
}
| FileReadFunction |
java | elastic__elasticsearch | x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/loadingservice/LocalModelTests.java | {
"start": 3629,
"end": 24108
} | class ____ extends ESTestCase {
public void testClassificationInfer() throws Exception {
TrainedModelStatsService modelStatsService = mock(TrainedModelStatsService.class);
doAnswer((args) -> null).when(modelStatsService).queueStats(any(InferenceStats.class), anyBoolean());
String modelId = "classification_model";
List<String> inputFields = Arrays.asList("field.foo", "field.bar", "categorical");
InferenceDefinition definition = InferenceDefinition.builder()
.setPreProcessors(Collections.singletonList(new OneHotEncoding("categorical", oneHotMap(), false)))
.setTrainedModel(buildClassificationInference(false))
.build();
LocalModel model = new LocalModel(
modelId,
"test-node",
definition,
new TrainedModelInput(inputFields),
Collections.singletonMap("field.foo", "field.foo.keyword"),
ClassificationConfig.EMPTY_PARAMS,
randomFrom(License.OperationMode.values()),
TrainedModelType.TREE_ENSEMBLE,
modelStatsService,
mock(CircuitBreaker.class)
);
Map<String, Object> fields = new HashMap<>() {
{
put("field.foo", 1.0);
put("field", Collections.singletonMap("bar", 0.5));
put("categorical", "dog");
}
};
SingleValueInferenceResults result = getSingleValue(model, fields, ClassificationConfigUpdate.EMPTY_PARAMS);
assertThat(result.value(), equalTo(0.0));
assertThat(result.valueAsString(), is("0"));
assertThat(model.getLatestStatsAndReset().getInferenceCount(), equalTo(1L));
ClassificationInferenceResults classificationResult = (ClassificationInferenceResults) getSingleValue(
model,
fields,
new ClassificationConfigUpdate(1, null, null, null, null)
);
assertThat(classificationResult.getTopClasses().get(0).getProbability(), closeTo(0.5498339973124778, 0.0000001));
assertThat(classificationResult.getTopClasses().get(0).getClassification(), equalTo("0"));
assertThat(model.getLatestStatsAndReset().getInferenceCount(), equalTo(1L));
// Test with labels
definition = InferenceDefinition.builder()
.setPreProcessors(Collections.singletonList(new OneHotEncoding("categorical", oneHotMap(), false)))
.setTrainedModel(buildClassificationInference(true))
.build();
model = new LocalModel(
modelId,
"test-node",
definition,
new TrainedModelInput(inputFields),
Collections.singletonMap("field.foo", "field.foo.keyword"),
ClassificationConfig.EMPTY_PARAMS,
License.OperationMode.PLATINUM,
TrainedModelType.TREE_ENSEMBLE,
modelStatsService,
mock(CircuitBreaker.class)
);
result = getSingleValue(model, fields, ClassificationConfigUpdate.EMPTY_PARAMS);
assertThat(result.value(), equalTo(0.0));
assertThat(result.valueAsString(), equalTo("no"));
classificationResult = (ClassificationInferenceResults) getSingleValue(
model,
fields,
new ClassificationConfigUpdate(1, null, null, null, null)
);
assertThat(classificationResult.getTopClasses().get(0).getProbability(), closeTo(0.5498339973124778, 0.0000001));
assertThat(classificationResult.getTopClasses().get(0).getClassification(), equalTo("no"));
assertThat(model.getLatestStatsAndReset().getInferenceCount(), equalTo(2L));
classificationResult = (ClassificationInferenceResults) getSingleValue(
model,
fields,
new ClassificationConfigUpdate(2, null, null, null, null)
);
assertThat(classificationResult.getTopClasses(), hasSize(2));
assertThat(model.getLatestStatsAndReset().getInferenceCount(), equalTo(1L));
classificationResult = (ClassificationInferenceResults) getSingleValue(
model,
fields,
new ClassificationConfigUpdate(-1, null, null, null, null)
);
assertThat(classificationResult.getTopClasses(), hasSize(2));
assertThat(model.getLatestStatsAndReset().getInferenceCount(), equalTo(1L));
}
@SuppressWarnings("unchecked")
public void testClassificationInferWithDifferentPredictionFieldTypes() throws Exception {
TrainedModelStatsService modelStatsService = mock(TrainedModelStatsService.class);
doAnswer((args) -> null).when(modelStatsService).queueStats(any(InferenceStats.class), anyBoolean());
String modelId = "classification_model";
List<String> inputFields = Arrays.asList("field.foo.keyword", "field.bar", "categorical");
InferenceDefinition definition = InferenceDefinition.builder()
.setPreProcessors(Collections.singletonList(new OneHotEncoding("categorical", oneHotMap(), false)))
.setTrainedModel(buildClassificationInference(true))
.build();
LocalModel model = new LocalModel(
modelId,
"test-node",
definition,
new TrainedModelInput(inputFields),
Collections.singletonMap("field.foo", "field.foo.keyword"),
ClassificationConfig.EMPTY_PARAMS,
License.OperationMode.PLATINUM,
TrainedModelType.TREE_ENSEMBLE,
modelStatsService,
mock(CircuitBreaker.class)
);
Map<String, Object> fields = new HashMap<>() {
{
put("field.foo", 1.0);
put("field.bar", 0.5);
put("categorical", "dog");
}
};
InferenceResults result = getInferenceResult(
model,
fields,
new ClassificationConfigUpdate(2, null, null, null, PredictionFieldType.STRING)
);
IngestDocument document = TestIngestDocument.emptyIngestDocument();
writeResult(result, document, "result_field", modelId);
assertThat(document.getFieldValue("result_field.predicted_value", String.class), equalTo("no"));
List<?> list = document.getFieldValue("result_field.top_classes", List.class);
assertThat(list.size(), equalTo(2));
assertThat(((Map<String, Object>) list.get(0)).get("class_name"), equalTo("no"));
assertThat(((Map<String, Object>) list.get(1)).get("class_name"), equalTo("yes"));
result = getInferenceResult(model, fields, new ClassificationConfigUpdate(2, null, null, null, PredictionFieldType.NUMBER));
document = TestIngestDocument.emptyIngestDocument();
writeResult(result, document, "result_field", modelId);
assertThat(document.getFieldValue("result_field.predicted_value", Double.class), equalTo(0.0));
list = document.getFieldValue("result_field.top_classes", List.class);
assertThat(list.size(), equalTo(2));
assertThat(((Map<String, Object>) list.get(0)).get("class_name"), equalTo(0.0));
assertThat(((Map<String, Object>) list.get(1)).get("class_name"), equalTo(1.0));
result = getInferenceResult(model, fields, new ClassificationConfigUpdate(2, null, null, null, PredictionFieldType.BOOLEAN));
document = TestIngestDocument.emptyIngestDocument();
writeResult(result, document, "result_field", modelId);
assertThat(document.getFieldValue("result_field.predicted_value", Boolean.class), equalTo(false));
list = document.getFieldValue("result_field.top_classes", List.class);
assertThat(list.size(), equalTo(2));
assertThat(((Map<String, Object>) list.get(0)).get("class_name"), equalTo(false));
assertThat(((Map<String, Object>) list.get(1)).get("class_name"), equalTo(true));
}
public void testRegression() throws Exception {
TrainedModelStatsService modelStatsService = mock(TrainedModelStatsService.class);
doAnswer((args) -> null).when(modelStatsService).queueStats(any(InferenceStats.class), anyBoolean());
List<String> inputFields = Arrays.asList("foo", "bar", "categorical");
InferenceDefinition trainedModelDefinition = InferenceDefinition.builder()
.setPreProcessors(Collections.singletonList(new OneHotEncoding("categorical", oneHotMap(), false)))
.setTrainedModel(buildRegressionInference())
.build();
LocalModel model = new LocalModel(
"regression_model",
"test-node",
trainedModelDefinition,
new TrainedModelInput(inputFields),
Collections.singletonMap("bar", "bar.keyword"),
RegressionConfig.EMPTY_PARAMS,
License.OperationMode.PLATINUM,
TrainedModelType.TREE_ENSEMBLE,
modelStatsService,
mock(CircuitBreaker.class)
);
Map<String, Object> fields = new HashMap<>() {
{
put("foo", 1.0);
put("bar.keyword", 0.5);
put("categorical", "dog");
}
};
SingleValueInferenceResults results = getSingleValue(model, fields, RegressionConfigUpdate.EMPTY_PARAMS);
assertThat(results.value(), equalTo(1.3));
}
public void testAllFieldsMissing() throws Exception {
TrainedModelStatsService modelStatsService = mock(TrainedModelStatsService.class);
doAnswer((args) -> null).when(modelStatsService).queueStats(any(InferenceStats.class), anyBoolean());
List<String> inputFields = Arrays.asList("foo", "bar", "categorical");
InferenceDefinition trainedModelDefinition = InferenceDefinition.builder()
.setPreProcessors(Collections.singletonList(new OneHotEncoding("categorical", oneHotMap(), false)))
.setTrainedModel(buildRegressionInference())
.build();
LocalModel model = new LocalModel(
"regression_model",
"test-node",
trainedModelDefinition,
new TrainedModelInput(inputFields),
null,
RegressionConfig.EMPTY_PARAMS,
License.OperationMode.PLATINUM,
TrainedModelType.TREE_ENSEMBLE,
modelStatsService,
mock(CircuitBreaker.class)
);
Map<String, Object> fields = new HashMap<>() {
{
put("something", 1.0);
put("other", 0.5);
put("baz", "dog");
}
};
WarningInferenceResults results = (WarningInferenceResults) getInferenceResult(model, fields, RegressionConfigUpdate.EMPTY_PARAMS);
assertThat(results.getWarning(), equalTo(Messages.getMessage(Messages.INFERENCE_WARNING_ALL_FIELDS_MISSING, "regression_model")));
assertThat(model.getLatestStatsAndReset().getMissingAllFieldsCount(), equalTo(1L));
}
public void testInferPersistsStatsAfterNumberOfCalls() throws Exception {
TrainedModelStatsService modelStatsService = mock(TrainedModelStatsService.class);
doAnswer((args) -> null).when(modelStatsService).queueStats(any(InferenceStats.class), anyBoolean());
String modelId = "classification_model";
List<String> inputFields = Arrays.asList("field.foo", "field.bar", "categorical");
InferenceDefinition definition = InferenceDefinition.builder()
.setPreProcessors(Collections.singletonList(new OneHotEncoding("categorical", oneHotMap(), false)))
.setTrainedModel(buildClassificationInference(false))
.build();
LocalModel model = new LocalModel(
modelId,
"test-node",
definition,
new TrainedModelInput(inputFields),
null,
ClassificationConfig.EMPTY_PARAMS,
License.OperationMode.PLATINUM,
TrainedModelType.TREE_ENSEMBLE,
modelStatsService,
mock(CircuitBreaker.class)
);
Map<String, Object> fields = new HashMap<>() {
{
put("field.foo", 1.0);
put("field.bar", 0.5);
put("categorical", "dog");
}
};
for (int i = 0; i < 100; i++) {
getSingleValue(model, fields, ClassificationConfigUpdate.EMPTY_PARAMS);
}
SingleValueInferenceResults result = getSingleValue(model, fields, ClassificationConfigUpdate.EMPTY_PARAMS);
assertThat(result.value(), equalTo(0.0));
assertThat(result.valueAsString(), is("0"));
// Should have reset after persistence, so only 2 docs have been seen since last persistence
assertThat(model.getLatestStatsAndReset().getInferenceCount(), equalTo(2L));
verify(modelStatsService, times(1)).queueStats(argThat(o -> o.getInferenceCount() == 99L), anyBoolean());
}
public void testMapFieldsIfNecessary() {
Map<String, String> fieldMap = new TreeMap<>();
fieldMap.put("a1", "a2");
fieldMap.put("b1", "b2");
fieldMap.put("missing1", "missing2");
Map<String, Object> fields = new TreeMap<>();
fields.put("a1", "a_value");
fields.put("b1", "b_value");
LocalModel.mapFieldsIfNecessary(fields, fieldMap);
Map<String, Object> expectedMap = new TreeMap<>();
expectedMap.put("a1", "a_value");
expectedMap.put("b1", "b_value");
expectedMap.put("a2", "a_value");
expectedMap.put("b2", "b_value");
assertThat(fields, equalTo(expectedMap));
}
public void testReferenceCounting() throws IOException {
TrainedModelStatsService modelStatsService = mock(TrainedModelStatsService.class);
String modelId = "ref-count-model";
List<String> inputFields = Arrays.asList("field.foo", "field.bar");
InferenceDefinition definition = InferenceDefinition.builder().setTrainedModel(buildClassificationInference(false)).build();
{
CircuitBreaker breaker = mock(CircuitBreaker.class);
LocalModel model = new LocalModel(
modelId,
"test-node",
definition,
new TrainedModelInput(inputFields),
null,
ClassificationConfig.EMPTY_PARAMS,
License.OperationMode.PLATINUM,
TrainedModelType.TREE_ENSEMBLE,
modelStatsService,
breaker
);
model.release();
verify(breaker, times(1)).addWithoutBreaking(eq(-definition.ramBytesUsed()));
verifyNoMoreInteractions(breaker);
assertEquals(0L, model.getReferenceCount());
// reacquire
model.acquire();
verify(breaker, times(1)).addEstimateBytesAndMaybeBreak(eq(definition.ramBytesUsed()), eq(modelId));
verifyNoMoreInteractions(breaker);
assertEquals(1L, model.getReferenceCount());
}
{
CircuitBreaker breaker = mock(CircuitBreaker.class);
LocalModel model = new LocalModel(
modelId,
"test-node",
definition,
new TrainedModelInput(inputFields),
null,
ClassificationConfig.EMPTY_PARAMS,
License.OperationMode.PLATINUM,
TrainedModelType.TREE_ENSEMBLE,
modelStatsService,
breaker
);
model.acquire();
model.acquire();
model.release();
model.release();
model.release();
verify(breaker, times(1)).addWithoutBreaking(eq(-definition.ramBytesUsed()));
verifyNoMoreInteractions(breaker);
assertEquals(0L, model.getReferenceCount());
}
}
private static SingleValueInferenceResults getSingleValue(LocalModel model, Map<String, Object> fields, InferenceConfigUpdate config)
throws Exception {
return (SingleValueInferenceResults) getInferenceResult(model, fields, config);
}
private static InferenceResults getInferenceResult(LocalModel model, Map<String, Object> fields, InferenceConfigUpdate config)
throws Exception {
PlainActionFuture<InferenceResults> future = new PlainActionFuture<>();
model.infer(fields, config, future);
return future.get();
}
private static Map<String, String> oneHotMap() {
Map<String, String> oneHotEncoding = new HashMap<>();
oneHotEncoding.put("cat", "animal_cat");
oneHotEncoding.put("dog", "animal_dog");
return oneHotEncoding;
}
public static InferenceModel buildClassificationInference(boolean includeLables) throws IOException {
return serializeFromTrainedModel((Ensemble) buildClassification(includeLables));
}
public static TrainedModel buildClassification(boolean includeLabels) {
List<String> featureNames = Arrays.asList("field.foo", "field.bar", "animal_cat", "animal_dog");
Tree tree1 = Tree.builder()
.setFeatureNames(featureNames)
.setRoot(TreeNode.builder(0).setLeftChild(1).setRightChild(2).setSplitFeature(0).setThreshold(0.5))
.addNode(TreeNode.builder(1).setLeafValue(1.0))
.addNode(TreeNode.builder(2).setThreshold(0.8).setSplitFeature(1).setLeftChild(3).setRightChild(4))
.addNode(TreeNode.builder(3).setLeafValue(0.0))
.addNode(TreeNode.builder(4).setLeafValue(1.0))
.build();
Tree tree2 = Tree.builder()
.setFeatureNames(featureNames)
.setRoot(TreeNode.builder(0).setLeftChild(1).setRightChild(2).setSplitFeature(3).setThreshold(1.0))
.addNode(TreeNode.builder(1).setLeafValue(0.0))
.addNode(TreeNode.builder(2).setLeafValue(1.0))
.build();
Tree tree3 = Tree.builder()
.setFeatureNames(featureNames)
.setRoot(TreeNode.builder(0).setLeftChild(1).setRightChild(2).setSplitFeature(0).setThreshold(1.0))
.addNode(TreeNode.builder(1).setLeafValue(1.0))
.addNode(TreeNode.builder(2).setLeafValue(0.0))
.build();
return Ensemble.builder()
.setClassificationLabels(includeLabels ? Arrays.asList("no", "yes") : null)
.setTargetType(TargetType.CLASSIFICATION)
.setFeatureNames(featureNames)
.setTrainedModels(Arrays.asList(tree1, tree2, tree3))
.setOutputAggregator(new WeightedMode(new double[] { 0.7, 0.5, 1.0 }, 2))
.build();
}
public static InferenceModel buildRegressionInference() throws IOException {
return serializeFromTrainedModel((Ensemble) buildRegression());
}
public static TrainedModel buildRegression() {
List<String> featureNames = Arrays.asList("field.foo", "field.bar", "animal_cat", "animal_dog");
Tree tree1 = Tree.builder()
.setFeatureNames(featureNames)
.setRoot(TreeNode.builder(0).setLeftChild(1).setRightChild(2).setSplitFeature(0).setThreshold(0.5))
.addNode(TreeNode.builder(1).setLeafValue(0.3))
.addNode(TreeNode.builder(2).setThreshold(0.0).setSplitFeature(3).setLeftChild(3).setRightChild(4))
.addNode(TreeNode.builder(3).setLeafValue(0.1))
.addNode(TreeNode.builder(4).setLeafValue(0.2))
.build();
Tree tree2 = Tree.builder()
.setFeatureNames(featureNames)
.setRoot(TreeNode.builder(0).setLeftChild(1).setRightChild(2).setSplitFeature(2).setThreshold(1.0))
.addNode(TreeNode.builder(1).setLeafValue(1.5))
.addNode(TreeNode.builder(2).setLeafValue(0.9))
.build();
Tree tree3 = Tree.builder()
.setFeatureNames(featureNames)
.setRoot(TreeNode.builder(0).setLeftChild(1).setRightChild(2).setSplitFeature(1).setThreshold(0.2))
.addNode(TreeNode.builder(1).setLeafValue(1.5))
.addNode(TreeNode.builder(2).setLeafValue(0.9))
.build();
return Ensemble.builder()
.setTargetType(TargetType.REGRESSION)
.setFeatureNames(featureNames)
.setTrainedModels(Arrays.asList(tree1, tree2, tree3))
.setOutputAggregator(new WeightedSum(new double[] { 0.5, 0.5, 0.5 }))
.build();
}
}
| LocalModelTests |
java | elastic__elasticsearch | x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/RerankingIT.java | {
"start": 534,
"end": 4025
} | class ____ extends InferenceBaseRestTest {
public void testPutCohereRerankEndpoint() throws IOException {
String endpoint = putCohereRerankEndpoint();
postCohereRerankEndpoint(
endpoint,
"what is elasticsearch for?",
new String[] { "for search", "for security", "for logs", "for email", "for rubber bands", "for kiwis" }
);
}
private String putCohereRerankEndpoint() throws IOException {
String endpointID = randomAlphaOfLength(10).toLowerCase();
putRequest("/_inference/rerank/" + endpointID, """
{
"service": "cohere",
"service_settings": {
"model_id": "rerank-english-v2.0",
"api_key": ""
}
}
""");
return endpointID;
}
public void testPutCohereRerankEndpointWithDocuments() throws IOException {
String endpoint = putCohereRerankEndpointWithDocuments();
postCohereRerankEndpoint(
endpoint,
"what is elasticsearch for?",
new String[] { "for search", "for security", "for logs", "for email", "for rubber bands", "for kiwis" }
);
}
private String putCohereRerankEndpointWithDocuments() throws IOException {
String endpointID = randomAlphaOfLength(10).toLowerCase();
putRequest("/_inference/rerank/" + endpointID, """
{
"service": "cohere",
"service_settings": {
"model_id": "rerank-english-v2.0",
"api_key": ""
},
"task_settings": {
"return_documents": true
}
}
""");
return endpointID;
}
public void testPutCohereRerankEndpointWithTop2() throws IOException {
String endpoint = putCohereRerankEndpointWithTop2();
postCohereRerankEndpoint(
endpoint,
"what is elasticsearch for?",
new String[] { "for search", "for security", "for logs", "for email", "for rubber bands", "for kiwis" }
);
}
private String putCohereRerankEndpointWithTop2() throws IOException {
String endpointID = randomAlphaOfLength(10).toLowerCase();
putRequest("/_inference/rerank/" + endpointID, """
{
"service": "cohere",
"service_settings": {
"model_id": "rerank-english-v2.0",
"api_key": ""
},
"task_settings": {
"top_n": 2
}
}
""");
return endpointID;
}
public void postCohereRerankEndpoint(String endpoint, String query, String[] input) throws IOException {
StringBuilder body = new StringBuilder();
// Start the JSON object
body.append("{");
// Add the query to the JSON object
body.append("\"query\":\"").append(query).append("\",");
// Start the input array
body.append("\"input\":[");
// Add each element of the input array to the JSON array
for (int i = 0; i < input.length; i++) {
body.append("\"").append(input[i]).append("\"");
if (i < input.length - 1) {
body.append(",");
}
}
// End the input array and the JSON object
body.append("]}");
postRequest("/_inference/rerank/" + endpoint, body.toString());
}
}
| RerankingIT |
java | apache__camel | dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/KubernetesNamespacesEndpointBuilderFactory.java | {
"start": 1639,
"end": 16244
} | interface ____
extends
EndpointConsumerBuilder {
default AdvancedKubernetesNamespacesEndpointConsumerBuilder advanced() {
return (AdvancedKubernetesNamespacesEndpointConsumerBuilder) this;
}
/**
* The Kubernetes API Version to use.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: common
*
* @param apiVersion the value to set
* @return the dsl builder
*/
default KubernetesNamespacesEndpointConsumerBuilder apiVersion(String apiVersion) {
doSetProperty("apiVersion", apiVersion);
return this;
}
/**
* The dns domain, used for ServiceCall EIP.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: common
*
* @param dnsDomain the value to set
* @return the dsl builder
*/
default KubernetesNamespacesEndpointConsumerBuilder dnsDomain(String dnsDomain) {
doSetProperty("dnsDomain", dnsDomain);
return this;
}
/**
* Default KubernetesClient to use if provided.
*
* The option is a:
* <code>io.fabric8.kubernetes.client.KubernetesClient</code> type.
*
* Group: common
*
* @param kubernetesClient the value to set
* @return the dsl builder
*/
default KubernetesNamespacesEndpointConsumerBuilder kubernetesClient(io.fabric8.kubernetes.client.KubernetesClient kubernetesClient) {
doSetProperty("kubernetesClient", kubernetesClient);
return this;
}
/**
* Default KubernetesClient to use if provided.
*
* The option will be converted to a
* <code>io.fabric8.kubernetes.client.KubernetesClient</code> type.
*
* Group: common
*
* @param kubernetesClient the value to set
* @return the dsl builder
*/
default KubernetesNamespacesEndpointConsumerBuilder kubernetesClient(String kubernetesClient) {
doSetProperty("kubernetesClient", kubernetesClient);
return this;
}
/**
* The namespace.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: common
*
* @param namespace the value to set
* @return the dsl builder
*/
default KubernetesNamespacesEndpointConsumerBuilder namespace(String namespace) {
doSetProperty("namespace", namespace);
return this;
}
/**
* The port name, used for ServiceCall EIP.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: common
*
* @param portName the value to set
* @return the dsl builder
*/
default KubernetesNamespacesEndpointConsumerBuilder portName(String portName) {
doSetProperty("portName", portName);
return this;
}
/**
* The port protocol, used for ServiceCall EIP.
*
* The option is a: <code>java.lang.String</code> type.
*
* Default: tcp
* Group: common
*
* @param portProtocol the value to set
* @return the dsl builder
*/
default KubernetesNamespacesEndpointConsumerBuilder portProtocol(String portProtocol) {
doSetProperty("portProtocol", portProtocol);
return this;
}
/**
* The Consumer CRD Resource Group we would like to watch.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: consumer
*
* @param crdGroup the value to set
* @return the dsl builder
*/
default KubernetesNamespacesEndpointConsumerBuilder crdGroup(String crdGroup) {
doSetProperty("crdGroup", crdGroup);
return this;
}
/**
* The Consumer CRD Resource name we would like to watch.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: consumer
*
* @param crdName the value to set
* @return the dsl builder
*/
default KubernetesNamespacesEndpointConsumerBuilder crdName(String crdName) {
doSetProperty("crdName", crdName);
return this;
}
/**
* The Consumer CRD Resource Plural we would like to watch.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: consumer
*
* @param crdPlural the value to set
* @return the dsl builder
*/
default KubernetesNamespacesEndpointConsumerBuilder crdPlural(String crdPlural) {
doSetProperty("crdPlural", crdPlural);
return this;
}
/**
* The Consumer CRD Resource Scope we would like to watch.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: consumer
*
* @param crdScope the value to set
* @return the dsl builder
*/
default KubernetesNamespacesEndpointConsumerBuilder crdScope(String crdScope) {
doSetProperty("crdScope", crdScope);
return this;
}
/**
* The Consumer CRD Resource Version we would like to watch.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: consumer
*
* @param crdVersion the value to set
* @return the dsl builder
*/
default KubernetesNamespacesEndpointConsumerBuilder crdVersion(String crdVersion) {
doSetProperty("crdVersion", crdVersion);
return this;
}
/**
* The Consumer Label key when watching at some resources.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: consumer
*
* @param labelKey the value to set
* @return the dsl builder
*/
default KubernetesNamespacesEndpointConsumerBuilder labelKey(String labelKey) {
doSetProperty("labelKey", labelKey);
return this;
}
/**
* The Consumer Label value when watching at some resources.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: consumer
*
* @param labelValue the value to set
* @return the dsl builder
*/
default KubernetesNamespacesEndpointConsumerBuilder labelValue(String labelValue) {
doSetProperty("labelValue", labelValue);
return this;
}
/**
* The Consumer pool size.
*
* The option is a: <code>int</code> type.
*
* Default: 1
* Group: consumer
*
* @param poolSize the value to set
* @return the dsl builder
*/
default KubernetesNamespacesEndpointConsumerBuilder poolSize(int poolSize) {
doSetProperty("poolSize", poolSize);
return this;
}
/**
* The Consumer pool size.
*
* The option will be converted to a <code>int</code> type.
*
* Default: 1
* Group: consumer
*
* @param poolSize the value to set
* @return the dsl builder
*/
default KubernetesNamespacesEndpointConsumerBuilder poolSize(String poolSize) {
doSetProperty("poolSize", poolSize);
return this;
}
/**
* The Consumer Resource Name we would like to watch.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: consumer
*
* @param resourceName the value to set
* @return the dsl builder
*/
default KubernetesNamespacesEndpointConsumerBuilder resourceName(String resourceName) {
doSetProperty("resourceName", resourceName);
return this;
}
/**
* The CA Cert Data.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: security
*
* @param caCertData the value to set
* @return the dsl builder
*/
default KubernetesNamespacesEndpointConsumerBuilder caCertData(String caCertData) {
doSetProperty("caCertData", caCertData);
return this;
}
/**
* The CA Cert File.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: security
*
* @param caCertFile the value to set
* @return the dsl builder
*/
default KubernetesNamespacesEndpointConsumerBuilder caCertFile(String caCertFile) {
doSetProperty("caCertFile", caCertFile);
return this;
}
/**
* The Client Cert Data.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: security
*
* @param clientCertData the value to set
* @return the dsl builder
*/
default KubernetesNamespacesEndpointConsumerBuilder clientCertData(String clientCertData) {
doSetProperty("clientCertData", clientCertData);
return this;
}
/**
* The Client Cert File.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: security
*
* @param clientCertFile the value to set
* @return the dsl builder
*/
default KubernetesNamespacesEndpointConsumerBuilder clientCertFile(String clientCertFile) {
doSetProperty("clientCertFile", clientCertFile);
return this;
}
/**
* The Key Algorithm used by the client.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: security
*
* @param clientKeyAlgo the value to set
* @return the dsl builder
*/
default KubernetesNamespacesEndpointConsumerBuilder clientKeyAlgo(String clientKeyAlgo) {
doSetProperty("clientKeyAlgo", clientKeyAlgo);
return this;
}
/**
* The Client Key data.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: security
*
* @param clientKeyData the value to set
* @return the dsl builder
*/
default KubernetesNamespacesEndpointConsumerBuilder clientKeyData(String clientKeyData) {
doSetProperty("clientKeyData", clientKeyData);
return this;
}
/**
* The Client Key file.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: security
*
* @param clientKeyFile the value to set
* @return the dsl builder
*/
default KubernetesNamespacesEndpointConsumerBuilder clientKeyFile(String clientKeyFile) {
doSetProperty("clientKeyFile", clientKeyFile);
return this;
}
/**
* The Client Key Passphrase.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: security
*
* @param clientKeyPassphrase the value to set
* @return the dsl builder
*/
default KubernetesNamespacesEndpointConsumerBuilder clientKeyPassphrase(String clientKeyPassphrase) {
doSetProperty("clientKeyPassphrase", clientKeyPassphrase);
return this;
}
/**
* The Auth Token.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: security
*
* @param oauthToken the value to set
* @return the dsl builder
*/
default KubernetesNamespacesEndpointConsumerBuilder oauthToken(String oauthToken) {
doSetProperty("oauthToken", oauthToken);
return this;
}
/**
* Password to connect to Kubernetes.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: security
*
* @param password the value to set
* @return the dsl builder
*/
default KubernetesNamespacesEndpointConsumerBuilder password(String password) {
doSetProperty("password", password);
return this;
}
/**
* Define if the certs we used are trusted anyway or not.
*
* The option is a: <code>java.lang.Boolean</code> type.
*
* Default: false
* Group: security
*
* @param trustCerts the value to set
* @return the dsl builder
*/
default KubernetesNamespacesEndpointConsumerBuilder trustCerts(Boolean trustCerts) {
doSetProperty("trustCerts", trustCerts);
return this;
}
/**
* Define if the certs we used are trusted anyway or not.
*
* The option will be converted to a <code>java.lang.Boolean</code>
* type.
*
* Default: false
* Group: security
*
* @param trustCerts the value to set
* @return the dsl builder
*/
default KubernetesNamespacesEndpointConsumerBuilder trustCerts(String trustCerts) {
doSetProperty("trustCerts", trustCerts);
return this;
}
/**
* Username to connect to Kubernetes.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: security
*
* @param username the value to set
* @return the dsl builder
*/
default KubernetesNamespacesEndpointConsumerBuilder username(String username) {
doSetProperty("username", username);
return this;
}
}
/**
* Advanced builder for endpoint consumers for the Kubernetes Namespaces component.
*/
public | KubernetesNamespacesEndpointConsumerBuilder |
java | quarkusio__quarkus | extensions/smallrye-openapi/deployment/src/test/java/io/quarkus/smallrye/openapi/test/jaxrs/MyBuildTimeFilterPrio0.java | {
"start": 496,
"end": 978
} | class ____ implements OASFilter {
private IndexView view;
public MyBuildTimeFilterPrio0(IndexView aView) {
this.view = aView;
}
@Override
public void filterOpenAPI(OpenAPI aOpenAPI) {
String currentDesc = Optional
.ofNullable(aOpenAPI.getInfo())
.map(Info::getDescription)
.orElse("");
aOpenAPI.setInfo(OASFactory.createInfo().description(currentDesc + "0"));
}
}
| MyBuildTimeFilterPrio0 |
java | alibaba__fastjson | src/test/java/com/alibaba/json/bvtVO/DataTransaction.java | {
"start": 251,
"end": 563
} | class ____ implements Serializable {
private static final long serialVersionUID = 1L;
private Head head = new Head();
private Body body = new Body();
public DataTransaction(){
}
/**
* Head
**/
public static | DataTransaction |
java | apache__flink | flink-core/src/test/java/org/apache/flink/types/parser/IntValueParserTest.java | {
"start": 888,
"end": 2346
} | class ____ extends ParserTestBase<IntValue> {
@Override
public String[] getValidTestValues() {
return new String[] {
"0",
"1",
"576",
"-877678",
String.valueOf(Integer.MAX_VALUE),
String.valueOf(Integer.MIN_VALUE),
"1239"
};
}
@Override
public IntValue[] getValidTestResults() {
return new IntValue[] {
new IntValue(0),
new IntValue(1),
new IntValue(576),
new IntValue(-877678),
new IntValue(Integer.MAX_VALUE),
new IntValue(Integer.MIN_VALUE),
new IntValue(1239)
};
}
@Override
public String[] getInvalidTestValues() {
return new String[] {
"a",
"1569a86",
"-57-6",
"7-877678",
Integer.MAX_VALUE + "0",
String.valueOf(Long.MIN_VALUE),
String.valueOf(((long) Integer.MAX_VALUE) + 1),
String.valueOf(((long) Integer.MIN_VALUE) - 1),
" 1",
"2 ",
" ",
"\t"
};
}
@Override
public boolean allowsEmptyField() {
return false;
}
@Override
public FieldParser<IntValue> getParser() {
return new IntValueParser();
}
@Override
public Class<IntValue> getTypeClass() {
return IntValue.class;
}
}
| IntValueParserTest |
java | bumptech__glide | library/test/src/test/java/com/bumptech/glide/RegistryTest.java | {
"start": 5830,
"end": 5911
} | class ____ represent resource classes for readability.
}
private static final | to |
java | apache__camel | tooling/maven/camel-package-maven-plugin/src/main/java/org/apache/camel/maven/packaging/generics/OwbTypeVariableImpl.java | {
"start": 1112,
"end": 2150
} | class ____ {
private static final Class<?>[] TYPE_VARIABLE_TYPES = new Class<?>[] { TypeVariable.class };
/**
* Java TypeVariable is different in various JDK versions. Thus it is not possible to e.g. write a custom
* TypeVariable which works in either Java7 and Java8 as they introduced new methods in Java8 which have return
* generics which only exist in Java8 :( As workaround we dynamically crate a proxy to wrap this and do the
* delegation manually. This is of course slower, but as we do not use it often it might not have much impact.
*
* @param typeVariable
* @param bounds
* @return the typeVariable with the defined bounds.
*/
public static TypeVariable createTypeVariable(TypeVariable typeVariable, Type... bounds) {
return (TypeVariable) Proxy.newProxyInstance(OwbTypeVariableImpl.class.getClassLoader(), TYPE_VARIABLE_TYPES,
new OwbTypeVariableInvocationHandler(typeVariable, bounds));
}
public static | OwbTypeVariableImpl |
java | google__guice | core/src/com/google/inject/spi/InjectionPoint.java | {
"start": 9648,
"end": 10581
} | class ____ to {@code type}.
*
* @param type a concrete type with exactly one constructor annotated {@literal @}{@link Inject},
* or a no-arguments constructor that is not private.
* @throws ConfigurationException if there is no injectable constructor, more than one injectable
* constructor, or if parameters of the injectable constructor are malformed, such as a
* parameter with multiple binding annotations.
*/
public static InjectionPoint forConstructorOf(TypeLiteral<?> type) {
return forConstructorOf(type, false);
}
/**
* Returns a new injection point for the injectable constructor of {@code type}.
*
* <p>If {@code atInjectRequired} is true, the constructor must be annotated with {@code @Inject}.
* If {@code atInjectRequired} is false, either a {@code @Inject} annotated constructor or a
* non-private no arg constructor is required to be defined by the | corresponding |
java | resilience4j__resilience4j | resilience4j-feign/src/main/java/io/github/resilience4j/feign/Resilience4jFeign.java | {
"start": 2605,
"end": 3039
} | class ____ implements feign.Capability {
private final FeignDecorator feignDecorator;
public Capability(FeignDecorator feignDecorator) {
this.feignDecorator = feignDecorator;
}
@Override
public feign.InvocationHandlerFactory enrich(feign.InvocationHandlerFactory invocationHandlerFactory) {
return new InvocationHandlerFactory(feignDecorator);
}
}
}
| Capability |
java | apache__dubbo | dubbo-common/src/main/java/org/apache/dubbo/common/utils/ClassUtils.java | {
"start": 2053,
"end": 3203
} | class ____: "[]"
*/
public static final String ARRAY_SUFFIX = "[]";
/**
* Simple Types including:
* <ul>
* <li>{@link Void}</li>
* <li>{@link Boolean}</li>
* <li>{@link Character}</li>
* <li>{@link Byte}</li>
* <li>{@link Integer}</li>
* <li>{@link Float}</li>
* <li>{@link Double}</li>
* <li>{@link String}</li>
* <li>{@link BigDecimal}</li>
* <li>{@link BigInteger}</li>
* <li>{@link Date}</li>
* <li>{@link Object}</li>
* </ul>
*
* @see javax.management.openmbean.SimpleType
* @since 2.7.6
*/
public static final Set<Class<?>> SIMPLE_TYPES = ofSet(
Void.class,
Boolean.class,
Character.class,
Byte.class,
Short.class,
Integer.class,
Long.class,
Float.class,
Double.class,
String.class,
BigDecimal.class,
BigInteger.class,
Date.class,
Object.class,
Duration.class);
/**
* Prefix for internal array | names |
java | google__guava | android/guava-testlib/src/com/google/common/collect/testing/google/SortedMapGenerators.java | {
"start": 4323,
"end": 4755
} | class ____ extends TestStringListGenerator {
@Override
protected List<String> create(String[] elements) {
ImmutableSortedMap.Builder<Integer, String> builder = ImmutableSortedMap.naturalOrder();
for (int i = 0; i < elements.length; i++) {
builder.put(i, elements[i]);
}
return builder.build().values().asList();
}
}
/**
* Useless constructor for a | ImmutableSortedMapValueListGenerator |
java | assertj__assertj-core | assertj-core/src/test/java/org/assertj/core/api/optionallong/OptionalLongAssert_isNotEmpty_Test.java | {
"start": 1005,
"end": 1716
} | class ____ {
@Test
void should_pass_when_OptionalLong_is_present() {
assertThat(OptionalLong.of(10L)).isNotEmpty();
}
@Test
void should_fail_when_OptionalLong_is_empty() {
assertThatExceptionOfType(AssertionError.class).isThrownBy(() -> assertThat(OptionalLong.empty()).isNotEmpty())
.withMessage(shouldBePresent(OptionalLong.empty()).create());
}
@Test
void should_fail_when_OptionalLong_is_null() {
assertThatExceptionOfType(AssertionError.class).isThrownBy(() -> assertThat((OptionalLong) null).isNotEmpty())
.withMessage(actualIsNull());
}
}
| OptionalLongAssert_isNotEmpty_Test |
java | apache__camel | core/camel-api/src/main/java/org/apache/camel/spi/CompileStrategy.java | {
"start": 902,
"end": 1223
} | interface ____ {
/**
* Work directory for compiler. Can be used to write compiled classes or other resources.
*/
String getWorkDir();
/**
* Work directory for compiler. Can be used to write compiled classes or other resources.
*/
void setWorkDir(String compileWorkDir);
}
| CompileStrategy |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/loader/ast/internal/SingleUniqueKeyEntityLoaderStandard.java | {
"start": 6576,
"end": 7675
} | class ____ extends BaseExecutionContext {
private final String uniqueKeyAttributePath;
private final Object uniqueKey;
private final Callback callback;
private final QueryOptions queryOptions;
public SingleUKEntityLoaderExecutionContext(
String uniqueKeyAttributePath,
Object uniqueKey,
SharedSessionContractImplementor session,
Boolean readOnly) {
super( session );
this.uniqueKeyAttributePath = uniqueKeyAttributePath;
this.uniqueKey = uniqueKey;
if ( readOnly == null ) { //Careful, readOnly is possibly null
queryOptions = QueryOptions.NONE;
}
else {
queryOptions = readOnly ? QueryOptions.READ_ONLY : QueryOptions.READ_WRITE;
}
callback = new CallbackImpl();
}
@Override
public QueryOptions getQueryOptions() {
return queryOptions;
}
@Override
public Callback getCallback() {
return callback;
}
@Override
public String getEntityUniqueKeyAttributePath() {
return uniqueKeyAttributePath;
}
@Override
public Object getEntityUniqueKey() {
return uniqueKey;
}
}
}
| SingleUKEntityLoaderExecutionContext |
java | apache__camel | dsl/camel-java-joor-dsl/src/main/java/org/apache/camel/dsl/java/joor/Helper.java | {
"start": 1102,
"end": 1185
} | class ____ to reuse part of the code outside of Camel easily.
*/
public final | allowing |
java | spring-projects__spring-boot | module/spring-boot-rsocket/src/main/java/org/springframework/boot/rsocket/netty/NettyRSocketServerFactory.java | {
"start": 2711,
"end": 8299
} | class ____ implements RSocketServerFactory, ConfigurableRSocketServerFactory {
private int port = 9898;
private @Nullable DataSize fragmentSize;
private @Nullable InetAddress address;
private RSocketServer.Transport transport = RSocketServer.Transport.TCP;
private @Nullable ReactorResourceFactory resourceFactory;
private @Nullable Duration lifecycleTimeout;
private List<RSocketServerCustomizer> rSocketServerCustomizers = new ArrayList<>();
private @Nullable Ssl ssl;
private @Nullable SslBundles sslBundles;
@Override
public void setPort(int port) {
this.port = port;
}
@Override
public void setFragmentSize(@Nullable DataSize fragmentSize) {
this.fragmentSize = fragmentSize;
}
@Override
public void setAddress(@Nullable InetAddress address) {
this.address = address;
}
@Override
public void setTransport(RSocketServer.Transport transport) {
this.transport = transport;
}
@Override
public void setSsl(@Nullable Ssl ssl) {
this.ssl = ssl;
}
@Override
public void setSslBundles(@Nullable SslBundles sslBundles) {
this.sslBundles = sslBundles;
}
/**
* Set the {@link ReactorResourceFactory} to get the shared resources from.
* @param resourceFactory the server resources
*/
public void setResourceFactory(@Nullable ReactorResourceFactory resourceFactory) {
this.resourceFactory = resourceFactory;
}
/**
* Set {@link RSocketServerCustomizer}s that should be called to configure the
* {@link io.rsocket.core.RSocketServer} while building the server. Calling this
* method will replace any existing customizers.
* @param rSocketServerCustomizers customizers to apply before the server starts
* @since 2.2.7
*/
public void setRSocketServerCustomizers(Collection<? extends RSocketServerCustomizer> rSocketServerCustomizers) {
Assert.notNull(rSocketServerCustomizers, "'rSocketServerCustomizers' must not be null");
this.rSocketServerCustomizers = new ArrayList<>(rSocketServerCustomizers);
}
/**
* Add {@link RSocketServerCustomizer}s that should be called to configure the
* {@link io.rsocket.core.RSocketServer}.
* @param rSocketServerCustomizers customizers to apply before the server starts
* @since 2.2.7
*/
public void addRSocketServerCustomizers(RSocketServerCustomizer... rSocketServerCustomizers) {
Assert.notNull(rSocketServerCustomizers, "'rSocketServerCustomizers' must not be null");
this.rSocketServerCustomizers.addAll(Arrays.asList(rSocketServerCustomizers));
}
/**
* Set the maximum amount of time that should be waited when starting or stopping the
* server.
* @param lifecycleTimeout the lifecycle timeout
*/
public void setLifecycleTimeout(Duration lifecycleTimeout) {
this.lifecycleTimeout = lifecycleTimeout;
}
@Override
public NettyRSocketServer create(SocketAcceptor socketAcceptor) {
ServerTransport<CloseableChannel> transport = createTransport();
io.rsocket.core.RSocketServer server = io.rsocket.core.RSocketServer.create(socketAcceptor);
configureServer(server);
Mono<CloseableChannel> starter = server.bind(transport);
return new NettyRSocketServer(starter, this.lifecycleTimeout);
}
private void configureServer(io.rsocket.core.RSocketServer server) {
PropertyMapper map = PropertyMapper.get();
map.from(this.fragmentSize).asInt(DataSize::toBytes).to(server::fragment);
this.rSocketServerCustomizers.forEach((customizer) -> customizer.customize(server));
}
private ServerTransport<CloseableChannel> createTransport() {
if (this.transport == RSocketServer.Transport.WEBSOCKET) {
return createWebSocketTransport();
}
return createTcpTransport();
}
private ServerTransport<CloseableChannel> createWebSocketTransport() {
HttpServer httpServer = HttpServer.create();
if (this.resourceFactory != null) {
httpServer = httpServer.runOn(this.resourceFactory.getLoopResources());
}
if (Ssl.isEnabled(this.ssl)) {
httpServer = customizeSslConfiguration(httpServer, this.ssl);
}
return WebsocketServerTransport.create(httpServer.bindAddress(this::getListenAddress));
}
private HttpServer customizeSslConfiguration(HttpServer httpServer, Ssl ssl) {
return new HttpServerSslCustomizer(ssl.getClientAuth(), getSslBundle(), getServerNameSslBundles())
.apply(httpServer);
}
private ServerTransport<CloseableChannel> createTcpTransport() {
TcpServer tcpServer = TcpServer.create();
if (this.resourceFactory != null) {
tcpServer = tcpServer.runOn(this.resourceFactory.getLoopResources());
}
if (Ssl.isEnabled(this.ssl)) {
tcpServer = new TcpServerSslCustomizer(this.ssl.getClientAuth(), getSslBundle(), getServerNameSslBundles())
.apply(tcpServer);
}
return TcpServerTransport.create(tcpServer.bindAddress(this::getListenAddress));
}
private SslBundle getSslBundle() {
return WebServerSslBundle.get(this.ssl, this.sslBundles);
}
protected final Map<String, SslBundle> getServerNameSslBundles() {
Assert.state(this.ssl != null, "'ssl' must not be null");
return this.ssl.getServerNameBundles()
.stream()
.collect(Collectors.toMap(Ssl.ServerNameSslBundle::serverName, this::getBundle));
}
private SslBundle getBundle(ServerNameSslBundle serverNameSslBundle) {
Assert.state(this.sslBundles != null, "'sslBundles' must not be null");
return this.sslBundles.getBundle(serverNameSslBundle.bundle());
}
private InetSocketAddress getListenAddress() {
if (this.address != null) {
return new InetSocketAddress(this.address.getHostAddress(), this.port);
}
return new InetSocketAddress(this.port);
}
private abstract static | NettyRSocketServerFactory |
java | elastic__elasticsearch | x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/TransportPutWatchAction.java | {
"start": 2885,
"end": 7976
} | class ____ extends WatcherTransportAction<PutWatchRequest, PutWatchResponse> {
private final ThreadPool threadPool;
private final Clock clock;
private final WatchParser parser;
private final Client client;
private final ClusterService clusterService;
private static final ToXContent.Params DEFAULT_PARAMS = WatcherParams.builder()
.hideSecrets(false)
.hideHeaders(false)
.includeStatus(true)
.build();
@Inject
public TransportPutWatchAction(
TransportService transportService,
ThreadPool threadPool,
ActionFilters actionFilters,
ClockHolder clockHolder,
XPackLicenseState licenseState,
WatchParser parser,
Client client,
ClusterService clusterService
) {
super(PutWatchAction.NAME, transportService, actionFilters, licenseState, PutWatchRequest::new);
this.threadPool = threadPool;
this.clock = clockHolder.clock;
this.parser = parser;
this.client = client;
this.clusterService = clusterService;
}
@Override
protected void doExecute(PutWatchRequest request, ActionListener<PutWatchResponse> listener) {
try {
ZonedDateTime now = clock.instant().atZone(ZoneOffset.UTC);
boolean isUpdate = request.getVersion() > 0 || request.getIfSeqNo() != SequenceNumbers.UNASSIGNED_SEQ_NO;
Watch watch = parser.parseWithSecrets(
request.getId(),
false,
request.getSource(),
now,
request.xContentType(),
isUpdate,
request.getIfSeqNo(),
request.getIfPrimaryTerm()
);
watch.setState(request.isActive(), now);
// ensure we only filter for the allowed headers
Map<String, String> filteredHeaders = ClientHelper.getPersistableSafeSecurityHeaders(
threadPool.getThreadContext(),
clusterService.state()
);
watch.status().setHeaders(filteredHeaders);
try (XContentBuilder builder = jsonBuilder()) {
watch.toXContent(builder, DEFAULT_PARAMS);
if (isUpdate) {
UpdateRequest updateRequest = new UpdateRequest(Watch.INDEX, request.getId());
if (request.getIfSeqNo() != SequenceNumbers.UNASSIGNED_SEQ_NO) {
updateRequest.setIfSeqNo(request.getIfSeqNo());
updateRequest.setIfPrimaryTerm(request.getIfPrimaryTerm());
} else {
updateRequest.version(request.getVersion());
}
updateRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
updateRequest.doc(builder);
executeAsyncWithOrigin(
client.threadPool().getThreadContext(),
WATCHER_ORIGIN,
updateRequest,
ActionListener.<UpdateResponse>wrap(response -> {
boolean created = response.getResult() == DocWriteResponse.Result.CREATED;
listener.onResponse(
new PutWatchResponse(
response.getId(),
response.getVersion(),
response.getSeqNo(),
response.getPrimaryTerm(),
created
)
);
}, listener::onFailure),
client::update
);
} else {
IndexRequest indexRequest = new IndexRequest(Watch.INDEX).id(request.getId());
indexRequest.source(builder);
indexRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
executeAsyncWithOrigin(
client.threadPool().getThreadContext(),
WATCHER_ORIGIN,
indexRequest,
ActionListener.<DocWriteResponse>wrap(response -> {
boolean created = response.getResult() == DocWriteResponse.Result.CREATED;
listener.onResponse(
new PutWatchResponse(
response.getId(),
response.getVersion(),
response.getSeqNo(),
response.getPrimaryTerm(),
created
)
);
}, listener::onFailure),
client::index
);
}
}
} catch (Exception e) {
listener.onFailure(e);
}
}
}
| TransportPutWatchAction |
java | alibaba__fastjson | src/test/java/com/alibaba/json/bvt/issue_1200/Issue1299.java | {
"start": 265,
"end": 1127
} | class ____ extends TestCase {
public void test_for_issue() throws Exception {
String jsonStr = "{\"code\":201,\"data\":{\"materials\":[{\"material\":\"locale\",\"success\":true,"
+ "\"material_id\":356,\"id\":\"5099\"}],\"unitInfo\":{\"languages\":[\"'en_US'\",\"ru_RU\"],"
+ "\"unitName\":\"PC_ROCKBROS\",\"sceneKey\":\"shop_activity_page\",\"domain\":\"shopcdp.aliexpress"
+ ".com\",\"format\":\"HTML\",\"unitId\":\"1625\",\"id\":1761,\"rootPath\":\"shopcdp\","
+ "\"userId\":\"jianqing.zengjq\",\"platforms\":[\"pc\",\"mobile\"],\"status\":2}},\"success\":true}";
UnitsSaveResponse response = JSON.parseObject(jsonStr, UnitsSaveResponse.class);
Class<?> dataClass = response.getData().getClass();
System.out.println(dataClass);
}
public static | Issue1299 |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/mapping/basic/ImplicitBasicTypeTest.java | {
"start": 459,
"end": 778
} | class ____ {
@Test
public void test(EntityManagerFactoryScope scope) {
scope.inTransaction( entityManager -> {
Product product = new Product();
product.id = 1;
entityManager.persist(product);
});
}
//tag::basic-annotation-implicit-example[]
@Entity(name = "Product")
public static | ImplicitBasicTypeTest |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/get/GetRepositoriesResponse.java | {
"start": 1102,
"end": 2197
} | class ____ extends ActionResponse implements ToXContentObject {
private final RepositoriesMetadata repositories;
GetRepositoriesResponse(RepositoriesMetadata repositories) {
this.repositories = repositories;
}
public GetRepositoriesResponse(StreamInput in) throws IOException {
repositories = new RepositoriesMetadata(in);
}
/**
* List of repositories to return
*
* @return list or repositories
*/
public List<RepositoryMetadata> repositories() {
return repositories.repositories();
}
@Override
public void writeTo(StreamOutput out) throws IOException {
repositories.writeTo(out);
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
ChunkedToXContent.wrapAsToXContent(repositories)
.toXContent(builder, new DelegatingMapParams(Map.of(RepositoriesMetadata.HIDE_GENERATIONS_PARAM, "true"), params));
builder.endObject();
return builder;
}
}
| GetRepositoriesResponse |
java | spring-projects__spring-security | config/src/test/java/org/springframework/security/config/http/HttpHeadersConfigTests.java | {
"start": 37152,
"end": 37260
} | class ____ {
@GetMapping("/")
public String ok() {
return "ok";
}
}
public static | SimpleController |
java | apache__camel | components/camel-spring-parent/camel-spring-xml/src/main/java/org/apache/camel/spring/xml/PackageScanRouteBuilderFinder.java | {
"start": 4013,
"end": 4544
} | class ____ a public, non-abstract class
*/
protected boolean isValidClass(Class<?> type) {
// should skip non public classes
if (!Modifier.isPublic(type.getModifiers())) {
return false;
}
if (!Modifier.isAbstract(type.getModifiers()) && !type.isInterface()) {
return true;
}
return false;
}
protected RoutesBuilder instantiateBuilder(Class<? extends RoutesBuilder> type) {
return camelContext.getInjector().newInstance(type);
}
}
| is |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/hql/MapJoinTests.java | {
"start": 572,
"end": 2450
} | class ____ {
@Test void test(EntityManagerFactoryScope scope) {
scope.getEntityManagerFactory().getSchemaManager().truncate();
scope.inTransaction( s -> {
Book book = new Book();
book.isbn = "978-1932394153";
book.title = "Hibernate in Action";
book.translations = Map.of( "en", book );
s.persist( book );
var t =
s.createQuery( "select t from Book b join b.translations t", Book.class )
.getSingleResult();
assertEquals(book, t);
var title =
s.createQuery( "select t.title from Book b join b.translations t", String.class )
.getSingleResult();
assertEquals("Hibernate in Action", title);
} );
}
@Test void testValue(EntityManagerFactoryScope scope) {
scope.getEntityManagerFactory().getSchemaManager().truncate();
scope.inTransaction( s -> {
Book book = new Book();
book.isbn = "978-1932394153";
book.title = "Hibernate in Action";
book.translations = Map.of( "en", book );
s.persist( book );
var b =
s.createQuery( "select t from Book b join value(b.translations) t", Book.class )
.getSingleResult();
assertEquals(book, b);
var title =
s.createQuery( "select t.title from Book b join value(b.translations) t", String.class )
.getSingleResult();
assertEquals("Hibernate in Action", title);
} );
}
@FailureExpected(jiraKey = "HHH-19759")
@Test void testKey(EntityManagerFactoryScope scope) {
scope.getEntityManagerFactory().getSchemaManager().truncate();
scope.inTransaction( s -> {
Book book = new Book();
book.isbn = "978-1932394153";
book.title = "Hibernate in Action";
book.translations = Map.of( "en", book );
s.persist( book );
var lang =
s.createQuery( "select l from Book b join key(b.translations) l", String.class )
.getSingleResult();
assertEquals("en", lang);
} );
}
@Entity(name="Book")
static | MapJoinTests |
java | FasterXML__jackson-databind | src/test/java/tools/jackson/databind/deser/enums/EnumDefaultRead4403Test.java | {
"start": 548,
"end": 607
} | class ____
{
// [databind#4403]
| EnumDefaultRead4403Test |
java | resilience4j__resilience4j | resilience4j-core/src/test/java/io/github/resilience4j/core/ClassUtilsTest.java | {
"start": 2304,
"end": 2478
} | class ____ implements BiConsumer<Integer, String>{
@Override
public void accept(Integer integer, String s) {
}
}
public static | PublicBiConsumer |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/timeline/TimelineUtils.java | {
"start": 1877,
"end": 8602
} | class ____ {
public static final String FLOW_NAME_TAG_PREFIX = "TIMELINE_FLOW_NAME_TAG";
public static final String FLOW_VERSION_TAG_PREFIX =
"TIMELINE_FLOW_VERSION_TAG";
public static final String FLOW_RUN_ID_TAG_PREFIX =
"TIMELINE_FLOW_RUN_ID_TAG";
public final static String DEFAULT_FLOW_VERSION = "1";
private static ObjectMapper mapper;
static {
mapper = new ObjectMapper();
YarnJacksonJaxbJsonProvider.configObjectMapper(mapper);
}
/**
* Serialize a POJO object into a JSON string not in a pretty format
*
* @param o
* an object to serialize
* @return a JSON string
* @throws IOException io error occur.
* @throws JsonMappingException exception used to signal fatal problems with mapping of content.
* @throws JsonGenerationException exception type for exceptions during JSON writing.
*/
public static String dumpTimelineRecordtoJSON(Object o)
throws JsonGenerationException, JsonMappingException, IOException {
return dumpTimelineRecordtoJSON(o, false);
}
/**
* Serialize a POJO object into a JSON string
*
* @param o
* an object to serialize
* @param pretty
* whether in a pretty format or not
* @return a JSON string
* @throws IOException io error occur.
* @throws JsonMappingException exception used to signal fatal problems with mapping of content.
* @throws JsonGenerationException exception type for exceptions during JSON writing.
*/
public static String dumpTimelineRecordtoJSON(Object o, boolean pretty)
throws JsonGenerationException, JsonMappingException, IOException {
if (pretty) {
return mapper.writerWithDefaultPrettyPrinter().writeValueAsString(o);
} else {
return mapper.writeValueAsString(o);
}
}
/**
* Returns whether the timeline service is enabled via configuration.
*
* @param conf the configuration
* @return whether the timeline service is enabled.
*/
public static boolean timelineServiceEnabled(Configuration conf) {
return conf.getBoolean(YarnConfiguration.TIMELINE_SERVICE_ENABLED,
YarnConfiguration.DEFAULT_TIMELINE_SERVICE_ENABLED);
}
/**
* Returns the timeline service version. It does not check whether the
* timeline service itself is enabled.
*
* @param conf the configuration
* @return the timeline service version as a float.
*/
public static float getTimelineServiceVersion(Configuration conf) {
return conf.getFloat(YarnConfiguration.TIMELINE_SERVICE_VERSION,
YarnConfiguration.DEFAULT_TIMELINE_SERVICE_VERSION);
}
/**
* Returns whether the timeline service v.1.5 is enabled by default via
* configuration.
*
* @param conf the configuration
* @return whether the timeline service v.1.5 is enabled. V.1.5 refers to a
* version equal to 1.5.
*/
public static boolean timelineServiceV1_5Enabled(Configuration conf) {
return YarnConfiguration.timelineServiceV15Enabled(conf);
}
public static TimelineAbout createTimelineAbout(String about) {
TimelineAbout tsInfo = new TimelineAbout(about);
tsInfo.setHadoopBuildVersion(VersionInfo.getBuildVersion());
tsInfo.setHadoopVersion(VersionInfo.getVersion());
tsInfo.setHadoopVersionBuiltOn(VersionInfo.getDate());
tsInfo.setTimelineServiceBuildVersion(YarnVersionInfo.getBuildVersion());
tsInfo.setTimelineServiceVersion(YarnVersionInfo.getVersion());
tsInfo.setTimelineServiceVersionBuiltOn(YarnVersionInfo.getDate());
return tsInfo;
}
public static InetSocketAddress getTimelineTokenServiceAddress(
Configuration conf) {
InetSocketAddress timelineServiceAddr = null;
if (YarnConfiguration.useHttps(conf)) {
timelineServiceAddr = conf.getSocketAddr(
YarnConfiguration.TIMELINE_SERVICE_WEBAPP_HTTPS_ADDRESS,
YarnConfiguration.DEFAULT_TIMELINE_SERVICE_WEBAPP_HTTPS_ADDRESS,
YarnConfiguration.DEFAULT_TIMELINE_SERVICE_WEBAPP_HTTPS_PORT);
} else {
timelineServiceAddr = conf.getSocketAddr(
YarnConfiguration.TIMELINE_SERVICE_WEBAPP_ADDRESS,
YarnConfiguration.DEFAULT_TIMELINE_SERVICE_WEBAPP_ADDRESS,
YarnConfiguration.DEFAULT_TIMELINE_SERVICE_WEBAPP_PORT);
}
return timelineServiceAddr;
}
public static Text buildTimelineTokenService(Configuration conf) {
InetSocketAddress timelineServiceAddr =
getTimelineTokenServiceAddress(conf);
return SecurityUtil.buildTokenService(timelineServiceAddr);
}
public static String generateDefaultFlowName(String appName,
ApplicationId appId) {
return (appName != null &&
!appName.equals(YarnConfiguration.DEFAULT_APPLICATION_NAME)) ?
appName :
"flow_" + appId.getClusterTimestamp() + "_" + appId.getId();
}
/**
* Generate flow name tag.
*
* @param flowName flow name that identifies a distinct flow application which
* can be run repeatedly over time
* @return flow name tag.
*/
public static String generateFlowNameTag(String flowName) {
return FLOW_NAME_TAG_PREFIX + ":" + flowName;
}
/**
* Shortens the flow name for the configured size by removing UUID if present.
*
* @param flowName which has to be shortened
* @param conf to resize the flow name
* @return shortened flowName
*/
public static String shortenFlowName(String flowName, Configuration conf) {
if (flowName == null) {
return null;
}
// remove UUID inside flowname if present
flowName = removeUUID(flowName);
// resize flowname
int length = conf.getInt(YarnConfiguration.FLOW_NAME_MAX_SIZE,
YarnConfiguration.FLOW_NAME_DEFAULT_MAX_SIZE);
if (length <= 0) {
return flowName;
}
return StringUtils.substring(flowName, 0, length);
}
@VisibleForTesting
static String removeUUID(String flowName) {
flowName = StringUtils.replaceAll(flowName,
"-?([a-fA-F0-9]{8}-[a-fA-F0-9]{4}-" +
"[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{12}){1}", "");
return flowName;
}
/**
* Generate flow version tag.
*
* @param flowVersion flow version that keeps track of the changes made to the
* flow
* @return flow version tag.
*/
public static String generateFlowVersionTag(String flowVersion) {
return FLOW_VERSION_TAG_PREFIX + ":" + flowVersion;
}
/**
* Generate flow run ID tag.
*
* @param flowRunId flow run ID that identifies one instance (or specific
* execution) of that flow
* @return flow run id tag.
*/
public static String generateFlowRunIdTag(long flowRunId) {
return FLOW_RUN_ID_TAG_PREFIX + ":" + flowRunId;
}
}
| TimelineUtils |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/telemetry/metric/LongHistogram.java | {
"start": 653,
"end": 1377
} | interface ____ extends Instrument {
/**
* Record a sample for the measured item
* @param value
*/
void record(long value);
/**
* Record a sample for the measured item
* @param attributes key-value pairs to associate with the current sample
*/
void record(long value, Map<String, Object> attributes);
/**
* Noop histogram for tests
*/
LongHistogram NOOP = new LongHistogram() {
@Override
public String getName() {
return "noop";
}
@Override
public void record(long value) {
}
@Override
public void record(long value, Map<String, Object> attributes) {
}
};
}
| LongHistogram |
java | apache__camel | components/camel-smb/src/test/java/org/apache/camel/component/smb/FromSmbMoveFileToSubdirIT.java | {
"start": 1134,
"end": 2497
} | class ____ extends SmbServerTestSupport {
@Override
public void doPostSetup() throws Exception {
sendFile(getSmbUrl(), "Hello World this file will be moved", "hello.txt");
}
// create intermediate dirs, and rename file
protected String getSmbUrl() {
return String.format(
"smb:%s/%s/movefiletosubdir?username=%s&password=%s&move=greet/${file:name}.old",
service.address(), service.shareName(), service.userName(), service.password());
}
@Test
public void testMoveFileCreatingSubdir() throws Exception {
MockEndpoint mock = getMockEndpoint("mock:result");
mock.expectedMessageCount(1);
mock.expectedBodiesReceived("Hello World this file will be moved");
mock.assertIsSatisfied();
// verify file moved in smb dir
await().atMost(6, TimeUnit.SECONDS)
.untilAsserted(() -> assertEquals("Hello World this file will be moved",
new String(copyFileContentFromContainer("/data/rw/movefiletosubdir/greet/hello.txt.old"))));
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
public void configure() {
from(getSmbUrl())
.to("mock:result");
}
};
}
}
| FromSmbMoveFileToSubdirIT |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/cluster/metadata/LifecycleExecutionState.java | {
"start": 13776,
"end": 18114
} | class ____ {
private String phase;
private String action;
private String step;
private String failedStep;
private String stepInfo;
private String previousStepInfo;
private String phaseDefinition;
private Long indexCreationDate;
private Long phaseTime;
private Long actionTime;
private Long stepTime;
private Boolean isAutoRetryableError;
private Integer failedStepRetryCount;
private String snapshotName;
private String snapshotRepository;
private String shrinkIndexName;
private String snapshotIndexName;
private String downsampleIndexName;
private String forceMergeCloneIndexName;
public Builder setPhase(String phase) {
this.phase = phase;
return this;
}
public Builder setAction(String action) {
this.action = action;
return this;
}
public Builder setStep(String step) {
this.step = step;
return this;
}
public Builder setFailedStep(String failedStep) {
this.failedStep = failedStep;
return this;
}
public Builder setStepInfo(String stepInfo) {
this.stepInfo = potentiallyTruncateLongJsonWithExplanation(stepInfo);
return this;
}
public Builder setPreviousStepInfo(String previousStepInfo) {
this.previousStepInfo = previousStepInfo;
return this;
}
public Builder setPhaseDefinition(String phaseDefinition) {
this.phaseDefinition = phaseDefinition;
return this;
}
public Builder setIndexCreationDate(Long indexCreationDate) {
this.indexCreationDate = indexCreationDate;
return this;
}
public Builder setPhaseTime(Long phaseTime) {
this.phaseTime = phaseTime;
return this;
}
public Builder setActionTime(Long actionTime) {
this.actionTime = actionTime;
return this;
}
public Builder setStepTime(Long stepTime) {
this.stepTime = stepTime;
return this;
}
public Builder setIsAutoRetryableError(Boolean isAutoRetryableError) {
this.isAutoRetryableError = isAutoRetryableError;
return this;
}
public Builder setFailedStepRetryCount(Integer failedStepRetryCount) {
this.failedStepRetryCount = failedStepRetryCount;
return this;
}
public Builder setSnapshotRepository(String snapshotRepository) {
this.snapshotRepository = snapshotRepository;
return this;
}
public Builder setSnapshotName(String snapshotName) {
this.snapshotName = snapshotName;
return this;
}
public Builder setShrinkIndexName(String shrinkIndexName) {
this.shrinkIndexName = shrinkIndexName;
return this;
}
public Builder setSnapshotIndexName(String snapshotIndexName) {
this.snapshotIndexName = snapshotIndexName;
return this;
}
public Builder setDownsampleIndexName(String downsampleIndexName) {
this.downsampleIndexName = downsampleIndexName;
return this;
}
public Builder setForceMergeCloneIndexName(String forceMergeCloneIndexName) {
this.forceMergeCloneIndexName = forceMergeCloneIndexName;
return this;
}
public LifecycleExecutionState build() {
return new LifecycleExecutionState(
phase,
action,
step,
failedStep,
isAutoRetryableError,
failedStepRetryCount,
stepInfo,
previousStepInfo,
phaseDefinition,
indexCreationDate,
phaseTime,
actionTime,
stepTime,
snapshotRepository,
snapshotName,
shrinkIndexName,
snapshotIndexName,
downsampleIndexName,
forceMergeCloneIndexName
);
}
}
}
| Builder |
java | alibaba__nacos | console/src/main/java/com/alibaba/nacos/console/filter/XssFilter.java | {
"start": 971,
"end": 1539
} | class ____ extends OncePerRequestFilter {
private static final String CONTENT_SECURITY_POLICY_HEADER = "Content-Security-Policy";
private static final String CONTENT_SECURITY_POLICY = "script-src 'self'";
@Override
protected void doFilterInternal(HttpServletRequest request, HttpServletResponse response, FilterChain filterChain)
throws ServletException, IOException {
response.setHeader(CONTENT_SECURITY_POLICY_HEADER, CONTENT_SECURITY_POLICY);
filterChain.doFilter(request, response);
}
}
| XssFilter |
java | spring-projects__spring-security | core/src/test/java/org/springframework/security/core/annotation/ExpressionTemplateSecurityAnnotationScannerTests.java | {
"start": 3130,
"end": 3308
} | interface ____ {
}
@Documented
@Retention(RetentionPolicy.RUNTIME)
@Target({ ElementType.TYPE, ElementType.METHOD })
@PreAuthorize("check({object})")
@ | HasAllCustomPermissions |
java | hibernate__hibernate-orm | local-build-plugins/src/main/java/org/hibernate/orm/post/IndexManager.java | {
"start": 1022,
"end": 5796
} | class ____ {
private final Configuration artifactsToProcess;
private final Provider<RegularFile> indexFileReferenceAccess;
private final Provider<RegularFile> packageFileReferenceAccess;
private final Project project;
private Index index;
private TreeSet<Inclusion> internalPackageNames;
public IndexManager(Configuration artifactsToProcess, Project project) {
this.artifactsToProcess = artifactsToProcess;
this.indexFileReferenceAccess = project.getLayout()
.getBuildDirectory()
.file( "orm/reports/indexing/jandex.idx" );
this.packageFileReferenceAccess = project.getLayout()
.getBuildDirectory()
.file( "orm/reports/indexing/internal-packages.txt" );
this.project = project;
}
public Configuration getArtifactsToProcess() {
return artifactsToProcess;
}
public Provider<RegularFile> getIndexFileReferenceAccess() {
return indexFileReferenceAccess;
}
public Provider<RegularFile> getPackageFileReferenceAccess() {
return packageFileReferenceAccess;
}
public TreeSet<Inclusion> getInternalPackageNames() {
return internalPackageNames;
}
public Index getIndex() {
if ( index == null ) {
index = loadIndex( indexFileReferenceAccess );
internalPackageNames = loadInternalPackageNames( packageFileReferenceAccess );
}
return index;
}
private static Index loadIndex(Provider<RegularFile> indexFileReferenceAccess) {
final File indexFile = indexFileReferenceAccess.get().getAsFile();
if ( !indexFile.exists() ) {
throw new IllegalStateException( "Cannot load index; the stored file does not exist - " + indexFile.getAbsolutePath() );
}
try ( final FileInputStream stream = new FileInputStream( indexFile ) ) {
final IndexReader indexReader = new IndexReader( stream );
return indexReader.read();
}
catch (FileNotFoundException e) {
throw new IllegalStateException( "Cannot load index; the stored file does not exist - " + indexFile.getAbsolutePath(), e );
}
catch (IOException e) {
throw new IllegalStateException( "Cannot load index; unable to read stored file - " + indexFile.getAbsolutePath(), e );
}
}
private static TreeSet<Inclusion> loadInternalPackageNames(Provider<RegularFile> packageFileReferenceAccess) {
final File packageNameFile = packageFileReferenceAccess.get().getAsFile();
if ( !packageNameFile.exists() ) {
throw new IllegalStateException( "Cannot load internal packages; the stored file does not exist - " + packageNameFile.getAbsolutePath() );
}
final TreeSet<Inclusion> inclusions = new TreeSet<>( Comparator.comparing( Inclusion::getPath ) );
try {
final List<String> lines = Files.readAllLines( packageNameFile.toPath() );
lines.forEach( (line) -> {
if ( line == null || line.isEmpty() ) {
return;
}
inclusions.add( new Inclusion( line, true ) );
} );
return inclusions;
}
catch (IOException e) {
throw new RuntimeException( "Unable to read package-name file - " + packageNameFile.getAbsolutePath(), e );
}
}
/**
* Used from {@link IndexerTask} as its action
*/
void index() {
if ( index != null ) {
throw new IllegalStateException( "Index was already created or loaded" );
}
final Indexer indexer = new Indexer();
internalPackageNames = new TreeSet<>( Comparator.comparing( Inclusion::getPath ) );
// note: each of `artifacts` is a jar-file
final Set<File> artifacts = artifactsToProcess.resolve();
artifacts.forEach( (jar) -> {
final FileTree jarFileTree = project.zipTree( jar );
jarFileTree.visit(
new FileVisitor() {
private boolean isInOrmPackage(RelativePath relativePath) {
return relativePath.getPathString().startsWith( "org/hibernate/" );
}
@Override
public void visitDir(FileVisitDetails details) {
final RelativePath relativePath = details.getRelativePath();
if ( !isInOrmPackage( relativePath ) ) {
return;
}
if ( relativePath.getPathString().endsWith( "internal" )
|| relativePath.getPathString().endsWith( "internal/" ) ) {
final String packageName = relativePath.toString().replace( '/', '.' );
internalPackageNames.add( new Inclusion( packageName, true ) );
}
}
@Override
public void visitFile(FileVisitDetails details) {
final RelativePath relativePath = details.getRelativePath();
if ( !isInOrmPackage( relativePath ) ) {
return;
}
if ( relativePath.getPathString().endsWith( ".class" ) ) {
try (final FileInputStream stream = new FileInputStream( details.getFile() )) {
final ClassSummary classSummary = indexer.indexWithSummary( stream );
if ( classSummary == null ) {
project.getLogger()
.lifecycle( "Problem indexing | IndexManager |
java | micronaut-projects__micronaut-core | core/src/main/java/io/micronaut/core/util/clhm/ConcurrentLinkedHashMap.java | {
"start": 49077,
"end": 50378
} | class ____ extends AbstractSet<Entry<K, V>> {
final ConcurrentLinkedHashMap<K, V> map = ConcurrentLinkedHashMap.this;
@Override
public int size() {
return map.size();
}
@Override
public void clear() {
map.clear();
}
@Override
public Iterator<Entry<K, V>> iterator() {
return new EntryIterator();
}
@Override
public boolean contains(Object obj) {
if (!(obj instanceof Entry<?, ?>)) {
return false;
}
Entry<?, ?> entry = (Entry<?, ?>) obj;
Node<K, V> node = map.data.get(entry.getKey());
return (node != null) && (node.getValue().equals(entry.getValue()));
}
@Override
public boolean add(Entry<K, V> entry) {
return (map.putIfAbsent(entry.getKey(), entry.getValue()) == null);
}
@Override
public boolean remove(Object obj) {
if (!(obj instanceof Entry<?, ?>)) {
return false;
}
Entry<?, ?> entry = (Entry<?, ?>) obj;
return map.remove(entry.getKey(), entry.getValue());
}
}
/** An adapter to safely externalize the entry iterator. */
final | EntrySet |
java | elastic__elasticsearch | modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/AbstractStringProcessorFactoryTestCase.java | {
"start": 812,
"end": 4061
} | class ____ extends ESTestCase {
protected abstract AbstractStringProcessor.Factory newFactory();
protected Map<String, Object> modifyConfig(Map<String, Object> config) {
return config;
}
protected void assertProcessor(AbstractStringProcessor<?> processor) {}
public void testCreate() throws Exception {
AbstractStringProcessor.Factory factory = newFactory();
String fieldName = RandomDocumentPicks.randomFieldName(random());
String processorTag = randomAlphaOfLength(10);
Map<String, Object> config = new HashMap<>();
config.put("field", fieldName);
AbstractStringProcessor<?> processor = factory.create(null, processorTag, null, modifyConfig(config), null);
assertThat(processor.getTag(), equalTo(processorTag));
assertThat(processor.getField(), equalTo(fieldName));
assertThat(processor.isIgnoreMissing(), is(false));
assertThat(processor.getTargetField(), equalTo(fieldName));
assertProcessor(processor);
}
public void testCreateWithIgnoreMissing() throws Exception {
AbstractStringProcessor.Factory factory = newFactory();
String fieldName = RandomDocumentPicks.randomFieldName(random());
String processorTag = randomAlphaOfLength(10);
Map<String, Object> config = new HashMap<>();
config.put("field", fieldName);
config.put("ignore_missing", true);
AbstractStringProcessor<?> processor = factory.create(null, processorTag, null, modifyConfig(config), null);
assertThat(processor.getTag(), equalTo(processorTag));
assertThat(processor.getField(), equalTo(fieldName));
assertThat(processor.isIgnoreMissing(), is(true));
assertThat(processor.getTargetField(), equalTo(fieldName));
assertProcessor(processor);
}
public void testCreateWithTargetField() throws Exception {
AbstractStringProcessor.Factory factory = newFactory();
String fieldName = RandomDocumentPicks.randomFieldName(random());
String targetFieldName = RandomDocumentPicks.randomFieldName(random());
String processorTag = randomAlphaOfLength(10);
Map<String, Object> config = new HashMap<>();
config.put("field", fieldName);
config.put("target_field", targetFieldName);
AbstractStringProcessor<?> processor = factory.create(null, processorTag, null, modifyConfig(config), null);
assertThat(processor.getTag(), equalTo(processorTag));
assertThat(processor.getField(), equalTo(fieldName));
assertThat(processor.isIgnoreMissing(), is(false));
assertThat(processor.getTargetField(), equalTo(targetFieldName));
assertProcessor(processor);
}
public void testCreateMissingField() throws Exception {
AbstractStringProcessor.Factory factory = newFactory();
Map<String, Object> config = new HashMap<>();
try {
factory.create(null, null, null, config, null);
fail("factory create should have failed");
} catch (ElasticsearchParseException e) {
assertThat(e.getMessage(), equalTo("[field] required property is missing"));
}
}
}
| AbstractStringProcessorFactoryTestCase |
java | apache__dubbo | dubbo-rpc/dubbo-rpc-triple/src/test/java/org/apache/dubbo/rpc/protocol/tri/test/TestRequest.java | {
"start": 1496,
"end": 6661
} | class ____ {
private final HttpHeaders headers = HttpHeaders.create();
private final Map<String, String> cookies = new LinkedHashMap<>();
private final Map<String, Object> params = new LinkedHashMap<>();
private final Map<String, String> providerParams = new LinkedHashMap<>();
private List<Object> bodies;
public TestRequest(HttpMethods method, String path) {
setMethod(method);
setPath(path);
}
public TestRequest(String path) {
setPath(path);
}
public TestRequest() {}
public String getPath() {
return headers.getFirst(PseudoHeaderName.PATH.value());
}
public TestRequest setPath(String path) {
headers.set(PseudoHeaderName.PATH.value(), path);
return this;
}
public String getMethod() {
return headers.getFirst(PseudoHeaderName.METHOD.value());
}
public TestRequest setMethod(String method) {
headers.set(PseudoHeaderName.METHOD.value(), method);
return this;
}
public TestRequest setMethod(HttpMethods method) {
return setMethod(method.name());
}
public String getContentType() {
return headers.getFirst(HttpHeaderNames.CONTENT_TYPE.getKey());
}
public TestRequest setContentType(String contentType) {
if (StringUtils.isNotEmpty(contentType)) {
headers.set(HttpHeaderNames.CONTENT_TYPE.getKey(), contentType);
}
return this;
}
public TestRequest setContentType(MediaType mediaType) {
return setContentType(mediaType.getName());
}
public TestRequest setContentType(MediaType mediaType, String charset) {
return setContentType(mediaType.getName() + "; charset=" + charset);
}
public TestRequest setContentType(MediaType mediaType, Charset charset) {
return setContentType(mediaType.getName() + "; charset=" + charset.name());
}
public TestRequest setAccept(String accept) {
if (StringUtils.isNotEmpty(accept)) {
headers.set(HttpHeaderNames.ACCEPT.getKey(), accept);
}
return this;
}
public TestRequest setAccept(MediaType mediaType) {
return setAccept(mediaType.getName());
}
public TestRequest setHeader(String name, Object value) {
if (value != null) {
headers.set(name, value.toString());
}
return this;
}
@SuppressWarnings("unchecked")
public TestRequest setHeaders(Map<String, ?> headers) {
for (Map.Entry<String, ?> entry : headers.entrySet()) {
Object value = entry.getValue();
if (value instanceof List) {
List<String> items = new ArrayList<>();
for (Object obj : (List<Object>) value) {
if (obj != null) {
items.add(obj.toString());
}
}
this.headers.add(entry.getKey(), items);
} else if (value instanceof Object[]) {
List<String> items = new ArrayList<>();
for (Object obj : (Object[]) value) {
if (obj != null) {
items.add(obj.toString());
}
}
this.headers.add(entry.getKey(), items);
} else if (value != null) {
this.headers.set(entry.getKey(), value.toString());
}
}
return this;
}
public TestRequest setCookie(String name, String value) {
cookies.put(name, value);
return this;
}
public TestRequest setCookies(Map<String, String> cookies) {
this.cookies.putAll(cookies);
return this;
}
public Map<String, String> getCookies() {
return cookies;
}
public TestRequest param(String name, Object value) {
params.put(name, value);
return this;
}
public TestRequest setParams(Map<String, ?> params) {
this.params.putAll(params);
return this;
}
public Map<String, Object> getParams() {
return params;
}
public TestRequest setProviderParam(String name, String value) {
providerParams.put(name, value);
return this;
}
public TestRequest setProviderParams(Map<String, String> params) {
providerParams.putAll(params);
return this;
}
public Map<String, String> getProviderParams() {
return providerParams;
}
public TestRequest setBody(Object body) {
List<Object> bodies = this.bodies;
if (bodies == null) {
bodies = new ArrayList<>();
this.bodies = bodies;
}
bodies.add(body);
return this;
}
public List<Object> getBodies() {
return bodies;
}
public TestRequest post(Object body) {
setMethod(HttpMethods.POST);
setBody(body);
return this;
}
public TestRequest post() {
setMethod(HttpMethods.POST);
return this;
}
public Http2Header toMetadata() {
return new Http2MetadataFrame(headers, !HttpMethods.supportBody(getMethod()));
}
}
| TestRequest |
java | google__auto | factory/src/test/resources/good/SimpleClassVarargs.java | {
"start": 859,
"end": 944
} | interface ____ {
SimpleClassVarargs build(String... args);
}
}
| InterfaceWithVarargs |
java | apache__kafka | coordinator-common/src/main/java/org/apache/kafka/coordinator/common/runtime/CoordinatorRuntime.java | {
"start": 12261,
"end": 13202
} | interface ____ provides an event based
* timer which turns timeouts of a regular {@link Timer} into {@link CoordinatorWriteEvent} events which
* are executed by the {@link CoordinatorEventProcessor} used by this coordinator runtime. This is done
* to ensure that the timer respects the threading model of the coordinator runtime.
*
* The {@link CoordinatorWriteEvent} events pushed by the coordinator timer wraps the
* {@link TimeoutOperation} operations scheduled by the coordinators.
*
* It also keeps track of all the scheduled {@link TimerTask}. This allows timeout operations to be
* cancelled or rescheduled. When a timer is cancelled or overridden, the previous timer is guaranteed to
* not be executed even if it already expired and got pushed to the event processor.
*
* When a timer fails with an unexpected exception, the timer is rescheduled with a backoff.
*/
| and |
java | redisson__redisson | redisson/src/main/java/org/redisson/spring/support/RedissonLiveObjectDefinitionParser.java | {
"start": 1199,
"end": 2154
} | class ____
extends AbstractRedissonNamespaceDefinitionParser {
public RedissonLiveObjectDefinitionParser(RedissonNamespaceParserSupport helper) {
super(helper, RedissonNamespaceParserSupport.LIVE_OBJECT_SERVICE_REF_ATTRIBUTE);
}
@Override
protected void parseNested(Element element, ParserContext parserContext, BeanDefinitionBuilder builder, BeanDefinition bd) {
Class<?> apiClass;
try {
apiClass = Class.forName(helper.getAttribute(element,
RedissonNamespaceParserSupport.CLASS_ATTRIBUTE));
} catch (ClassNotFoundException ex) {
throw new IllegalArgumentException(
"The class [" + helper.getAttribute(element,
RedissonNamespaceParserSupport.CLASS_ATTRIBUTE)
+ "] specified in \"api-class\" attribute has not "
+ "found. Please check the | RedissonLiveObjectDefinitionParser |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/search/diversification/DiversifyRetrieverBuilder.java | {
"start": 2296,
"end": 3179
} | class ____ extends CompoundRetrieverBuilder<DiversifyRetrieverBuilder> {
public static final Float DEFAULT_LAMBDA_VALUE = 0.7f;
public static final int DEFAULT_SIZE_VALUE = 10;
public static final NodeFeature RETRIEVER_RESULT_DIVERSIFICATION_MMR_FEATURE = new NodeFeature("retriever.result_diversification_mmr");
public static final String NAME = "diversify";
public static final ParseField RETRIEVER_FIELD = new ParseField("retriever");
public static final ParseField TYPE_FIELD = new ParseField("type");
public static final ParseField FIELD_FIELD = new ParseField("field");
public static final ParseField QUERY_VECTOR_FIELD = new ParseField("query_vector");
public static final ParseField LAMBDA_FIELD = new ParseField("lambda");
public static final ParseField SIZE_FIELD = new ParseField("size");
public static | DiversifyRetrieverBuilder |
java | netty__netty | codec-redis/src/main/java/io/netty/handler/codec/redis/RedisBulkStringAggregator.java | {
"start": 1788,
"end": 3803
} | class ____ extends MessageAggregator<RedisMessage, BulkStringHeaderRedisMessage,
BulkStringRedisContent, FullBulkStringRedisMessage> {
/**
* Creates a new instance.
*/
public RedisBulkStringAggregator() {
super(RedisConstants.REDIS_MESSAGE_MAX_LENGTH, RedisMessage.class);
}
@Override
protected boolean isStartMessage(RedisMessage msg) throws Exception {
return msg instanceof BulkStringHeaderRedisMessage && !isAggregated(msg);
}
@Override
protected boolean isContentMessage(RedisMessage msg) throws Exception {
return msg instanceof BulkStringRedisContent;
}
@Override
protected boolean isLastContentMessage(BulkStringRedisContent msg) throws Exception {
return msg instanceof LastBulkStringRedisContent;
}
@Override
protected boolean isAggregated(RedisMessage msg) throws Exception {
return msg instanceof FullBulkStringRedisMessage;
}
@Override
protected boolean isContentLengthInvalid(BulkStringHeaderRedisMessage start, int maxContentLength)
throws Exception {
return start.bulkStringLength() > maxContentLength;
}
@Override
protected Object newContinueResponse(BulkStringHeaderRedisMessage start, int maxContentLength,
ChannelPipeline pipeline) throws Exception {
return null;
}
@Override
protected boolean closeAfterContinueResponse(Object msg) throws Exception {
throw new UnsupportedOperationException();
}
@Override
protected boolean ignoreContentAfterContinueResponse(Object msg) throws Exception {
throw new UnsupportedOperationException();
}
@Override
protected FullBulkStringRedisMessage beginAggregation(BulkStringHeaderRedisMessage start, ByteBuf content)
throws Exception {
return new FullBulkStringRedisMessage(content);
}
}
| RedisBulkStringAggregator |
java | spring-projects__spring-framework | spring-tx/src/test/java/org/springframework/transaction/annotation/AnnotationTransactionAttributeSourceTests.java | {
"start": 11402,
"end": 12954
} | class ____ {
@Test
void transactionAttributeDeclaredOnClassMethod() {
TransactionAttribute getAgeAttr = getTransactionAttribute(JtaAnnotatedBean1.class, ITestBean1.class, "getAge");
assertThat(getAgeAttr.getPropagationBehavior()).isEqualTo(TransactionAttribute.PROPAGATION_REQUIRED);
TransactionAttribute getNameAttr = getTransactionAttribute(JtaAnnotatedBean1.class, ITestBean1.class, "getName");
assertThat(getNameAttr.getPropagationBehavior()).isEqualTo(TransactionAttribute.PROPAGATION_SUPPORTS);
}
@Test
void transactionAttributeDeclaredOnClass() {
TransactionAttribute getAgeAttr = getTransactionAttribute(JtaAnnotatedBean2.class, ITestBean1.class, "getAge");
assertThat(getAgeAttr.getPropagationBehavior()).isEqualTo(TransactionAttribute.PROPAGATION_REQUIRED);
TransactionAttribute getNameAttr = getTransactionAttribute(JtaAnnotatedBean2.class, ITestBean1.class, "getName");
assertThat(getNameAttr.getPropagationBehavior()).isEqualTo(TransactionAttribute.PROPAGATION_SUPPORTS);
}
@Test
void transactionAttributeDeclaredOnInterface() {
TransactionAttribute getAgeAttr = getTransactionAttribute(JtaAnnotatedBean3.class, ITestJta.class, "getAge");
assertThat(getAgeAttr.getPropagationBehavior()).isEqualTo(TransactionAttribute.PROPAGATION_REQUIRED);
TransactionAttribute getNameAttr = getTransactionAttribute(JtaAnnotatedBean3.class, ITestJta.class, "getName");
assertThat(getNameAttr.getPropagationBehavior()).isEqualTo(TransactionAttribute.PROPAGATION_SUPPORTS);
}
static | JtaAttributeTests |
java | apache__flink | flink-table/flink-table-runtime/src/main/java/org/apache/flink/table/runtime/operators/aggregate/asyncwindow/buffers/AsyncStateWindowBuffer.java | {
"start": 5247,
"end": 6717
} | interface ____ extends Serializable {
/**
* Creates a {@link WindowBuffer} that buffers elements in memory before flushing.
*
* @param operatorOwner the owner of the operator
* @param memoryManager the manager that governs memory by Flink framework
* @param memorySize the managed memory size can be used by this operator
* @param runtimeContext the current {@link RuntimeContext}
* @param timerService the service to register event-time and processing-time timers
* @param keyContext the state context to accessing states
* @param windowState the window async state to flush buffered data into.
* @param isEventTime indicates whether the operator works in event-time or processing-time
* mode, used for register corresponding timers.
* @param shiftTimeZone the shift timezone of the window
* @throws IOException thrown if the buffer can't be opened
*/
AsyncStateWindowBuffer create(
Object operatorOwner,
MemoryManager memoryManager,
long memorySize,
RuntimeContext runtimeContext,
WindowTimerService<Long> timerService,
AsyncStateKeyContext keyContext,
WindowAsyncState<Long> windowState,
boolean isEventTime,
ZoneId shiftTimeZone)
throws Exception;
}
}
| Factory |
java | alibaba__fastjson | src/test/java/com/alibaba/json/bvt/date/DateTest_dotnet_2.java | {
"start": 162,
"end": 472
} | class ____ extends TestCase {
public void test_date() throws Exception {
String text = "{\"date\":\"/Date(1461081600000+0500)/\"}";
Model model = JSON.parseObject(text, Model.class);
Assert.assertEquals(1461081600000L, model.date.getTime());
}
public static | DateTest_dotnet_2 |
java | elastic__elasticsearch | test/framework/src/test/java/org/elasticsearch/search/internal/ConcurrentSearchTestPluginTests.java | {
"start": 756,
"end": 1460
} | class ____ extends ESIntegTestCase {
private final boolean concurrentSearch = randomBoolean();
public void testConcurrentSearch() {
client().admin().indices().prepareCreate("index").get();
ClusterService clusterService = internalCluster().getDataNodeInstance(ClusterService.class);
int minDocsPerSlice = SearchService.MINIMUM_DOCS_PER_SLICE.get(clusterService.getSettings());
if (concurrentSearch) {
assertEquals(1, minDocsPerSlice);
} else {
assertEquals(50_000, minDocsPerSlice);
}
}
@Override
protected boolean enableConcurrentSearch() {
return concurrentSearch;
}
}
| ConcurrentSearchTestPluginTests |
java | quarkusio__quarkus | independent-projects/arc/tests/src/test/java/io/quarkus/arc/test/interceptors/producer/ProducerWithMethodLevelInterceptorsTest.java | {
"start": 2290,
"end": 2891
} | class ____ {
private final String value;
MyNonbean() {
this(null);
}
MyNonbean(String value) {
this.value = value;
}
@MyBinding1
String hello1() {
return "hello1_" + value;
}
@MyBinding2
String hello2() {
return "hello2_" + value;
}
@MyBinding1
@MyBinding2
String hello3() {
return "hello3_" + value;
}
String hello4() {
return "hello4_" + value;
}
}
@Dependent
static | MyNonbean |
java | FasterXML__jackson-databind | src/test/java/tools/jackson/databind/jsontype/vld/BasicPTVKnownTypesTest.java | {
"start": 466,
"end": 848
} | class ____ extends DatabindTestUtil
{
private final ObjectMapper DEFAULTING_MAPPER = jsonMapperBuilder()
.activateDefaultTyping(BasicPolymorphicTypeValidator.builder()
.allowSubTypesWithExplicitDeserializer()
.build(),
DefaultTyping.NON_FINAL_AND_ENUMS)
.build();
static | BasicPTVKnownTypesTest |
java | apache__rocketmq | remoting/src/main/java/org/apache/rocketmq/remoting/protocol/header/DeleteUserRequestHeader.java | {
"start": 1326,
"end": 1802
} | class ____ implements CommandCustomHeader {
private String username;
public DeleteUserRequestHeader() {
}
public DeleteUserRequestHeader(String username) {
this.username = username;
}
@Override
public void checkFields() throws RemotingCommandException {
}
public String getUsername() {
return username;
}
public void setUsername(String username) {
this.username = username;
}
}
| DeleteUserRequestHeader |
java | apache__flink | flink-runtime/src/test/java/org/apache/flink/runtime/jobmaster/slotpool/SlotPoolInteractionsTest.java | {
"start": 1475,
"end": 5519
} | class ____ {
private static final Duration fastTimeout = Duration.ofMillis(1L);
@RegisterExtension
private static final TestingComponentMainThreadExecutor.Extension EXECUTOR_EXTENSION =
new TestingComponentMainThreadExecutor.Extension(10L);
private final TestingComponentMainThreadExecutor testMainThreadExecutor =
EXECUTOR_EXTENSION.getComponentMainThreadTestExecutor();
// ------------------------------------------------------------------------
// tests
// ------------------------------------------------------------------------
@Test
void testSlotAllocationNoResourceManager() throws Exception {
try (SlotPool pool = createAndSetUpSlotPoolWithoutResourceManager()) {
final CompletableFuture<PhysicalSlot> future =
testMainThreadExecutor.execute(
() ->
pool.requestNewAllocatedSlot(
PhysicalSlotRequestUtils.normalRequest(
ResourceProfile.UNKNOWN),
fastTimeout));
assertThatThrownBy(future::get)
.withFailMessage("We expected an ExecutionException.")
.isInstanceOf(ExecutionException.class)
.hasCauseInstanceOf(TimeoutException.class);
}
}
@Test
void testCancelSlotAllocationWithoutResourceManager() throws Exception {
try (DeclarativeSlotPoolBridge pool = createAndSetUpSlotPoolWithoutResourceManager()) {
final CompletableFuture<PhysicalSlot> future =
testMainThreadExecutor.execute(
() ->
pool.requestNewAllocatedSlot(
PhysicalSlotRequestUtils.normalRequest(
ResourceProfile.UNKNOWN),
fastTimeout));
assertThatThrownBy(future::get)
.withFailMessage("We expected a TimeoutException.")
.isInstanceOf(ExecutionException.class)
.hasCauseInstanceOf(TimeoutException.class);
CommonTestUtils.waitUntilCondition(() -> pool.getNumPendingRequests() == 0);
}
}
/** Tests that a slot allocation times out wrt to the specified time out. */
@Test
void testSlotAllocationTimeout() throws Exception {
try (DeclarativeSlotPoolBridge pool = createAndSetUpSlotPool()) {
final CompletableFuture<PhysicalSlot> future =
testMainThreadExecutor.execute(
() ->
pool.requestNewAllocatedSlot(
PhysicalSlotRequestUtils.normalRequest(
ResourceProfile.UNKNOWN),
fastTimeout));
assertThatThrownBy(future::get)
.withFailMessage("We expected a TimeoutException.")
.isInstanceOf(ExecutionException.class)
.hasCauseInstanceOf(TimeoutException.class);
CommonTestUtils.waitUntilCondition(() -> pool.getNumPendingRequests() == 0);
}
}
private DeclarativeSlotPoolBridge createAndSetUpSlotPool() throws Exception {
return new DeclarativeSlotPoolBridgeBuilder()
.setMainThreadExecutor(testMainThreadExecutor.getMainThreadExecutor())
.buildAndStart();
}
private DeclarativeSlotPoolBridge createAndSetUpSlotPoolWithoutResourceManager()
throws Exception {
return new DeclarativeSlotPoolBridgeBuilder()
.setResourceManagerGateway(null)
.setMainThreadExecutor(testMainThreadExecutor.getMainThreadExecutor())
.buildAndStart();
}
}
| SlotPoolInteractionsTest |
java | ReactiveX__RxJava | src/main/java/io/reactivex/rxjava3/internal/operators/parallel/ParallelReduceFull.java | {
"start": 5286,
"end": 7126
} | class ____<T>
extends AtomicReference<Subscription>
implements FlowableSubscriber<T> {
private static final long serialVersionUID = -7954444275102466525L;
final ParallelReduceFullMainSubscriber<T> parent;
final BiFunction<T, T, T> reducer;
T value;
boolean done;
ParallelReduceFullInnerSubscriber(ParallelReduceFullMainSubscriber<T> parent, BiFunction<T, T, T> reducer) {
this.parent = parent;
this.reducer = reducer;
}
@Override
public void onSubscribe(Subscription s) {
SubscriptionHelper.setOnce(this, s, Long.MAX_VALUE);
}
@Override
public void onNext(T t) {
if (!done) {
T v = value;
if (v == null) {
value = t;
} else {
try {
v = Objects.requireNonNull(reducer.apply(v, t), "The reducer returned a null value");
} catch (Throwable ex) {
Exceptions.throwIfFatal(ex);
get().cancel();
onError(ex);
return;
}
value = v;
}
}
}
@Override
public void onError(Throwable t) {
if (done) {
RxJavaPlugins.onError(t);
return;
}
done = true;
parent.innerError(t);
}
@Override
public void onComplete() {
if (!done) {
done = true;
parent.innerComplete(value);
}
}
void cancel() {
SubscriptionHelper.cancel(this);
}
}
static final | ParallelReduceFullInnerSubscriber |
java | google__guice | core/test/com/google/inject/TypeLiteralTypeResolutionTest.java | {
"start": 6975,
"end": 7342
} | interface ____<T extends Number> {
T[] getArray();
Set<T[]> getSetOfArray();
}
public void testRelatedTypeVariables() {
TypeLiteral<?> resolver = TypeLiteral.get(hasRelatedOfString);
assertEquals(String.class, resolver.getParameterTypes(echo).get(0).getType());
assertEquals(String.class, resolver.getReturnType(echo).getType());
}
| HasArray |
java | google__dagger | hilt-compiler/main/java/dagger/hilt/processor/internal/generatesrootinput/GeneratesRootInputProcessor.java | {
"start": 1238,
"end": 1464
} | class ____ extends JavacBaseProcessingStepProcessor {
@Override
public GeneratesRootInputProcessingStep processingStep() {
return new GeneratesRootInputProcessingStep(getXProcessingEnv());
}
}
| GeneratesRootInputProcessor |
java | quarkusio__quarkus | extensions/panache/mongodb-panache/runtime/src/main/java/io/quarkus/mongodb/panache/reactive/ReactivePanacheMongoEntityBase.java | {
"start": 668,
"end": 844
} | class ____ gain auto-generated accessors
* to all their public fields, as well as a lot of useful
* methods. Unless you have a custom ID strategy, you should not extend this | they |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-globalpolicygenerator/src/main/java/org/apache/hadoop/yarn/server/globalpolicygenerator/webapp/GPGPoliciesBlock.java | {
"start": 1669,
"end": 4174
} | class ____ extends HtmlBlock {
private final GlobalPolicyGenerator gpg;
private final FederationStateStoreFacade facade;
@Inject
GPGPoliciesBlock(GlobalPolicyGenerator gpg, ViewContext ctx) {
super(ctx);
this.gpg = gpg;
this.facade = FederationStateStoreFacade.getInstance(gpg.getConfig());
}
@Override
protected void render(Block html) {
try {
Collection<SubClusterPolicyConfiguration> policies =
facade.getPoliciesConfigurations().values();
initYarnFederationPolicies(policies, html);
} catch (Exception e) {
LOG.error("Get GPGPolicies Error.", e);
}
}
private void initYarnFederationPolicies(Collection<SubClusterPolicyConfiguration> policies,
Block html) throws FederationPolicyInitializationException {
Hamlet.TBODY<Hamlet.TABLE<Hamlet>> tbody = html.table("#policies").
thead().
tr().
th(".queue", "Queue Name").
th(".policyType", "Policy Type").
th(".routerPolicyWeights", "Router PolicyWeights").
th(".amrmPolicyWeights", "Router AMRMPolicyWeights").
th(".headroomAlpha", "Router Headroom Alpha").
__().__().
tbody();
if (policies != null) {
for (SubClusterPolicyConfiguration policy : policies) {
Hamlet.TR<Hamlet.TBODY<Hamlet.TABLE<Hamlet>>> row = tbody.tr().td(policy.getQueue());
// Policy Type
String type = policy.getType();
row = row.td(type);
// WeightedPolicyInfo
ByteBuffer params = policy.getParams();
WeightedPolicyInfo weightedPolicyInfo = WeightedPolicyInfo.fromByteBuffer(params);
row = row.td(policyWeight2String(weightedPolicyInfo.getRouterPolicyWeights()));
row = row.td(policyWeight2String(weightedPolicyInfo.getAMRMPolicyWeights()));
row.td(String.valueOf(weightedPolicyInfo.getHeadroomAlpha())).__();
}
}
tbody.__().__();
}
/**
* We will convert the PolicyWeight to string format.
*
* @param weights PolicyWeight.
* @return string format PolicyWeight. example: SC-1:0.91, SC-2:0.09
*/
private String policyWeight2String(Map<SubClusterIdInfo, Float> weights) {
StringBuilder sb = new StringBuilder();
for (Map.Entry<SubClusterIdInfo, Float> entry : weights.entrySet()) {
sb.append(entry.getKey().toId()).append(": ").append(entry.getValue()).append(", ");
}
if (sb.length() > 2) {
sb.setLength(sb.length() - 2);
}
return sb.toString();
}
}
| GPGPoliciesBlock |
java | greenrobot__greendao | DaoCore/src/main/java/org/greenrobot/greendao/rx/RxTransaction.java | {
"start": 962,
"end": 2308
} | class ____ extends RxBase {
private final AbstractDaoSession daoSession;
public RxTransaction(AbstractDaoSession daoSession) {
this.daoSession = daoSession;
}
public RxTransaction(AbstractDaoSession daoSession, Scheduler scheduler) {
super(scheduler);
this.daoSession = daoSession;
}
/**
* Rx version of {@link AbstractDaoSession#runInTx(Runnable)} returning an Observable.
*/
@Experimental
public Observable<Void> run(final Runnable runnable) {
return wrap(new Callable<Void>() {
@Override
public Void call() throws Exception {
daoSession.runInTx(runnable);
return null;
}
});
}
/**
* Rx version of {@link AbstractDaoSession#callInTx(Callable)} returning an Observable.
*/
@Experimental
public <T> Observable<T> call(final Callable<T> callable) {
return wrap(new Callable<T>() {
@Override
public T call() throws Exception {
return daoSession.callInTx(callable);
}
});
}
// Note: wrapping callInTxNoException does not make sense, because the Exception is handled by Rx anyway.
@Experimental
public AbstractDaoSession getDaoSession() {
return daoSession;
}
}
| RxTransaction |
java | apache__camel | components/camel-openstack/src/main/java/org/apache/camel/component/openstack/neutron/producer/NetworkProducer.java | {
"start": 1602,
"end": 5546
} | class ____ extends AbstractOpenstackProducer {
public NetworkProducer(NeutronEndpoint endpoint, OSClient client) {
super(endpoint, client);
}
@Override
public void process(Exchange exchange) throws Exception {
final String operation = getOperation(exchange);
switch (operation) {
case OpenstackConstants.CREATE:
doCreate(exchange);
break;
case OpenstackConstants.GET:
doGet(exchange);
break;
case OpenstackConstants.GET_ALL:
doGetAll(exchange);
break;
case OpenstackConstants.DELETE:
doDelete(exchange);
break;
default:
throw new IllegalArgumentException("Unsupported operation " + operation);
}
}
private void doCreate(Exchange exchange) {
final Network in = messageToNetwork(exchange.getIn());
final Network out = os.networking().network().create(in);
exchange.getIn().setBody(out);
}
private void doGet(Exchange exchange) {
final Message msg = exchange.getIn();
final String id
= msg.getHeader(OpenstackConstants.ID, msg.getHeader(NeutronConstants.NETWORK_ID, String.class), String.class);
StringHelper.notEmpty(id, "Network ID");
final Network out = os.networking().network().get(id);
exchange.getIn().setBody(out);
}
private void doGetAll(Exchange exchange) {
final List<? extends Network> out = os.networking().network().list();
exchange.getIn().setBody(out);
}
private void doDelete(Exchange exchange) {
final Message msg = exchange.getIn();
final String id
= msg.getHeader(OpenstackConstants.ID, msg.getHeader(NeutronConstants.NETWORK_ID, String.class), String.class);
StringHelper.notEmpty(id, "Network ID");
final ActionResponse response = os.networking().network().delete(id);
checkFailure(response, exchange, "Delete network" + id);
}
private Network messageToNetwork(Message message) {
Network network = message.getBody(Network.class);
if (network == null) {
Map headers = message.getHeaders();
NetworkBuilder builder = Builders.network();
StringHelper.notEmpty(message.getHeader(OpenstackConstants.NAME, String.class), "Name");
builder.name(message.getHeader(OpenstackConstants.NAME, String.class));
if (headers.containsKey(NeutronConstants.ADMIN_STATE_UP)) {
builder.adminStateUp(message.getHeader(NeutronConstants.ADMIN_STATE_UP, Boolean.class));
}
if (headers.containsKey(NeutronConstants.NETWORK_TYPE)) {
builder.networkType(message.getHeader(NeutronConstants.NETWORK_TYPE, NetworkType.class));
}
if (headers.containsKey(NeutronConstants.IS_SHARED)) {
builder.isShared(message.getHeader(NeutronConstants.IS_SHARED, Boolean.class));
}
if (headers.containsKey(NeutronConstants.IS_ROUTER_EXTERNAL)) {
builder.isRouterExternal(message.getHeader(NeutronConstants.IS_ROUTER_EXTERNAL, Boolean.class));
}
if (headers.containsKey(NeutronConstants.TENANT_ID)) {
builder.tenantId(message.getHeader(NeutronConstants.TENANT_ID, String.class));
}
if (headers.containsKey(NeutronConstants.PHYSICAL_NETWORK)) {
builder.physicalNetwork(message.getHeader(NeutronConstants.PHYSICAL_NETWORK, String.class));
}
if (headers.containsKey(NeutronConstants.SEGMENT_ID)) {
builder.segmentId(message.getHeader(NeutronConstants.SEGMENT_ID, String.class));
}
network = builder.build();
}
return network;
}
}
| NetworkProducer |
java | apache__camel | dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/CloudtrailEndpointBuilderFactory.java | {
"start": 33516,
"end": 41977
} | interface ____
extends
EndpointConsumerBuilder {
default CloudtrailEndpointBuilder basic() {
return (CloudtrailEndpointBuilder) this;
}
/**
* Allows for bridging the consumer to the Camel routing Error Handler,
* which mean any exceptions (if possible) occurred while the Camel
* consumer is trying to pickup incoming messages, or the likes, will
* now be processed as a message and handled by the routing Error
* Handler. Important: This is only possible if the 3rd party component
* allows Camel to be alerted if an exception was thrown. Some
* components handle this internally only, and therefore
* bridgeErrorHandler is not possible. In other situations we may
* improve the Camel component to hook into the 3rd party component and
* make this possible for future releases. By default the consumer will
* use the org.apache.camel.spi.ExceptionHandler to deal with
* exceptions, that will be logged at WARN or ERROR level and ignored.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: consumer (advanced)
*
* @param bridgeErrorHandler the value to set
* @return the dsl builder
*/
default AdvancedCloudtrailEndpointBuilder bridgeErrorHandler(boolean bridgeErrorHandler) {
doSetProperty("bridgeErrorHandler", bridgeErrorHandler);
return this;
}
/**
* Allows for bridging the consumer to the Camel routing Error Handler,
* which mean any exceptions (if possible) occurred while the Camel
* consumer is trying to pickup incoming messages, or the likes, will
* now be processed as a message and handled by the routing Error
* Handler. Important: This is only possible if the 3rd party component
* allows Camel to be alerted if an exception was thrown. Some
* components handle this internally only, and therefore
* bridgeErrorHandler is not possible. In other situations we may
* improve the Camel component to hook into the 3rd party component and
* make this possible for future releases. By default the consumer will
* use the org.apache.camel.spi.ExceptionHandler to deal with
* exceptions, that will be logged at WARN or ERROR level and ignored.
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: false
* Group: consumer (advanced)
*
* @param bridgeErrorHandler the value to set
* @return the dsl builder
*/
default AdvancedCloudtrailEndpointBuilder bridgeErrorHandler(String bridgeErrorHandler) {
doSetProperty("bridgeErrorHandler", bridgeErrorHandler);
return this;
}
/**
* To let the consumer use a custom ExceptionHandler. Notice if the
* option bridgeErrorHandler is enabled then this option is not in use.
* By default the consumer will deal with exceptions, that will be
* logged at WARN or ERROR level and ignored.
*
* The option is a: <code>org.apache.camel.spi.ExceptionHandler</code>
* type.
*
* Group: consumer (advanced)
*
* @param exceptionHandler the value to set
* @return the dsl builder
*/
default AdvancedCloudtrailEndpointBuilder exceptionHandler(org.apache.camel.spi.ExceptionHandler exceptionHandler) {
doSetProperty("exceptionHandler", exceptionHandler);
return this;
}
/**
* To let the consumer use a custom ExceptionHandler. Notice if the
* option bridgeErrorHandler is enabled then this option is not in use.
* By default the consumer will deal with exceptions, that will be
* logged at WARN or ERROR level and ignored.
*
* The option will be converted to a
* <code>org.apache.camel.spi.ExceptionHandler</code> type.
*
* Group: consumer (advanced)
*
* @param exceptionHandler the value to set
* @return the dsl builder
*/
default AdvancedCloudtrailEndpointBuilder exceptionHandler(String exceptionHandler) {
doSetProperty("exceptionHandler", exceptionHandler);
return this;
}
/**
* Sets the exchange pattern when the consumer creates an exchange.
*
* The option is a: <code>org.apache.camel.ExchangePattern</code> type.
*
* Group: consumer (advanced)
*
* @param exchangePattern the value to set
* @return the dsl builder
*/
default AdvancedCloudtrailEndpointBuilder exchangePattern(org.apache.camel.ExchangePattern exchangePattern) {
doSetProperty("exchangePattern", exchangePattern);
return this;
}
/**
* Sets the exchange pattern when the consumer creates an exchange.
*
* The option will be converted to a
* <code>org.apache.camel.ExchangePattern</code> type.
*
* Group: consumer (advanced)
*
* @param exchangePattern the value to set
* @return the dsl builder
*/
default AdvancedCloudtrailEndpointBuilder exchangePattern(String exchangePattern) {
doSetProperty("exchangePattern", exchangePattern);
return this;
}
/**
* A pluggable org.apache.camel.PollingConsumerPollingStrategy allowing
* you to provide your custom implementation to control error handling
* usually occurred during the poll operation before an Exchange have
* been created and being routed in Camel.
*
* The option is a:
* <code>org.apache.camel.spi.PollingConsumerPollStrategy</code> type.
*
* Group: consumer (advanced)
*
* @param pollStrategy the value to set
* @return the dsl builder
*/
default AdvancedCloudtrailEndpointBuilder pollStrategy(org.apache.camel.spi.PollingConsumerPollStrategy pollStrategy) {
doSetProperty("pollStrategy", pollStrategy);
return this;
}
/**
* A pluggable org.apache.camel.PollingConsumerPollingStrategy allowing
* you to provide your custom implementation to control error handling
* usually occurred during the poll operation before an Exchange have
* been created and being routed in Camel.
*
* The option will be converted to a
* <code>org.apache.camel.spi.PollingConsumerPollStrategy</code> type.
*
* Group: consumer (advanced)
*
* @param pollStrategy the value to set
* @return the dsl builder
*/
default AdvancedCloudtrailEndpointBuilder pollStrategy(String pollStrategy) {
doSetProperty("pollStrategy", pollStrategy);
return this;
}
/**
* Amazon Cloudtrail client to use for all requests for this endpoint.
*
* The option is a:
* <code>software.amazon.awssdk.services.cloudtrail.CloudTrailClient</code> type.
*
* Group: advanced
*
* @param cloudTrailClient the value to set
* @return the dsl builder
*/
default AdvancedCloudtrailEndpointBuilder cloudTrailClient(software.amazon.awssdk.services.cloudtrail.CloudTrailClient cloudTrailClient) {
doSetProperty("cloudTrailClient", cloudTrailClient);
return this;
}
/**
* Amazon Cloudtrail client to use for all requests for this endpoint.
*
* The option will be converted to a
* <code>software.amazon.awssdk.services.cloudtrail.CloudTrailClient</code> type.
*
* Group: advanced
*
* @param cloudTrailClient the value to set
* @return the dsl builder
*/
default AdvancedCloudtrailEndpointBuilder cloudTrailClient(String cloudTrailClient) {
doSetProperty("cloudTrailClient", cloudTrailClient);
return this;
}
}
public | AdvancedCloudtrailEndpointBuilder |
java | elastic__elasticsearch | server/src/test/java/org/elasticsearch/action/admin/indices/alias/AliasActionsTests.java | {
"start": 1752,
"end": 17834
} | class ____ extends ESTestCase {
public void testValidate() {
AliasActions.Type type = randomFrom(AliasActions.Type.values());
if (type == AliasActions.Type.REMOVE_INDEX) {
Exception e = expectThrows(IllegalArgumentException.class, () -> new AliasActions(type).validate());
assertEquals("One of [index] or [indices] is required", e.getMessage());
} else {
Exception e = expectThrows(
IllegalArgumentException.class,
() -> new AliasActions(type).alias(randomAlphaOfLength(5)).validate()
);
assertEquals("One of [index] or [indices] is required", e.getMessage());
e = expectThrows(IllegalArgumentException.class, () -> new AliasActions(type).index(randomAlphaOfLength(5)).validate());
assertEquals("One of [alias] or [aliases] is required", e.getMessage());
}
}
public void testEmptyIndex() {
Exception e = expectThrows(
IllegalArgumentException.class,
() -> new AliasActions(randomFrom(AliasActions.Type.values())).index(null)
);
assertEquals("[index] can't be empty string", e.getMessage());
e = expectThrows(IllegalArgumentException.class, () -> new AliasActions(randomFrom(AliasActions.Type.values())).index(""));
assertEquals("[index] can't be empty string", e.getMessage());
e = expectThrows(
IllegalArgumentException.class,
() -> new AliasActions(randomFrom(AliasActions.Type.values())).indices((String[]) null)
);
assertEquals("[indices] can't be empty", e.getMessage());
e = expectThrows(
IllegalArgumentException.class,
() -> new AliasActions(randomFrom(AliasActions.Type.values())).indices(new String[0])
);
assertEquals("[indices] can't be empty", e.getMessage());
e = expectThrows(
IllegalArgumentException.class,
() -> new AliasActions(randomFrom(AliasActions.Type.values())).indices("test", null)
);
assertEquals("[indices] can't contain empty string", e.getMessage());
e = expectThrows(
IllegalArgumentException.class,
() -> new AliasActions(randomFrom(AliasActions.Type.values())).indices("test", "")
);
assertEquals("[indices] can't contain empty string", e.getMessage());
}
public void testEmptyAlias() {
AliasActions.Type type = randomValueOtherThan(AliasActions.Type.REMOVE_INDEX, () -> randomFrom(AliasActions.Type.values()));
Exception e = expectThrows(IllegalArgumentException.class, () -> new AliasActions(type).alias(null));
assertEquals("[alias] can't be empty string", e.getMessage());
e = expectThrows(IllegalArgumentException.class, () -> new AliasActions(type).alias(""));
assertEquals("[alias] can't be empty string", e.getMessage());
e = expectThrows(IllegalArgumentException.class, () -> new AliasActions(type).aliases((String[]) null));
assertEquals("[aliases] can't be empty", e.getMessage());
e = expectThrows(IllegalArgumentException.class, () -> new AliasActions(type).aliases(new String[0]));
assertEquals("[aliases] can't be empty", e.getMessage());
e = expectThrows(IllegalArgumentException.class, () -> new AliasActions(type).aliases("test", null));
assertEquals("[aliases] can't contain empty string", e.getMessage());
e = expectThrows(IllegalArgumentException.class, () -> new AliasActions(type).aliases("test", ""));
assertEquals("[aliases] can't contain empty string", e.getMessage());
}
public void testBadOptionsInNonIndex() {
AliasActions action = randomBoolean() ? AliasActions.remove() : AliasActions.removeIndex();
Exception e = expectThrows(IllegalArgumentException.class, () -> action.routing("test"));
assertEquals("[routing] is unsupported for [" + action.actionType() + "]", e.getMessage());
e = expectThrows(IllegalArgumentException.class, () -> action.searchRouting("test"));
assertEquals("[search_routing] is unsupported for [" + action.actionType() + "]", e.getMessage());
e = expectThrows(IllegalArgumentException.class, () -> action.indexRouting("test"));
assertEquals("[index_routing] is unsupported for [" + action.actionType() + "]", e.getMessage());
e = expectThrows(IllegalArgumentException.class, () -> action.filter("test"));
assertEquals("[filter] is unsupported for [" + action.actionType() + "]", e.getMessage());
}
public void testMustExistOption() {
final boolean mustExist = randomBoolean();
AliasActions removeAliasAction = AliasActions.remove();
assertNull(removeAliasAction.mustExist());
removeAliasAction.mustExist(mustExist);
assertEquals(mustExist, removeAliasAction.mustExist());
AliasActions action = randomBoolean() ? AliasActions.add() : AliasActions.removeIndex();
Exception e = expectThrows(IllegalArgumentException.class, () -> action.mustExist(mustExist));
assertEquals("[must_exist] is unsupported for [" + action.actionType() + "]", e.getMessage());
}
public void testParseAdd() throws IOException {
String[] indices = generateRandomStringArray(10, 5, false, false);
String[] aliases = generateRandomStringArray(10, 5, false, false);
Map<String, Object> filter = randomBoolean() ? RandomAliasActionsGenerator.randomMap(5) : null;
Object searchRouting = randomBoolean() ? randomRouting() : null;
Object indexRouting = randomBoolean() ? randomBoolean() ? searchRouting : randomRouting() : null;
boolean writeIndex = randomBoolean();
boolean isHidden = randomBoolean();
XContentBuilder b = XContentBuilder.builder(randomFrom(XContentType.values()).xContent());
b.startObject();
{
b.startObject("add");
{
if (indices.length > 1 || randomBoolean()) {
b.array("indices", indices);
} else {
b.field("index", indices[0]);
}
if (aliases.length > 1 || randomBoolean()) {
b.array("aliases", aliases);
} else {
b.field("alias", aliases[0]);
}
if (filter != null) {
b.field("filter", filter);
}
if (searchRouting != null) {
if (searchRouting.equals(indexRouting)) {
b.field("routing", searchRouting);
} else {
b.field("search_routing", searchRouting);
}
}
if (indexRouting != null && false == indexRouting.equals(searchRouting)) {
b.field("index_routing", indexRouting);
}
b.field("is_write_index", writeIndex);
b.field("is_hidden", isHidden);
}
b.endObject();
}
b.endObject();
b = shuffleXContent(b, "filter");
try (XContentParser parser = createParser(b)) {
AliasActions action = AliasActions.PARSER.apply(parser, null);
assertEquals(AliasActions.Type.ADD, action.actionType());
assertThat(action.indices(), equalTo(indices));
assertThat(action.aliases(), equalTo(aliases));
if (filter == null || filter.isEmpty()) {
assertNull(action.filter());
} else {
assertEquals(Strings.toString(XContentFactory.contentBuilder(XContentType.JSON).map(filter)), action.filter());
}
assertEquals(Objects.toString(searchRouting, null), action.searchRouting());
assertEquals(Objects.toString(indexRouting, null), action.indexRouting());
assertEquals(writeIndex, action.writeIndex());
assertEquals(isHidden, action.isHidden());
}
}
/**
 * Parses an {@code add} action whose routing is given via the generic {@code routing}
 * key combined with one of the specific keys, and verifies the specific key
 * ({@code search_routing} / {@code index_routing}) takes precedence for its side.
 */
public void testParseAddDefaultRouting() throws IOException {
    String indexName = randomAlphaOfLength(5);
    String aliasName = randomAlphaOfLength(5);
    Object searchRouting = randomRouting();
    Object indexRouting = randomRouting();
    XContentBuilder builder = XContentBuilder.builder(randomFrom(XContentType.values()).xContent());
    builder.startObject();
    builder.startObject("add");
    builder.field("index", indexName);
    builder.field("alias", aliasName);
    // Randomly choose which routing value rides on the generic "routing" key;
    // the explicitly named key must win for its side either way.
    if (randomBoolean()) {
        builder.field("routing", searchRouting);
        builder.field("index_routing", indexRouting);
    } else {
        builder.field("search_routing", searchRouting);
        builder.field("routing", indexRouting);
    }
    builder.endObject();
    builder.endObject();
    builder = shuffleXContent(builder);
    try (XContentParser parser = createParser(builder)) {
        AliasActions parsed = AliasActions.PARSER.apply(parser, null);
        assertEquals(AliasActions.Type.ADD, parsed.actionType());
        assertThat(parsed.indices(), arrayContaining(indexName));
        assertThat(parsed.aliases(), arrayContaining(aliasName));
        assertEquals(searchRouting.toString(), parsed.searchRouting());
        assertEquals(indexRouting.toString(), parsed.indexRouting());
    }
}
/**
 * Round-trips a randomly built {@code remove} action through the JSON parser,
 * covering both the single-value ({@code index}/{@code alias}) and array
 * ({@code indices}/{@code aliases}) forms plus the optional {@code must_exist} flag.
 */
public void testParseRemove() throws IOException {
    String[] removedIndices = generateRandomStringArray(10, 5, false, false);
    String[] removedAliases = generateRandomStringArray(10, 5, false, false);
    Boolean mustExist = null;
    XContentBuilder builder = XContentBuilder.builder(randomFrom(XContentType.values()).xContent());
    builder.startObject();
    builder.startObject("remove");
    // More than one value forces the array form; otherwise pick a form at random.
    if (removedIndices.length > 1 || randomBoolean()) {
        builder.array("indices", removedIndices);
    } else {
        builder.field("index", removedIndices[0]);
    }
    if (removedAliases.length > 1 || randomBoolean()) {
        builder.array("aliases", removedAliases);
    } else {
        builder.field("alias", removedAliases[0]);
    }
    // must_exist is optional; null means "not specified" and must parse back as null.
    if (randomBoolean()) {
        mustExist = randomBoolean();
        builder.field("must_exist", mustExist);
    }
    builder.endObject();
    builder.endObject();
    builder = shuffleXContent(builder);
    try (XContentParser parser = createParser(builder)) {
        AliasActions parsed = AliasActions.PARSER.apply(parser, null);
        assertEquals(AliasActions.Type.REMOVE, parsed.actionType());
        assertThat(parsed.indices(), equalTo(removedIndices));
        assertThat(parsed.aliases(), equalTo(removedAliases));
        assertThat(parsed.mustExist(), equalTo(mustExist));
    }
}
/**
 * Parses a {@code remove_index} action in both its single-index and multi-index forms
 * and checks the parsed action carries the indices and no aliases.
 */
public void testParseRemoveIndex() throws IOException {
    String[] targets = randomBoolean() ? new String[] { randomAlphaOfLength(5) } : generateRandomStringArray(10, 5, false, false);
    XContentBuilder builder = XContentBuilder.builder(randomFrom(XContentType.values()).xContent());
    builder.startObject();
    builder.startObject("remove_index");
    // Multiple targets force the "indices" array; a single target may use either form.
    if (targets.length > 1 || randomBoolean()) {
        builder.array("indices", targets);
    } else {
        builder.field("index", targets[0]);
    }
    builder.endObject();
    builder.endObject();
    builder = shuffleXContent(builder);
    try (XContentParser parser = createParser(builder)) {
        AliasActions parsed = AliasActions.PARSER.apply(parser, null);
        assertEquals(AliasActions.Type.REMOVE_INDEX, parsed.actionType());
        assertArrayEquals(targets, parsed.indices());
        assertThat(parsed.aliases(), arrayWithSize(0));
    }
}
/**
 * Supplying both {@code index} and {@code indices} in the same action is ambiguous
 * and must be rejected with an {@link IllegalArgumentException} wrapped by the parser.
 */
public void testParseIndexAndIndicesThrowsError() throws IOException {
    XContentBuilder builder = XContentBuilder.builder(randomFrom(XContentType.values()).xContent());
    builder.startObject();
    builder.startObject(randomFrom("add", "remove"));
    builder.field("index", randomAlphaOfLength(5));
    builder.array("indices", generateRandomStringArray(10, 5, false, false));
    builder.field("alias", randomAlphaOfLength(5));
    builder.endObject();
    builder.endObject();
    try (XContentParser parser = createParser(builder)) {
        // The parser wraps the validation failure twice, hence getCause().getCause().
        Exception thrown = expectThrows(XContentParseException.class, () -> AliasActions.PARSER.apply(parser, null));
        assertThat(thrown.getCause().getCause(), instanceOf(IllegalArgumentException.class));
        assertThat(thrown.getCause().getCause().getMessage(), containsString("Only one of [index] and [indices] is supported"));
    }
}
/**
 * Supplying both {@code alias} and {@code aliases} in the same action is ambiguous
 * and must be rejected with an {@link IllegalArgumentException} wrapped by the parser.
 */
public void testParseAliasAndAliasesThrowsError() throws IOException {
    XContentBuilder builder = XContentBuilder.builder(randomFrom(XContentType.values()).xContent());
    builder.startObject();
    builder.startObject(randomFrom("add", "remove"));
    builder.field("index", randomAlphaOfLength(5));
    builder.field("alias", randomAlphaOfLength(5));
    builder.array("aliases", generateRandomStringArray(10, 5, false, false));
    builder.endObject();
    builder.endObject();
    try (XContentParser parser = createParser(builder)) {
        // The parser wraps the validation failure twice, hence getCause().getCause().
        Exception thrown = expectThrows(XContentParseException.class, () -> AliasActions.PARSER.apply(parser, null));
        assertThat(thrown.getCause().getCause(), instanceOf(IllegalArgumentException.class));
        assertThat(thrown.getCause().getCause().getMessage(), containsString("Only one of [alias] and [aliases] is supported"));
    }
}
/**
 * Serializes a randomly configured {@code AliasActions} to an in-memory byte stream
 * and reads it back, asserting the deserialized copy equals the original.
 */
public void testRoundTrip() throws IOException {
AliasActions action = new AliasActions(randomFrom(AliasActions.Type.values()));
// Target either a single index or a random array of indices.
if (randomBoolean()) {
action.index(randomAlphaOfLength(5));
} else {
action.indices(generateRandomStringArray(5, 5, false, false));
}
// REMOVE_INDEX actions get no aliases; only set them for the other action types.
if (action.actionType() != AliasActions.Type.REMOVE_INDEX) {
if (randomBoolean()) {
action.alias(randomAlphaOfLength(5));
} else {
action.aliases(generateRandomStringArray(5, 5, false, false));
}
}
// Filter and routing are only populated for ADD actions here.
if (action.actionType() == AliasActions.Type.ADD) {
if (randomBoolean()) {
action.filter(randomAlphaOfLength(10));
}
if (randomBoolean()) {
// Either one shared routing value, or distinct search/index routing values.
if (randomBoolean()) {
action.routing(randomAlphaOfLength(5));
} else {
action.searchRouting(randomAlphaOfLength(5));
action.indexRouting(randomAlphaOfLength(5));
}
}
}
// Write out, read back, and require equality — a lossless round trip.
try (BytesStreamOutput out = new BytesStreamOutput()) {
action.writeTo(out);
try (StreamInput in = out.bytes().streamInput()) {
AliasActions read = new AliasActions(in);
assertEquals(action, read);
}
}
}
/**
 * Renders random alias actions to XContent (with field order shuffled) and parses
 * them back, asserting the reparsed action equals the original. Repeated with fresh
 * random actions to cover the different action types and field combinations.
 */
public void testFromToXContent() throws IOException {
    for (int iteration = 0; iteration < 20; iteration++) {
        AliasActions original = randomAliasAction();
        XContentType xContentType = randomFrom(XContentType.values());
        // "filter" is excluded from shuffling so its internal structure stays intact.
        BytesReference shuffled = toShuffledXContent(original, xContentType, ToXContent.EMPTY_PARAMS, false, "filter");
        AliasActions reparsed;
        try (XContentParser parser = createParser(xContentType.xContent(), shuffled)) {
            reparsed = AliasActions.fromXContent(parser);
            // The parser must have consumed the whole document.
            assertNull(parser.nextToken());
        }
        assertThat(reparsed, equalTo(original));
    }
}
}
| AliasActionsTests |
java | quarkusio__quarkus | extensions/arc/deployment/src/test/java/io/quarkus/arc/test/interceptor/producer/ProducerWithPrivateZeroParamCtorAndInterceptionTest.java | {
"start": 1726,
"end": 1900
} | class ____ {
private MyNonbean() {
}
@MyBinding
String hello() {
return "hello";
}
}
@Dependent
static | MyNonbean |
java | apache__flink | flink-runtime/src/test/java/org/apache/flink/streaming/api/TypeFillTest.java | {
"start": 7919,
"end": 8097
} | class ____<T, O> implements FlatMapFunction<T, O> {
@Override
public void flatMap(T value, Collector<O> out) throws Exception {}
}
private static | TestFlatMap |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.