language stringclasses 1 value | repo stringclasses 60 values | path stringlengths 22 294 | class_span dict | source stringlengths 13 1.16M | target stringlengths 1 113 |
|---|---|---|---|---|---|
java | netty__netty | common/src/main/java/io/netty/util/ResourceLeak.java | {
"start": 777,
"end": 1532
} | interface ____ {
/**
* Records the caller's current stack trace so that the {@link ResourceLeakDetector} can tell where the leaked
* resource was accessed lastly. This method is a shortcut to {@link #record(Object) record(null)}.
*/
void record();
/**
* Records the caller's current stack trace and the specified additional arbitrary information
* so that the {@link ResourceLeakDetector} can tell where the leaked resource was accessed lastly.
*/
void record(Object hint);
/**
* Close the leak so that {@link ResourceLeakDetector} does not warn about leaked resources.
*
* @return {@code true} if called first time, {@code false} if called already
*/
boolean close();
}
| ResourceLeak |
java | FasterXML__jackson-databind | src/test/java/tools/jackson/databind/tofix/MergeWithCreator1921Test.java | {
"start": 1229,
"end": 3290
} | class ____ {
public static final String VALID_FROM_CANT_BE_NULL = "Valid from can't be null";
public static final String VALID_TO_CANT_BE_BEFORE_VALID_FROM = "Valid to can't be before valid from";
private final String _validFrom;
private final String _validTo;
@JsonCreator
public Validity(@JsonProperty(value = "validFrom", required = true) String validFrom,
@JsonProperty("validTo") String validTo) {
checkValidity(validFrom, validTo);
this._validFrom = validFrom;
this._validTo = validTo;
}
private void checkValidity(String from, String to) {
Objects.requireNonNull(from, VALID_FROM_CANT_BE_NULL);
if (to != null) {
if (from.compareTo(to) > 0) {
throw new IllegalStateException(VALID_TO_CANT_BE_BEFORE_VALID_FROM);
}
}
}
public String getValidFrom() {
return _validFrom;
}
public String getValidTo() {
return _validTo;
}
}
@JacksonTestFailureExpected
@Test
void mergeWithCreator() throws Exception {
final String JSON = "{ \"validity\": { \"validFrom\": \"2018-02-01\", \"validTo\": \"2018-01-31\" } }";
final ObjectMapper mapper = newJsonMapper();
try {
mapper.readValue(JSON, Account.class);
fail("Should not pass");
} catch (ValueInstantiationException e) {
verifyException(e, "Cannot construct");
verifyException(e, Validity.VALID_TO_CANT_BE_BEFORE_VALID_FROM);
}
try {
Account acc = new Account(new Validity("abc", "def"));
mapper.readerForUpdating(acc)
.readValue(JSON);
fail("Should not pass");
} catch (ValueInstantiationException e) {
verifyException(e, "Cannot construct");
verifyException(e, Validity.VALID_TO_CANT_BE_BEFORE_VALID_FROM);
}
}
}
| Validity |
java | resilience4j__resilience4j | resilience4j-ratelimiter/src/main/java/io/github/resilience4j/ratelimiter/event/RateLimiterEvent.java | {
"start": 830,
"end": 993
} | interface ____ {
String getRateLimiterName();
Type getEventType();
int getNumberOfPermits();
ZonedDateTime getCreationTime();
| RateLimiterEvent |
java | mybatis__mybatis-3 | src/test/java/org/apache/ibatis/plugin/Mapper.java | {
"start": 738,
"end": 851
} | interface ____ {
@Select("select name from users where id = #{id}")
String selectNameById(Integer id);
}
| Mapper |
java | apache__kafka | clients/src/test/java/org/apache/kafka/common/requests/DeleteGroupsRequestTest.java | {
"start": 1244,
"end": 2271
} | class ____ {
@Test
public void testGetErrorResultCollection() {
String groupId1 = "group-id-1";
String groupId2 = "group-id-2";
DeleteGroupsRequestData data = new DeleteGroupsRequestData()
.setGroupsNames(Arrays.asList(groupId1, groupId2));
DeleteGroupsResponseData.DeletableGroupResultCollection expectedResultCollection =
new DeleteGroupsResponseData.DeletableGroupResultCollection(Arrays.asList(
new DeleteGroupsResponseData.DeletableGroupResult()
.setGroupId(groupId1)
.setErrorCode(Errors.COORDINATOR_LOAD_IN_PROGRESS.code()),
new DeleteGroupsResponseData.DeletableGroupResult()
.setGroupId(groupId2)
.setErrorCode(Errors.COORDINATOR_LOAD_IN_PROGRESS.code())
).iterator());
assertEquals(expectedResultCollection, getErrorResultCollection(data.groupsNames(), Errors.COORDINATOR_LOAD_IN_PROGRESS));
}
}
| DeleteGroupsRequestTest |
java | alibaba__fastjson | src/test/java/com/alibaba/json/bvt/ref/RefTest10.java | {
"start": 226,
"end": 2424
} | class ____ extends TestCase {
public void test_bug_for_wanglin() throws Exception {
String text = "{ \"schedulerCluster\": \"xyQuestionImport\", \"log\": { \"abilityServiceId\": \"-1\", \"abilityServiceVersionId\": \"-1\", \"createTime\": 1456832040060, \"ip\": \"192.168.1.71\", \"jobDataMap\": { \"com.fjhb.context.v1.Context\": { \"domain\": \"dev.medical.com\", \"gUID\": \"25c5e12ec19946e8a6850237cd8182de\", \"ip\": \"127.0.0.1\", \"organizationId\": \"-1\", \"platformId\": \"2c9180e5520a5e70015214fb2849000a\", \"platformVersionId\": \"2c9180e5520a6063015214fc062d0006\", \"projectId\": \"2c9180e5520a60630152150b0b4a000e\", \"recordChain\": true, \"requestUrl\": \"http://dev.medical.com:9009/gateway/web/admin/questionIE/questionImport\", \"subProjectId\": \"2c9180e5520a606301521596e7070018\", \"test\": false, \"unitId\": \"2c9180e54e7580cd014e801793720010\", \"userId\": \"4028823c4e850e60014e853115dc00sa\" }, \"questionImportDto\": { \"filePath\": \"/work/A4Mode2.xls\", \"organizationId\": \"-1\", \"platformId\": \"2c9180e5520a5e70015214fb2849000a\", \"platformVersionId\": \"2c9180e5520a6063015214fc062d0006\", \"projectId\": \"2c9180e5520a60630152150b0b4a000e\", \"subProjectId\": \"2c9180e5520a606301521596e7070018\", \"unitId\": \"-1\" }, \"questionExcelModeType\": 2, \"user.job.current.execute.key\": \"402881c75331cc62015331e732ce0002\" }, \"jobGroup\": \"xyQuestionImport\", \"jobName\": \"questionImport\", \"key\": \"402881c75331cc62015331e732ce0002\", \"organizationId\": \"-1\", \"platformId\": \"-1\", \"platformVersionId\": \"-1\", \"projectId\": \"-1\", \"remark\": \"\\\"xyQuestionImport\\\"集群中名为:\\\"402881c75331cc62015331ccecbc0000\\\"的调度器开始运行此任务\", \"status\": \"toExecuted\", \"subProjectId\": \"-1\", \"unitId\": \"-1\", \"userId\": \"4028823c4e850e60014e853115dc00sa\" }, \"context\": { \"$ref\": \"$.log.jobDataMap.com.fjhb.context.v1.Context\" }, \"schedulerName\": \"402881c75331cc62015331ccecbc0000\" }";;
JSONObject jsonObj = JSON.parseObject(text);
Assert.assertSame(jsonObj.getJSONObject("log").getJSONObject("jobDataMap").get("com.fjhb.context.v1.Context"), jsonObj.get("context"));
}
public static | RefTest10 |
java | spring-projects__spring-security | config/src/main/java/org/springframework/security/config/annotation/web/configurers/AbstractAuthenticationFilterConfigurer.java | {
"start": 3185,
"end": 16533
} | class ____<B extends HttpSecurityBuilder<B>, T extends AbstractAuthenticationFilterConfigurer<B, T, F>, F extends AbstractAuthenticationProcessingFilter>
extends AbstractHttpConfigurer<T, B> {
private F authFilter;
private AuthenticationDetailsSource<HttpServletRequest, ?> authenticationDetailsSource;
private SavedRequestAwareAuthenticationSuccessHandler defaultSuccessHandler = new SavedRequestAwareAuthenticationSuccessHandler();
private AuthenticationSuccessHandler successHandler = this.defaultSuccessHandler;
private LoginUrlAuthenticationEntryPoint authenticationEntryPoint;
private boolean customLoginPage;
private String loginPage;
private String loginProcessingUrl;
private AuthenticationFailureHandler failureHandler;
private boolean permitAll;
private String failureUrl;
/**
* Creates a new instance with minimal defaults
*/
protected AbstractAuthenticationFilterConfigurer() {
setLoginPage("/login");
}
/**
* Creates a new instance
* @param authenticationFilter the {@link AbstractAuthenticationProcessingFilter} to
* use
* @param defaultLoginProcessingUrl the default URL to use for
* {@link #loginProcessingUrl(String)}
*/
protected AbstractAuthenticationFilterConfigurer(F authenticationFilter, String defaultLoginProcessingUrl) {
this();
this.authFilter = authenticationFilter;
if (defaultLoginProcessingUrl != null) {
loginProcessingUrl(defaultLoginProcessingUrl);
}
}
/**
* Specifies where users will be redirected after authenticating successfully if they
* have not visited a secured page prior to authenticating. This is a shortcut for
* calling {@link #defaultSuccessUrl(String, boolean)}.
* @param defaultSuccessUrl the default success url
* @return the {@link FormLoginConfigurer} for additional customization
*/
public final T defaultSuccessUrl(String defaultSuccessUrl) {
return defaultSuccessUrl(defaultSuccessUrl, false);
}
/**
* Specifies where users will be redirected after authenticating successfully if they
* have not visited a secured page prior to authenticating or {@code alwaysUse} is
* true. This is a shortcut for calling
* {@link #successHandler(AuthenticationSuccessHandler)}.
* @param defaultSuccessUrl the default success url
* @param alwaysUse true if the {@code defaultSuccessUrl} should be used after
* authentication despite if a protected page had been previously visited
* @return the {@link FormLoginConfigurer} for additional customization
*/
public final T defaultSuccessUrl(String defaultSuccessUrl, boolean alwaysUse) {
SavedRequestAwareAuthenticationSuccessHandler handler = new SavedRequestAwareAuthenticationSuccessHandler();
handler.setDefaultTargetUrl(defaultSuccessUrl);
handler.setAlwaysUseDefaultTargetUrl(alwaysUse);
this.defaultSuccessHandler = handler;
return successHandler(handler);
}
/**
* Specifies the URL to validate the credentials.
* @param loginProcessingUrl the URL to validate username and password
* @return the {@link FormLoginConfigurer} for additional customization
*/
public T loginProcessingUrl(String loginProcessingUrl) {
this.loginProcessingUrl = loginProcessingUrl;
this.authFilter.setRequiresAuthenticationRequestMatcher(createLoginProcessingUrlMatcher(loginProcessingUrl));
return getSelf();
}
public T securityContextRepository(SecurityContextRepository securityContextRepository) {
this.authFilter.setSecurityContextRepository(securityContextRepository);
return getSelf();
}
/**
* Create the {@link RequestMatcher} given a loginProcessingUrl
* @param loginProcessingUrl creates the {@link RequestMatcher} based upon the
* loginProcessingUrl
* @return the {@link RequestMatcher} to use based upon the loginProcessingUrl
*/
protected abstract RequestMatcher createLoginProcessingUrlMatcher(String loginProcessingUrl);
/**
* Specifies a custom {@link AuthenticationDetailsSource}. The default is
* {@link WebAuthenticationDetailsSource}.
* @param authenticationDetailsSource the custom {@link AuthenticationDetailsSource}
* @return the {@link FormLoginConfigurer} for additional customization
*/
public final T authenticationDetailsSource(
AuthenticationDetailsSource<HttpServletRequest, ?> authenticationDetailsSource) {
this.authenticationDetailsSource = authenticationDetailsSource;
return getSelf();
}
/**
* Specifies the {@link AuthenticationSuccessHandler} to be used. The default is
* {@link SavedRequestAwareAuthenticationSuccessHandler} with no additional properties
* set.
* @param successHandler the {@link AuthenticationSuccessHandler}.
* @return the {@link FormLoginConfigurer} for additional customization
*/
public final T successHandler(AuthenticationSuccessHandler successHandler) {
this.successHandler = successHandler;
return getSelf();
}
/**
* Equivalent of invoking permitAll(true)
* @return the {@link FormLoginConfigurer} for additional customization
*/
public final T permitAll() {
return permitAll(true);
}
/**
* Ensures the urls for {@link #failureUrl(String)} as well as for the
* {@link HttpSecurityBuilder}, the {@link #getLoginPage} and
* {@link #getLoginProcessingUrl} are granted access to any user.
* @param permitAll true to grant access to the URLs false to skip this step
* @return the {@link FormLoginConfigurer} for additional customization
*/
public final T permitAll(boolean permitAll) {
this.permitAll = permitAll;
return getSelf();
}
/**
* The URL to send users if authentication fails. This is a shortcut for invoking
* {@link #failureHandler(AuthenticationFailureHandler)}. The default is
* "/login?error".
* @param authenticationFailureUrl the URL to send users if authentication fails (i.e.
* "/login?error").
* @return the {@link FormLoginConfigurer} for additional customization
*/
public final T failureUrl(String authenticationFailureUrl) {
T result = failureHandler(new SimpleUrlAuthenticationFailureHandler(authenticationFailureUrl));
this.failureUrl = authenticationFailureUrl;
return result;
}
/**
* Specifies the {@link AuthenticationFailureHandler} to use when authentication
* fails. The default is redirecting to "/login?error" using
* {@link SimpleUrlAuthenticationFailureHandler}
* @param authenticationFailureHandler the {@link AuthenticationFailureHandler} to use
* when authentication fails.
* @return the {@link FormLoginConfigurer} for additional customization
*/
public final T failureHandler(AuthenticationFailureHandler authenticationFailureHandler) {
this.failureUrl = null;
this.failureHandler = authenticationFailureHandler;
return getSelf();
}
@Override
public void init(B http) {
updateAuthenticationDefaults();
updateAccessDefaults(http);
registerDefaultAuthenticationEntryPoint(http);
}
@SuppressWarnings("unchecked")
protected final void registerDefaultAuthenticationEntryPoint(B http) {
registerAuthenticationEntryPoint(http, this.authenticationEntryPoint);
}
@SuppressWarnings("unchecked")
protected final void registerAuthenticationEntryPoint(B http, AuthenticationEntryPoint authenticationEntryPoint) {
ExceptionHandlingConfigurer<B> exceptionHandling = http.getConfigurer(ExceptionHandlingConfigurer.class);
if (exceptionHandling == null) {
return;
}
exceptionHandling.defaultAuthenticationEntryPointFor(postProcess(authenticationEntryPoint),
getAuthenticationEntryPointMatcher(http));
}
protected final RequestMatcher getAuthenticationEntryPointMatcher(B http) {
ContentNegotiationStrategy contentNegotiationStrategy = http.getSharedObject(ContentNegotiationStrategy.class);
if (contentNegotiationStrategy == null) {
contentNegotiationStrategy = new HeaderContentNegotiationStrategy();
}
MediaTypeRequestMatcher mediaMatcher = new MediaTypeRequestMatcher(contentNegotiationStrategy,
MediaType.APPLICATION_XHTML_XML, new MediaType("image", "*"), MediaType.TEXT_HTML,
MediaType.TEXT_PLAIN);
mediaMatcher.setIgnoredMediaTypes(Collections.singleton(MediaType.ALL));
RequestMatcher notXRequestedWith = new NegatedRequestMatcher(
new RequestHeaderRequestMatcher("X-Requested-With", "XMLHttpRequest"));
return new AndRequestMatcher(Arrays.asList(notXRequestedWith, mediaMatcher));
}
@Override
public void configure(B http) {
PortMapper portMapper = http.getSharedObject(PortMapper.class);
if (portMapper != null) {
this.authenticationEntryPoint.setPortMapper(portMapper);
}
RequestCache requestCache = http.getSharedObject(RequestCache.class);
if (requestCache != null) {
this.defaultSuccessHandler.setRequestCache(requestCache);
}
this.authFilter.setAuthenticationManager(http.getSharedObject(AuthenticationManager.class));
this.authFilter.setAuthenticationSuccessHandler(this.successHandler);
this.authFilter.setAuthenticationFailureHandler(this.failureHandler);
if (this.authenticationDetailsSource != null) {
this.authFilter.setAuthenticationDetailsSource(this.authenticationDetailsSource);
}
SessionAuthenticationStrategy sessionAuthenticationStrategy = http
.getSharedObject(SessionAuthenticationStrategy.class);
if (sessionAuthenticationStrategy != null) {
this.authFilter.setSessionAuthenticationStrategy(sessionAuthenticationStrategy);
}
RememberMeServices rememberMeServices = http.getSharedObject(RememberMeServices.class);
if (rememberMeServices != null) {
this.authFilter.setRememberMeServices(rememberMeServices);
}
SecurityContextConfigurer securityContextConfigurer = http.getConfigurer(SecurityContextConfigurer.class);
if (securityContextConfigurer != null && securityContextConfigurer.isRequireExplicitSave()) {
SecurityContextRepository securityContextRepository = securityContextConfigurer
.getSecurityContextRepository();
this.authFilter.setSecurityContextRepository(securityContextRepository);
}
this.authFilter.setSecurityContextHolderStrategy(getSecurityContextHolderStrategy());
F filter = postProcess(this.authFilter);
http.addFilter(filter);
}
/**
* <p>
* Specifies the URL to send users to if login is required. If used with
* {@link EnableWebSecurity} a default login page will be generated when this
* attribute is not specified.
* </p>
*
* <p>
* If a URL is specified or this is not being used in conjunction with
* {@link EnableWebSecurity}, users are required to process the specified URL to
* generate a login page.
* </p>
*/
protected T loginPage(String loginPage) {
setLoginPage(loginPage);
updateAuthenticationDefaults();
this.customLoginPage = true;
return getSelf();
}
/**
* @return true if a custom login page has been specified, else false
*/
public final boolean isCustomLoginPage() {
return this.customLoginPage;
}
/**
* Gets the Authentication Filter
* @return the Authentication Filter
*/
protected final F getAuthenticationFilter() {
return this.authFilter;
}
/**
* Sets the Authentication Filter
* @param authFilter the Authentication Filter
*/
protected final void setAuthenticationFilter(F authFilter) {
this.authFilter = authFilter;
}
/**
* Gets the login page
* @return the login page
*/
protected final String getLoginPage() {
return this.loginPage;
}
/**
* Gets the Authentication Entry Point
* @return the Authentication Entry Point
*/
protected final AuthenticationEntryPoint getAuthenticationEntryPoint() {
return this.authenticationEntryPoint;
}
/**
* Gets the URL to submit an authentication request to (i.e. where username/password
* must be submitted)
* @return the URL to submit an authentication request to
*/
protected final String getLoginProcessingUrl() {
return this.loginProcessingUrl;
}
/**
* Gets the URL to send users to if authentication fails
* @return the URL to send users if authentication fails (e.g. "/login?error").
*/
protected final String getFailureUrl() {
return this.failureUrl;
}
/**
* Updates the default values for authentication.
*/
protected final void updateAuthenticationDefaults() {
if (this.loginProcessingUrl == null) {
loginProcessingUrl(this.loginPage);
}
if (this.failureHandler == null) {
failureUrl(this.loginPage + "?error");
}
LogoutConfigurer<B> logoutConfigurer = getBuilder().getConfigurer(LogoutConfigurer.class);
if (logoutConfigurer != null && !logoutConfigurer.isCustomLogoutSuccess()) {
logoutConfigurer.logoutSuccessUrl(this.loginPage + "?logout");
}
}
/**
* Updates the default values for access.
*/
protected final void updateAccessDefaults(B http) {
if (this.permitAll) {
PermitAllSupport.permitAll(http, this.loginPage, this.loginProcessingUrl, this.failureUrl);
}
}
/**
* Sets the loginPage and updates the {@link AuthenticationEntryPoint}.
* @param loginPage
*/
private void setLoginPage(String loginPage) {
this.loginPage = loginPage;
this.authenticationEntryPoint = new LoginUrlAuthenticationEntryPoint(loginPage);
}
private <C> C getBeanOrNull(B http, Class<C> clazz) {
ApplicationContext context = http.getSharedObject(ApplicationContext.class);
if (context == null) {
return null;
}
return context.getBeanProvider(clazz).getIfUnique();
}
@SuppressWarnings("unchecked")
private T getSelf() {
return (T) this;
}
}
| AbstractAuthenticationFilterConfigurer |
java | quarkusio__quarkus | independent-projects/arc/processor/src/main/java/io/quarkus/arc/processor/bcextensions/UnresolvedTypeVariableImpl.java | {
"start": 179,
"end": 827
} | class ____ extends TypeImpl<org.jboss.jandex.UnresolvedTypeVariable> implements TypeVariable {
UnresolvedTypeVariableImpl(org.jboss.jandex.IndexView jandexIndex,
org.jboss.jandex.MutableAnnotationOverlay annotationOverlay,
org.jboss.jandex.UnresolvedTypeVariable jandexType) {
super(jandexIndex, annotationOverlay, jandexType);
}
@Override
public String name() {
return jandexType.identifier();
}
@Override
public List<Type> bounds() {
return List.of(fromJandexType(jandexIndex, annotationOverlay, org.jboss.jandex.ClassType.OBJECT_TYPE));
}
}
| UnresolvedTypeVariableImpl |
java | apache__flink | flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/plan/rules/logical/FlinkRightJoinToLeftJoinRule.java | {
"start": 1536,
"end": 4180
} | class ____ extends RelRule<FlinkRightJoinToLeftJoinRule.Config>
implements TransformationRule {
public static final FlinkRightJoinToLeftJoinRule INSTANCE =
FlinkRightJoinToLeftJoinRule.Config.DEFAULT.toRule();
/** Creates a FlinkRightJoinToLeftJoinRule. */
public FlinkRightJoinToLeftJoinRule(FlinkRightJoinToLeftJoinRule.Config config) {
super(config);
}
@Override
public boolean matches(RelOptRuleCall call) {
Join origJoin = call.rel(0);
return origJoin.getJoinType() == JoinRelType.RIGHT;
}
@Override
public void onMatch(RelOptRuleCall call) {
Join origJoin = call.rel(0);
RelNode left = call.rel(1);
RelNode right = call.rel(2);
RexNode newCondition = shiftCondition(origJoin, left, right);
Join leftJoin =
origJoin.copy(
origJoin.getTraitSet(), newCondition, right, left, JoinRelType.LEFT, false);
RelBuilder relBuilder = call.builder();
relBuilder.push(leftJoin);
RelNode project = reorderProjectedFields(left, right, relBuilder);
call.transformTo(project);
}
private RelNode reorderProjectedFields(RelNode left, RelNode right, RelBuilder relBuilder) {
int nFieldsOnLeft = left.getRowType().getFieldList().size();
int nFieldsOnRight = right.getRowType().getFieldList().size();
List<RexNode> reorderedFields = new ArrayList<>();
for (int i = 0; i < nFieldsOnLeft; i++) {
reorderedFields.add(relBuilder.field(i + nFieldsOnRight));
}
for (int i = 0; i < nFieldsOnRight; i++) {
reorderedFields.add(relBuilder.field(i));
}
return relBuilder.project(reorderedFields).build();
}
private RexNode shiftCondition(Join joinRel, RelNode left, RelNode right) {
RexNode condition = joinRel.getCondition();
int nFieldsOnLeft = left.getRowType().getFieldList().size();
int nFieldsOnRight = right.getRowType().getFieldList().size();
int[] adjustments = new int[nFieldsOnLeft + nFieldsOnRight];
for (int i = 0; i < nFieldsOnLeft + nFieldsOnRight; i++) {
adjustments[i] = i < nFieldsOnLeft ? nFieldsOnRight : -nFieldsOnLeft;
}
return condition.accept(
new RelOptUtil.RexInputConverter(
joinRel.getCluster().getRexBuilder(),
joinRel.getRowType().getFieldList(),
adjustments));
}
/** Rule configuration. */
@Value.Immutable(singleton = false)
public | FlinkRightJoinToLeftJoinRule |
java | spring-projects__spring-boot | module/spring-boot-security/src/test/java/org/springframework/boot/security/autoconfigure/web/servlet/ServletWebSecurityAutoConfigurationTests.java | {
"start": 3143,
"end": 9362
} | class ____ {
private final WebApplicationContextRunner contextRunner = new WebApplicationContextRunner()
.withConfiguration(AutoConfigurations.of(SecurityAutoConfiguration.class,
ServletWebSecurityAutoConfiguration.class, PropertyPlaceholderAutoConfiguration.class));
@Test
void testWebConfiguration() {
this.contextRunner.run((context) -> {
assertThat(context.getBean(AuthenticationManagerBuilder.class)).isNotNull();
assertThat(context.getBean(FilterChainProxy.class).getFilterChains()).hasSize(1);
});
}
@Test
void enableWebSecurityIsConditionalOnClass() {
this.contextRunner.withClassLoader(new FilteredClassLoader("org.springframework.security.config"))
.run((context) -> assertThat(context).doesNotHaveBean("springSecurityFilterChain"));
}
@Test
void filterChainBeanIsConditionalOnClassSecurityFilterChain() {
this.contextRunner.withClassLoader(new FilteredClassLoader(SecurityFilterChain.class))
.run((context) -> assertThat(context).doesNotHaveBean(SecurityFilterChain.class));
}
@Test
void securityConfigurerBacksOffWhenOtherSecurityFilterChainBeanPresent() {
this.contextRunner.withConfiguration(AutoConfigurations.of(WebMvcAutoConfiguration.class))
.withUserConfiguration(TestSecurityFilterChainConfig.class)
.run((context) -> {
assertThat(context.getBeansOfType(SecurityFilterChain.class)).hasSize(1);
assertThat(context.containsBean("testSecurityFilterChain")).isTrue();
});
}
@Test
void testFilterIsNotRegisteredInNonWeb() {
try (AnnotationConfigApplicationContext customContext = new AnnotationConfigApplicationContext()) {
customContext.register(SecurityAutoConfiguration.class, SecurityFilterAutoConfiguration.class,
PropertyPlaceholderAutoConfiguration.class);
customContext.refresh();
assertThat(customContext.containsBean("securityFilterChainRegistration")).isFalse();
}
}
@Test
void testDefaultFilterOrder() {
this.contextRunner.withConfiguration(AutoConfigurations.of(SecurityFilterAutoConfiguration.class))
.run((context) -> assertThat(
context.getBean("securityFilterChainRegistration", DelegatingFilterProxyRegistrationBean.class)
.getOrder())
.isEqualTo(OrderedFilter.REQUEST_WRAPPER_FILTER_MAX_ORDER - 100));
}
@Test
void testCustomFilterOrder() {
this.contextRunner.withConfiguration(AutoConfigurations.of(SecurityFilterAutoConfiguration.class))
.withPropertyValues("spring.security.filter.order:12345")
.run((context) -> assertThat(
context.getBean("securityFilterChainRegistration", DelegatingFilterProxyRegistrationBean.class)
.getOrder())
.isEqualTo(12345));
}
@Test
void defaultFilterDispatcherTypes() {
this.contextRunner.withConfiguration(AutoConfigurations.of(SecurityFilterAutoConfiguration.class))
.run((context) -> {
DelegatingFilterProxyRegistrationBean bean = context.getBean("securityFilterChainRegistration",
DelegatingFilterProxyRegistrationBean.class);
assertThat(bean).extracting("dispatcherTypes", InstanceOfAssertFactories.iterable(DispatcherType.class))
.containsExactlyInAnyOrderElementsOf(EnumSet.allOf(DispatcherType.class));
});
}
@Test
void customFilterDispatcherTypes() {
this.contextRunner.withPropertyValues("spring.security.filter.dispatcher-types:INCLUDE,ERROR")
.withConfiguration(AutoConfigurations.of(SecurityFilterAutoConfiguration.class))
.run((context) -> {
DelegatingFilterProxyRegistrationBean bean = context.getBean("securityFilterChainRegistration",
DelegatingFilterProxyRegistrationBean.class);
assertThat(bean).extracting("dispatcherTypes", InstanceOfAssertFactories.iterable(DispatcherType.class))
.containsOnly(DispatcherType.INCLUDE, DispatcherType.ERROR);
});
}
@Test
void emptyFilterDispatcherTypesDoNotThrowException() {
this.contextRunner.withPropertyValues("spring.security.filter.dispatcher-types:")
.withConfiguration(AutoConfigurations.of(SecurityFilterAutoConfiguration.class))
.run((context) -> {
DelegatingFilterProxyRegistrationBean bean = context.getBean("securityFilterChainRegistration",
DelegatingFilterProxyRegistrationBean.class);
assertThat(bean).extracting("dispatcherTypes", InstanceOfAssertFactories.iterable(DispatcherType.class))
.isEmpty();
});
}
@Test
@WithPublicKeyResource
void whenAConfigurationPropertyBindingConverterIsDefinedThenBindingToAnRsaKeySucceeds() {
this.contextRunner.withUserConfiguration(ConverterConfiguration.class, PropertiesConfiguration.class)
.withPropertyValues("jwt.public-key=classpath:public-key-location")
.run((context) -> assertThat(context.getBean(JwtProperties.class).getPublicKey()).isNotNull());
}
@Test
@WithPublicKeyResource
void whenTheBeanFactoryHasAConversionServiceAndAConfigurationPropertyBindingConverterIsDefinedThenBindingToAnRsaKeySucceeds() {
this.contextRunner
.withInitializer(
(context) -> context.getBeanFactory().setConversionService(new ApplicationConversionService()))
.withUserConfiguration(ConverterConfiguration.class, PropertiesConfiguration.class)
.withPropertyValues("jwt.public-key=classpath:public-key-location")
.run((context) -> assertThat(context.getBean(JwtProperties.class).getPublicKey()).isNotNull());
}
@Test
void whenDispatcherServletPathIsSetPathPatternRequestMatcherBuilderHasCustomBasePath() {
this.contextRunner.withBean(DispatcherServletPath.class, () -> () -> "/dispatcher-servlet").run((context) -> {
PathPatternRequestMatcher.Builder builder = context.getBean(PathPatternRequestMatcher.Builder.class);
assertThat(builder).extracting("basePath").isEqualTo("/dispatcher-servlet");
});
}
@Test
void givenACustomPathPatternRequestMatcherBuilderWhenDispatcherServletPathIsSetBuilderBasePathIsNotCustomized() {
this.contextRunner.withBean(PathPatternRequestMatcherBuilderFactoryBean.class)
.withBean(DispatcherServletPath.class, () -> () -> "/dispatcher-servlet")
.run((context) -> {
PathPatternRequestMatcher.Builder builder = context.getBean(PathPatternRequestMatcher.Builder.class);
assertThat(builder).extracting("basePath").isEqualTo("");
});
}
@Configuration(proxyBeanMethods = false)
static | ServletWebSecurityAutoConfigurationTests |
java | quarkusio__quarkus | core/deployment/src/main/java/io/quarkus/deployment/builditem/ExcludeDependencyBuildItem.java | {
"start": 321,
"end": 1115
} | class ____ extends MultiBuildItem {
private final String groupId;
private final String artifactId;
private final Optional<String> classifier;
public ExcludeDependencyBuildItem(String groupId, String artifactId) {
this(groupId, artifactId, Optional.empty());
}
public ExcludeDependencyBuildItem(String groupId, String artifactId, Optional<String> classifier) {
this.groupId = Objects.requireNonNull(groupId);
this.artifactId = artifactId;
this.classifier = Objects.requireNonNull(classifier);
}
public String getGroupId() {
return groupId;
}
public String getArtifactId() {
return artifactId;
}
public Optional<String> getClassifier() {
return classifier;
}
}
| ExcludeDependencyBuildItem |
java | apache__thrift | lib/java/src/crossTest/java/org/apache/thrift/test/TestNonblockingServer.java | {
"start": 1186,
"end": 2492
} | class ____ extends TestServer {
public static void main(String[] args) {
try {
int port = 9090;
boolean hsha = false;
for (int i = 0; i < args.length; i++) {
if (args[i].equals("-p")) {
port = Integer.valueOf(args[i++]);
} else if (args[i].equals("-hsha")) {
hsha = true;
}
}
// @TODO add other protocol and transport types
// Processor
TestHandler testHandler = new TestHandler();
ThriftTest.Processor testProcessor = new ThriftTest.Processor(testHandler);
// Transport
TNonblockingServerSocket tServerSocket =
new TNonblockingServerSocket(
new TNonblockingServerSocket.NonblockingAbstractServerSocketArgs().port(port));
TServer serverEngine;
if (hsha) {
// HsHa Server
serverEngine = new THsHaServer(new Args(tServerSocket).processor(testProcessor));
} else {
// Nonblocking Server
serverEngine = new TNonblockingServer(new Args(tServerSocket).processor(testProcessor));
}
// Run it
System.out.println("Starting the server on port " + port + "...");
serverEngine.serve();
} catch (Exception x) {
x.printStackTrace();
}
System.out.println("done.");
}
}
| TestNonblockingServer |
java | lettuce-io__lettuce-core | src/test/jmh/io/lettuce/core/support/AsyncConnectionPoolBenchmark.java | {
"start": 337,
"end": 1370
} | class ____ {
private AsyncPool<StatefulRedisConnection<String, String>> pool;
private StatefulRedisConnection[] holder = new StatefulRedisConnection[20];
@Setup
public void setup() {
BoundedPoolConfig config = BoundedPoolConfig.builder().minIdle(0).maxIdle(20).maxTotal(20).build();
pool = AsyncConnectionPoolSupport.createBoundedObjectPool(
() -> CompletableFuture.completedFuture(new EmptyStatefulRedisConnection(EmptyRedisChannelWriter.INSTANCE)),
config);
}
@TearDown(Level.Iteration)
public void tearDown() {
pool.clear();
}
@Benchmark
public void singleConnection() {
pool.release(pool.acquire().join()).join();
}
@Benchmark
public void twentyConnections() {
for (int i = 0; i < holder.length; i++) {
holder[i] = pool.acquire().join();
}
for (int i = 0; i < holder.length; i++) {
pool.release(holder[i]).join();
}
}
}
| AsyncConnectionPoolBenchmark |
java | apache__hadoop | hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/db/BigDecimalSplitter.java | {
"start": 1427,
"end": 5144
} | class ____ implements DBSplitter {
private static final Logger LOG =
LoggerFactory.getLogger(BigDecimalSplitter.class);
public List<InputSplit> split(Configuration conf, ResultSet results, String colName)
throws SQLException {
BigDecimal minVal = results.getBigDecimal(1);
BigDecimal maxVal = results.getBigDecimal(2);
String lowClausePrefix = colName + " >= ";
String highClausePrefix = colName + " < ";
BigDecimal numSplits = new BigDecimal(conf.getInt(MRJobConfig.NUM_MAPS, 1));
if (minVal == null && maxVal == null) {
// Range is null to null. Return a null split accordingly.
List<InputSplit> splits = new ArrayList<InputSplit>();
splits.add(new DataDrivenDBInputFormat.DataDrivenDBInputSplit(
colName + " IS NULL", colName + " IS NULL"));
return splits;
}
if (minVal == null || maxVal == null) {
// Don't know what is a reasonable min/max value for interpolation. Fail.
LOG.error("Cannot find a range for NUMERIC or DECIMAL fields with one end NULL.");
return null;
}
// Get all the split points together.
List<BigDecimal> splitPoints = split(numSplits, minVal, maxVal);
List<InputSplit> splits = new ArrayList<InputSplit>();
// Turn the split points into a set of intervals.
BigDecimal start = splitPoints.get(0);
for (int i = 1; i < splitPoints.size(); i++) {
BigDecimal end = splitPoints.get(i);
if (i == splitPoints.size() - 1) {
// This is the last one; use a closed interval.
splits.add(new DataDrivenDBInputFormat.DataDrivenDBInputSplit(
lowClausePrefix + start.toString(),
colName + " <= " + end.toString()));
} else {
// Normal open-interval case.
splits.add(new DataDrivenDBInputFormat.DataDrivenDBInputSplit(
lowClausePrefix + start.toString(),
highClausePrefix + end.toString()));
}
start = end;
}
return splits;
}
private static final BigDecimal MIN_INCREMENT = new BigDecimal(10000 * Double.MIN_VALUE);
/**
* Divide numerator by denominator. If impossible in exact mode, use rounding.
*/
protected BigDecimal tryDivide(BigDecimal numerator, BigDecimal denominator) {
try {
return numerator.divide(denominator);
} catch (ArithmeticException ae) {
return numerator.divide(denominator, BigDecimal.ROUND_HALF_UP);
}
}
/**
* Returns a list of BigDecimals one element longer than the list of input splits.
* This represents the boundaries between input splits.
* All splits are open on the top end, except the last one.
*
* So the list [0, 5, 8, 12, 18] would represent splits capturing the intervals:
*
* [0, 5)
* [5, 8)
* [8, 12)
* [12, 18] note the closed interval for the last split.
*/
List<BigDecimal> split(BigDecimal numSplits, BigDecimal minVal, BigDecimal maxVal)
throws SQLException {
List<BigDecimal> splits = new ArrayList<BigDecimal>();
// Use numSplits as a hint. May need an extra task if the size doesn't
// divide cleanly.
BigDecimal splitSize = tryDivide(maxVal.subtract(minVal), (numSplits));
if (splitSize.compareTo(MIN_INCREMENT) < 0) {
splitSize = MIN_INCREMENT;
LOG.warn("Set BigDecimal splitSize to MIN_INCREMENT");
}
BigDecimal curVal = minVal;
while (curVal.compareTo(maxVal) <= 0) {
splits.add(curVal);
curVal = curVal.add(splitSize);
}
if (splits.get(splits.size() - 1).compareTo(maxVal) != 0 || splits.size() == 1) {
// We didn't end on the maxVal. Add that to the end of the list.
splits.add(maxVal);
}
return splits;
}
}
| BigDecimalSplitter |
java | apache__flink | flink-runtime/src/test/java/org/apache/flink/streaming/api/operators/BackendRestorerProcedureTest.java | {
"start": 2502,
"end": 10038
} | class ____ {
private final FunctionWithException<
Collection<OperatorStateHandle>, OperatorStateBackend, Exception>
backendSupplier =
(stateHandles) ->
new DefaultOperatorStateBackendBuilder(
getClass().getClassLoader(),
new ExecutionConfig(),
true,
stateHandles,
new CloseableRegistry())
.build();
/**
* Tests that the restore procedure follows the order of the iterator and will retries failed
* attempts if there are more options.
*/
@Test
void testRestoreProcedureOrderAndFailure() throws Exception {
CloseableRegistry closeableRegistry = new CloseableRegistry();
CheckpointStreamFactory checkpointStreamFactory = new MemCheckpointStreamFactory(1024);
ListStateDescriptor<Integer> stateDescriptor =
new ListStateDescriptor<>("test-state", Integer.class);
OperatorStateBackend originalBackend = backendSupplier.apply(Collections.emptyList());
SnapshotResult<OperatorStateHandle> snapshotResult;
try {
ListState<Integer> listState = originalBackend.getListState(stateDescriptor);
listState.add(0);
listState.add(1);
listState.add(2);
listState.add(3);
RunnableFuture<SnapshotResult<OperatorStateHandle>> snapshot =
originalBackend.snapshot(
0L,
0L,
checkpointStreamFactory,
CheckpointOptions.forCheckpointWithDefaultLocation());
snapshot.run();
snapshotResult = snapshot.get();
} finally {
originalBackend.close();
originalBackend.dispose();
}
OperatorStateHandle firstFailHandle = mock(OperatorStateHandle.class);
OperatorStateHandle secondSuccessHandle = spy(snapshotResult.getJobManagerOwnedSnapshot());
OperatorStateHandle thirdNotUsedHandle = mock(OperatorStateHandle.class);
List<StateObjectCollection<OperatorStateHandle>> sortedRestoreOptions =
Arrays.asList(
new StateObjectCollection<>(Collections.singletonList(firstFailHandle)),
new StateObjectCollection<>(Collections.singletonList(secondSuccessHandle)),
new StateObjectCollection<>(Collections.singletonList(thirdNotUsedHandle)));
BackendRestorerProcedure<OperatorStateBackend, OperatorStateHandle> restorerProcedure =
new BackendRestorerProcedure<>(
backendSupplier, closeableRegistry, "test op state backend");
OperatorStateBackend restoredBackend =
restorerProcedure.createAndRestore(
sortedRestoreOptions, StateObject.StateObjectSizeStatsCollector.create());
assertThat(restoredBackend).isNotNull();
try {
verify(firstFailHandle).openInputStream();
verify(secondSuccessHandle).openInputStream();
verify(thirdNotUsedHandle, times(0)).openInputStream();
ListState<Integer> listState = restoredBackend.getListState(stateDescriptor);
Iterator<Integer> stateIterator = listState.get().iterator();
assertThat(stateIterator.next()).isZero();
assertThat(stateIterator.next()).isOne();
assertThat(stateIterator.next()).isEqualTo(2);
assertThat(stateIterator.next()).isEqualTo(3);
assertThat(stateIterator).isExhausted();
} finally {
restoredBackend.close();
restoredBackend.dispose();
}
}
/** Tests if there is an exception if all restore attempts are exhausted and failed. */
@Test
void testExceptionThrownIfAllRestoresFailed() throws Exception {
CloseableRegistry closeableRegistry = new CloseableRegistry();
OperatorStateHandle firstFailHandle = mock(OperatorStateHandle.class);
OperatorStateHandle secondFailHandle = mock(OperatorStateHandle.class);
OperatorStateHandle thirdFailHandle = mock(OperatorStateHandle.class);
List<StateObjectCollection<OperatorStateHandle>> sortedRestoreOptions =
Arrays.asList(
new StateObjectCollection<>(Collections.singletonList(firstFailHandle)),
new StateObjectCollection<>(Collections.singletonList(secondFailHandle)),
new StateObjectCollection<>(Collections.singletonList(thirdFailHandle)));
BackendRestorerProcedure<OperatorStateBackend, OperatorStateHandle> restorerProcedure =
new BackendRestorerProcedure<>(
backendSupplier, closeableRegistry, "test op state backend");
assertThatThrownBy(
() ->
restorerProcedure.createAndRestore(
sortedRestoreOptions,
StateObject.StateObjectSizeStatsCollector.create()))
.isInstanceOf(FlinkException.class);
verify(firstFailHandle).openInputStream();
verify(secondFailHandle).openInputStream();
verify(thirdFailHandle).openInputStream();
}
/** Test that the restore can be stopped via the provided closeable registry. */
@Test
void testCanBeCanceledViaRegistry() throws Exception {
CloseableRegistry closeableRegistry = new CloseableRegistry();
OneShotLatch waitForBlock = new OneShotLatch();
OneShotLatch unblock = new OneShotLatch();
OperatorStateHandle blockingRestoreHandle = mock(OperatorStateHandle.class);
when(blockingRestoreHandle.openInputStream())
.thenReturn(new BlockingFSDataInputStream(waitForBlock, unblock));
List<StateObjectCollection<OperatorStateHandle>> sortedRestoreOptions =
Collections.singletonList(
new StateObjectCollection<>(
Collections.singletonList(blockingRestoreHandle)));
BackendRestorerProcedure<OperatorStateBackend, OperatorStateHandle> restorerProcedure =
new BackendRestorerProcedure<>(
backendSupplier, closeableRegistry, "test op state backend");
AtomicReference<Exception> exceptionReference = new AtomicReference<>(null);
Thread restoreThread =
new Thread(
() -> {
try {
restorerProcedure.createAndRestore(
sortedRestoreOptions,
StateObject.StateObjectSizeStatsCollector.create());
} catch (Exception e) {
exceptionReference.set(e);
}
});
restoreThread.start();
waitForBlock.await();
closeableRegistry.close();
unblock.trigger();
restoreThread.join();
Exception exception = exceptionReference.get();
assertThat(exception).isInstanceOf(FlinkException.class);
}
}
| BackendRestorerProcedureTest |
java | apache__camel | components/camel-ftp/src/test/java/org/apache/camel/component/file/remote/BaseServerTestSupport.java | {
"start": 914,
"end": 1034
} | class ____ extends CamelTestSupport {
protected static final String LS = System.lineSeparator();
}
| BaseServerTestSupport |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/reservation/ReservationSystem.java | {
"start": 1591,
"end": 2106
} | interface ____ the one implemented by any system that wants to support
* Reservations i.e. make {@code Resource} allocations in future. Implementors
* need to bootstrap all configured {@link Plan}s in the active
* {@link ResourceScheduler} along with their corresponding
* {@code ReservationAgent} and {@link SharingPolicy}. It is also responsible
* for managing the {@link PlanFollower} to ensure the {@link Plan}s are in sync
* with the {@link ResourceScheduler}.
*/
@LimitedPrivate("yarn")
@Unstable
public | is |
java | hibernate__hibernate-orm | hibernate-envers/src/test/java/org/hibernate/orm/test/envers/integration/manytoone/bidirectional/ManyToOneCustomRevisionListenerTest.java | {
"start": 1647,
"end": 3668
} | class ____ {
private static final ThreadLocal<AuditReader> auditReader = ThreadLocal.withInitial( () -> null );
@BeforeClassTemplate
public void initData(EntityManagerFactoryScope scope) {
scope.inTransaction( em -> {
// store in thread-local to use it in custom revision listener
auditReader.set( AuditReaderFactory.get( em ) );
final Employee bilbo = new Employee( "Bilbo Baggins" );
em.persist( bilbo );
final Employee frodo = new Employee( "Frodo Baggins" );
em.persist( frodo );
} );
scope.inTransaction( em -> {
auditReader.set( AuditReaderFactory.get( em ) );
final Employee bilbo = em.createQuery( "from Employee where name = 'Bilbo Baggins'", Employee.class ).getSingleResult();
final Employee frodo = em.createQuery( "from Employee where name = 'Frodo Baggins'", Employee.class ).getSingleResult();
final Document document = new Document( "The Hobbit" );
document.getAuthors().add( new DocumentAuthorEmployee( 1L, document, bilbo ) );
document.getAuthors().add( new DocumentAuthorEmployee( 2L, document, frodo ) );
em.persist( document );
} );
}
@Test
public void testDocumentAuthorEmployeeRevisions(EntityManagerFactoryScope scope) {
scope.inEntityManager( em -> {
final AuditReader reader = AuditReaderFactory.get( em );
assertLastRevision( reader, 1L, "Bilbo Baggins" );
assertLastRevision( reader, 2L, "Frodo Baggins" );
} );
}
private static void assertLastRevision(AuditReader reader, Long id, String employee) {
final List<Number> revisions = reader.getRevisions( DocumentAuthorEmployee.class, id );
final Number revisionNumber = revisions.get( revisions.size() - 1 );
final DocumentAuthorEmployee result = reader.find( DocumentAuthorEmployee.class, id, revisionNumber );
assertThat( result.getEmployee().getName() ).isEqualTo( employee );
assertThat( result.getDocument().getTitle() ).isEqualTo( "The Hobbit" );
}
@Audited(withModifiedFlag = true)
@Entity(name = "Document")
static | ManyToOneCustomRevisionListenerTest |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/boot/models/annotations/internal/CollectionIdTypeAnnotation.java | {
"start": 642,
"end": 2262
} | class ____ implements CollectionIdType {
private java.lang.Class<? extends org.hibernate.usertype.UserType<?>> value;
private org.hibernate.annotations.Parameter[] parameters;
/**
* Used in creating dynamic annotation instances (e.g. from XML)
*/
public CollectionIdTypeAnnotation(ModelsContext modelContext) {
this.parameters = new org.hibernate.annotations.Parameter[0];
}
/**
* Used in creating annotation instances from JDK variant
*/
public CollectionIdTypeAnnotation(CollectionIdType annotation, ModelsContext modelContext) {
this.value = annotation.value();
this.parameters = extractJdkValue( annotation, COLLECTION_ID_TYPE, "parameters", modelContext );
}
/**
* Used in creating annotation instances from Jandex variant
*/
public CollectionIdTypeAnnotation(Map<String, Object> attributeValues, ModelsContext modelContext) {
this.value = (Class<? extends org.hibernate.usertype.UserType<?>>) attributeValues.get( "value" );
this.parameters = (org.hibernate.annotations.Parameter[]) attributeValues.get( "parameters" );
}
@Override
public Class<? extends Annotation> annotationType() {
return CollectionIdType.class;
}
@Override
public java.lang.Class<? extends org.hibernate.usertype.UserType<?>> value() {
return value;
}
public void value(java.lang.Class<? extends org.hibernate.usertype.UserType<?>> value) {
this.value = value;
}
@Override
public org.hibernate.annotations.Parameter[] parameters() {
return parameters;
}
public void parameters(org.hibernate.annotations.Parameter[] value) {
this.parameters = value;
}
}
| CollectionIdTypeAnnotation |
java | apache__camel | components/camel-digitalocean/src/test/java/org/apache/camel/component/digitalocean/DigitalOceanComponentTest.java | {
"start": 1562,
"end": 2712
} | class ____ extends CamelTestSupport {
@EndpointInject("mock:result")
protected MockEndpoint mockResultEndpoint;
@BindToRegistry("digitalOceanClient")
DigitalOceanClient digitalOceanClient = new DigitalOceanClientMock();
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
public void configure() {
from("direct:getAccountInfo")
.setHeader(DigitalOceanHeaders.OPERATION, constant(DigitalOceanOperations.get))
.to("digitalocean:account?digitalOceanClient=#digitalOceanClient")
.to("mock:result");
}
};
}
@Test
public void testGetAccountInfo() throws Exception {
mockResultEndpoint.expectedMinimumMessageCount(1);
Exchange exchange = template.request("direct:getAccountInfo", null);
MockEndpoint.assertIsSatisfied(context);
assertIsInstanceOf(Account.class, exchange.getMessage().getBody());
assertEquals("camel@apache.org", exchange.getMessage().getBody(Account.class).getEmail());
}
}
| DigitalOceanComponentTest |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/bytecode/enhancement/lazy/proxy/Order.java | {
"start": 765,
"end": 2305
} | class ____ {
private Integer oid;
private String theText;
private Customer customer;
private OrderSupplemental supplemental;
private OrderSupplemental2 supplemental2;
private Set<Payment> payments = new HashSet<Payment>();
public Order() {
}
public Order(Integer oid, String theText, Customer customer) {
this.oid = oid;
this.theText = theText;
this.customer = customer;
}
@Id
@Column(name = "oid")
public Integer getOid() {
return oid;
}
public void setOid(Integer oid) {
this.oid = oid;
}
public String getTheText() {
return theText;
}
public void setTheText(String theText) {
this.theText = theText;
}
@ManyToOne(fetch = FetchType.LAZY)
@JoinColumn
// @LazyToOne( LazyToOneOption.NO_PROXY )
public Customer getCustomer() {
return customer;
}
public void setCustomer(Customer customer) {
this.customer = customer;
}
@OneToMany(fetch = FetchType.LAZY)
public Set<Payment> getPayments() {
return payments;
}
public void setPayments(Set<Payment> payments) {
this.payments = payments;
}
@OneToOne(fetch = FetchType.LAZY)
@JoinColumn(name = "supp_info_id")
public OrderSupplemental getSupplemental() {
return supplemental;
}
public void setSupplemental(OrderSupplemental supplemental) {
this.supplemental = supplemental;
}
@OneToOne(fetch = FetchType.LAZY, mappedBy = "order")
public OrderSupplemental2 getSupplemental2() {
return supplemental2;
}
public void setSupplemental2(OrderSupplemental2 supplemental2) {
this.supplemental2 = supplemental2;
}
}
| Order |
java | apache__camel | components/camel-jms/src/test/java/org/apache/camel/component/jms/integration/JmsXPathHeaderIT.java | {
"start": 1469,
"end": 3830
} | class ____ extends AbstractJMSTest {
@Order(2)
@RegisterExtension
public static CamelContextExtension camelContextExtension = new DefaultCamelContextExtension();
protected CamelContext context;
protected ProducerTemplate template;
protected ConsumerTemplate consumer;
@Test
public void testTrue() throws Exception {
getMockEndpoint("mock:true").expectedMessageCount(1);
getMockEndpoint("mock:other").expectedMessageCount(0);
template.sendBodyAndHeader("activemq:queue:in", "<hello>World</hello>", "foo", "true");
MockEndpoint.assertIsSatisfied(context);
}
@Test
public void testFalse() throws Exception {
getMockEndpoint("mock:true").expectedMessageCount(0);
getMockEndpoint("mock:other").expectedMessageCount(1);
template.sendBodyAndHeader("activemq:queue:in", "<hello>World</hello>", "foo", "false");
MockEndpoint.assertIsSatisfied(context);
}
@Test
public void testNoHeader() throws Exception {
getMockEndpoint("mock:true").expectedMessageCount(0);
getMockEndpoint("mock:other").expectedMessageCount(1);
template.sendBody("activemq:queue:in", "<hello>World</hello>");
MockEndpoint.assertIsSatisfied(context);
}
@Override
protected String getComponentName() {
return "activemq";
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
from("activemq:queue:in")
.choice()
.when().xpath("$foo = 'true'")
.to("activemq:queue:true")
.otherwise()
.to("activemq:queue:other")
.end();
from("activemq:queue:true").to("mock:true");
from("activemq:queue:other").to("mock:other");
}
};
}
@Override
public CamelContextExtension getCamelContextExtension() {
return camelContextExtension;
}
@BeforeEach
void setUpRequirements() {
context = camelContextExtension.getContext();
template = camelContextExtension.getProducerTemplate();
consumer = camelContextExtension.getConsumerTemplate();
}
}
| JmsXPathHeaderIT |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/bytecode/enhancement/access/UnsupportedEnhancementStrategyTest.java | {
"start": 7847,
"end": 8174
} | class ____ extends AbstractSuperclass {
private Long id;
@Id
public Long getId() {
return id;
}
public void setId(Long id) {
this.id = id;
}
public String getProperty() {
return property;
}
public void setProperty(String property) {
this.property = property;
}
}
}
| PropertyAccessInheritedEntity |
java | apache__camel | components/camel-http/src/main/java/org/apache/camel/component/http/helper/HttpMethodHelper.java | {
"start": 1261,
"end": 4043
} | class ____ {
private HttpMethodHelper() {
// Helper class
}
/**
* Creates the HttpMethod to use to call the remote server, often either its GET or POST
*/
public static HttpMethods createMethod(Exchange exchange, HttpEndpoint endpoint) throws URISyntaxException {
// is a query string provided in the endpoint URI or in a header (header
// overrules endpoint)
String queryString = null;
String uriString = null;
if (!endpoint.isSkipControlHeaders()) {
queryString = exchange.getIn().getHeader(HttpConstants.HTTP_QUERY, String.class);
uriString = exchange.getIn().getHeader(HttpConstants.HTTP_URI, String.class);
}
if (uriString != null) {
// resolve placeholders in uriString
try {
uriString = exchange.getContext().resolvePropertyPlaceholders(uriString);
} catch (Exception e) {
throw new RuntimeExchangeException("Cannot resolve property placeholders with uri: " + uriString, exchange, e);
}
// in case the URI string contains unsafe characters
uriString = UnsafeUriCharactersEncoder.encodeHttpURI(uriString);
URI uri = new URI(uriString);
queryString = uri.getQuery();
}
if (queryString == null) {
queryString = endpoint.getHttpUri().getRawQuery();
}
// compute what method to use either GET or POST
HttpMethods answer;
if (endpoint.getHttpMethod() != null) {
// endpoint configured take precedence
answer = HttpMethods.valueOf(endpoint.getHttpMethod().name());
} else {
// compute what method to use either GET or POST (header take precedence)
HttpMethods m = null;
if (!endpoint.isSkipControlHeaders()) {
m = exchange.getIn().getHeader(HttpConstants.HTTP_METHOD, HttpMethods.class);
}
if (m != null) {
// always use what end-user provides in a header
answer = m;
} else if (queryString != null) {
// if a query string is provided then use GET
answer = HttpMethods.GET;
} else {
// fallback to POST if we have payload, otherwise GET
Object body = exchange.getMessage().getBody();
if (body instanceof StreamCache sc) {
long len = sc.length();
answer = len > 0 ? HttpMethods.POST : HttpMethods.GET;
} else {
answer = body != null ? HttpMethods.POST : HttpMethods.GET;
}
}
}
return answer;
}
}
| HttpMethodHelper |
java | apache__camel | core/camel-core/src/test/java/org/apache/camel/component/timer/TimerDelaySecondsTest.java | {
"start": 1032,
"end": 1558
} | class ____ extends ContextTestSupport {
@Test
public void testDelay() throws Exception {
MockEndpoint mock = getMockEndpoint("mock:result");
mock.expectedMinimumMessageCount(1);
assertMockEndpointsSatisfied();
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
from("timer://foo?delay=1s").to("mock:result");
}
};
}
}
| TimerDelaySecondsTest |
java | quarkusio__quarkus | integration-tests/rest-client-reactive/src/main/java/io/quarkus/it/rest/client/main/Apple.java | {
"start": 48,
"end": 352
} | class ____ {
private String cultivar;
public Apple() {
}
public Apple(String cultivar) {
this.cultivar = cultivar;
}
public String getCultivar() {
return cultivar;
}
public void setCultivar(String cultivar) {
this.cultivar = cultivar;
}
}
| Apple |
java | alibaba__nacos | test/naming-test/src/test/java/com/alibaba/nacos/test/naming/NamingBase.java | {
"start": 1456,
"end": 10017
} | class ____ extends HttpClient4Test {
public static final String TEST_DOM_1 = "nacos.test.1";
public static final String TEST_IP_4_DOM_1 = "127.0.0.1";
public static final String TEST_PORT_4_DOM_1 = "8080";
public static final String TEST_PORT2_4_DOM_1 = "8888";
public static final String TEST_PORT3_4_DOM_1 = "80";
public static final String TEST_TOKEN_4_DOM_1 = "abc";
public static final String TEST_NEW_CLUSTER_4_DOM_1 = "TEST1";
public static final String TEST_DOM_2 = "nacos.test.2";
public static final String TEST_IP_4_DOM_2 = "127.0.0.2";
public static final String TEST_PORT_4_DOM_2 = "7070";
public static final String TETS_TOKEN_4_DOM_2 = "xyz";
public static final String TEST_SERVER_STATUS = "UP";
public static final String TEST_GROUP = "group";
public static final String TEST_GROUP_1 = "group1";
public static final String TEST_GROUP_2 = "group2";
public static final String TEST_NAMESPACE_1 = "namespace-1";
public static final String TEST_NAMESPACE_2 = "namespace-2";
public static final int TEST_PORT = 8080;
public static final int TIME_OUT = 3000;
static final String NAMING_CONTROLLER_PATH = "/nacos/v1/ns";
private static final NacosRestTemplate nacosRestTemplate = NamingHttpClientManager.getInstance().getNacosRestTemplate();
public static String randomDomainName() {
StringBuilder sb = new StringBuilder();
sb.append("jinhan");
for (int i = 0; i < 2; i++) {
sb.append(RandomUtils.getStringWithNumAndCha(5));
sb.append('.');
}
int i = RandomUtils.getIntegerBetween(0, 2);
if (i == 0) {
sb.append("com");
} else {
sb.append("net");
}
return sb.toString();
}
public static Instance getInstance(String serviceName) {
Instance instance = new Instance();
instance.setIp("127.0.0.1");
instance.setPort(TEST_PORT);
instance.setHealthy(true);
instance.setWeight(2.0);
Map<String, String> instanceMeta = new HashMap<String, String>();
instanceMeta.put("site", "et2");
instance.setMetadata(instanceMeta);
instance.setServiceName(serviceName);
instance.setClusterName("c1");
return instance;
}
public static boolean verifyInstance(Instance i1, Instance i2) {
if (!i1.getIp().equals(i2.getIp()) || i1.getPort() != i2.getPort() || i1.getWeight() != i2.getWeight()
|| i1.isHealthy() != i2.isHealthy() || !i1.getMetadata().equals(i2.getMetadata())) {
return false;
}
//Service service1 = i1.getService();
//Service service2 = i2.getService();
//
//if (!service1.getApp().equals(service2.getApp()) || !service1.getGroup().equals(service2.getGroup()) ||
// !service1.getMetadata().equals(service2.getMetadata()) || !service1.getName().equals(service2.getName()) ||
// service1.getProtectThreshold() != service2.getProtectThreshold() ||
// service1.isEnableClientBeat() != service2.isEnableClientBeat() ||
// service1.isEnableHealthCheck() != service2.isEnableHealthCheck()) {
// return false;
//}
//Cluster cluster1 = i1.getCluster();
//Cluster cluster2 = i2.getCluster();
//
//if (!cluster1.getName().equals(cluster2.getName()) ||
// cluster1.getDefaultCheckPort() != cluster2.getDefaultCheckPort() ||
// cluster1.getDefaultPort() != cluster2.getDefaultPort() ||
// !cluster1.getServiceName().equals(cluster2.getServiceName()) ||
// !cluster1.getMetadata().equals(cluster2.getMetadata())||
// cluster1.isUseIpPort4Check() != cluster2.isUseIpPort4Check()) {
// return false;
//}
//
//HealthChecker healthChecker1 = cluster1.getHealthChecker();
//HealthChecker healthChecker2 = cluster2.getHealthChecker();
//
//if (healthChecker1.getClass().getName() != healthChecker2.getClass().getName()) {
// return false;
//}
//
//if (healthChecker1 instanceof HealthChecker.Http) {
// HealthChecker.Http h1 = (HealthChecker.Http) healthChecker1;
// HealthChecker.Http h2 = (HealthChecker.Http) healthChecker2;
//
// if (h1.getExpectedResponseCode() != h2.getExpectedResponseCode() ||
// !h1.getHeaders().equals(h2.getHeaders()) ||
// !h1.getPath().equals(h2.getPath()) ||
// !h1.getCustomHeaders().equals(h2.getCustomHeaders())) {
// return false;
// }
//}
return true;
}
public static boolean verifyInstanceList(List<Instance> instanceList1, List<Instance> instanceList2) {
Map<String, Instance> instanceMap = new HashMap<String, Instance>();
for (Instance instance : instanceList1) {
instanceMap.put(instance.getIp(), instance);
}
Map<String, Instance> instanceGetMap = new HashMap<String, Instance>();
for (Instance instance : instanceList2) {
instanceGetMap.put(instance.getIp(), instance);
}
for (String ip : instanceMap.keySet()) {
if (!instanceGetMap.containsKey(ip)) {
return false;
}
if (!verifyInstance(instanceMap.get(ip), instanceGetMap.get(ip))) {
return false;
}
}
return true;
}
public static void prepareServer(int localPort) throws Exception {
prepareServer(localPort, "UP", "/nacos");
}
public static void prepareServer(int localPort, String contextPath) throws Exception {
prepareServer(localPort, "UP", contextPath);
}
public static void prepareServer(int localPort, String status, String contextPath) throws Exception {
String url = "http://127.0.0.1:" + localPort + normalizeContextPath(contextPath)
+ "/v1/ns/operator/switches?entry=overriddenServerStatus&value=" + status;
Header header = Header.newInstance();
header.addParam(HttpHeaderConsts.USER_AGENT_HEADER, "Nacos-Server");
HttpRestResult<String> result = nacosRestTemplate.putForm(url, header, new HashMap<>(), String.class);
System.out.println(result);
assertEquals(HttpStatus.SC_OK, result.getCode());
url = "http://127.0.0.1:" + localPort + normalizeContextPath(contextPath)
+ "/v1/ns/operator/switches?entry=autoChangeHealthCheckEnabled&value=" + false;
result = nacosRestTemplate.putForm(url, header, new HashMap<>(), String.class);
System.out.println(result);
assertEquals(HttpStatus.SC_OK, result.getCode());
}
public static void destoryServer(int localPort) throws Exception {
destoryServer(localPort, "/nacos");
}
public static void destoryServer(int localPort, String contextPath) throws Exception {
String url = "http://127.0.0.1:" + localPort + normalizeContextPath(contextPath)
+ "/v1/ns/operator/switches?entry=autoChangeHealthCheckEnabled&value=" + true;
Header header = Header.newInstance();
header.addParam(HttpHeaderConsts.USER_AGENT_HEADER, "Nacos-Server");
HttpRestResult<String> result = nacosRestTemplate.putForm(url, header, new HashMap<>(), String.class);
System.out.println(result);
assertEquals(HttpStatus.SC_OK, result.getCode());
}
public static String normalizeContextPath(String contextPath) {
if (StringUtils.isBlank(contextPath) || "/".equals(contextPath)) {
return StringUtils.EMPTY;
}
return contextPath.startsWith("/") ? contextPath : "/" + contextPath;
}
protected void isNamingServerReady() throws InterruptedException {
int retry = 0;
while (retry < 3) {
ResponseEntity<String> response = request("/nacos/v1/ns/operator/metrics", Params.newParams().done(),
String.class);
if (response.getStatusCode().is2xxSuccessful() && response.getBody().contains("UP")) {
break;
}
retry++;
TimeUnit.SECONDS.sleep(5);
}
}
}
| NamingBase |
java | mapstruct__mapstruct | processor/src/test/resources/fixtures/org/mapstruct/ap/test/bugs/_3591/ContainerBeanMapperImpl.java | {
"start": 510,
"end": 3201
} | class ____ implements ContainerBeanMapper {
@Override
public ContainerBeanDto mapWithMapMapping(ContainerBean containerBean, ContainerBeanDto containerBeanDto) {
if ( containerBean == null ) {
return containerBeanDto;
}
if ( containerBeanDto.getBeanMap() != null ) {
Map<String, ContainerBeanDto> map = stringContainerBeanMapToStringContainerBeanDtoMap( containerBean.getBeanMap() );
if ( map != null ) {
containerBeanDto.getBeanMap().clear();
containerBeanDto.getBeanMap().putAll( map );
}
else {
containerBeanDto.setBeanMap( null );
}
}
else {
Map<String, ContainerBeanDto> map = stringContainerBeanMapToStringContainerBeanDtoMap( containerBean.getBeanMap() );
if ( map != null ) {
containerBeanDto.setBeanMap( map );
}
}
containerBeanDto.setBeanStream( containerBeanStreamToContainerBeanDtoStream( containerBean.getBeanStream() ) );
containerBeanDto.setValue( containerBean.getValue() );
return containerBeanDto;
}
protected Stream<ContainerBeanDto> containerBeanStreamToContainerBeanDtoStream(Stream<ContainerBean> stream) {
if ( stream == null ) {
return null;
}
return stream.map( containerBean -> containerBeanToContainerBeanDto( containerBean ) );
}
protected ContainerBeanDto containerBeanToContainerBeanDto(ContainerBean containerBean) {
if ( containerBean == null ) {
return null;
}
ContainerBeanDto containerBeanDto = new ContainerBeanDto();
containerBeanDto.setBeanMap( stringContainerBeanMapToStringContainerBeanDtoMap( containerBean.getBeanMap() ) );
containerBeanDto.setBeanStream( containerBeanStreamToContainerBeanDtoStream( containerBean.getBeanStream() ) );
containerBeanDto.setValue( containerBean.getValue() );
return containerBeanDto;
}
protected Map<String, ContainerBeanDto> stringContainerBeanMapToStringContainerBeanDtoMap(Map<String, ContainerBean> map) {
if ( map == null ) {
return null;
}
Map<String, ContainerBeanDto> map1 = new LinkedHashMap<String, ContainerBeanDto>( Math.max( (int) ( map.size() / .75f ) + 1, 16 ) );
for ( java.util.Map.Entry<String, ContainerBean> entry : map.entrySet() ) {
String key = entry.getKey();
ContainerBeanDto value = containerBeanToContainerBeanDto( entry.getValue() );
map1.put( key, value );
}
return map1;
}
}
| ContainerBeanMapperImpl |
java | apache__flink | flink-docs/src/main/java/org/apache/flink/docs/rest/ApiSpecGeneratorUtils.java | {
"start": 1946,
"end": 2031
} | class ____ dynamic fields that need to be documented.
*
* @param clazz | contains |
java | apache__hadoop | hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/impl/AWSHeaders.java | {
"start": 1693,
"end": 3620
} | class ____ storage a user wants. */
String STORAGE_CLASS = "x-amz-storage-class";
/** Header describing what archive tier the object is in, if any. */
String ARCHIVE_STATUS = "x-amz-archive-status";
/** Header for optional server-side encryption algorithm. */
String SERVER_SIDE_ENCRYPTION = "x-amz-server-side-encryption";
/** Header for optional server-side encryption algorithm. */
String SERVER_SIDE_ENCRYPTION_AWS_KMS_KEY_ID = "x-amz-server-side-encryption-aws-kms-key-id";
/** Range header for the get object request. */
String RANGE = "Range";
/**
* Encrypted symmetric key header that is used in the Encryption Only (EO) envelope
* encryption mechanism.
*/
@Deprecated
String CRYPTO_KEY = "x-amz-key";
/** JSON-encoded description of encryption materials used during encryption. */
String MATERIALS_DESCRIPTION = "x-amz-matdesc";
/** Header for the optional restore information of an object. */
String RESTORE = "x-amz-restore";
/**
* Key wrapping algorithm such as "AESWrap" and "RSA/ECB/OAEPWithSHA-256AndMGF1Padding".
*/
String CRYPTO_KEYWRAP_ALGORITHM = "x-amz-wrap-alg";
/**
* Content encryption algorithm, such as "AES/GCM/NoPadding".
*/
String CRYPTO_CEK_ALGORITHM = "x-amz-cek-alg";
/**
* Header for unencrypted content length of an object: {@value}.
*/
String UNENCRYPTED_CONTENT_LENGTH = "x-amz-unencrypted-content-length";
/**
* Headers in request indicating that the requester must be charged for data
* transfer.
*/
String REQUESTER_PAYS_HEADER = "x-amz-request-payer";
/** Header for the replication status of an Amazon S3 Object.*/
String OBJECT_REPLICATION_STATUS = "x-amz-replication-status";
String OBJECT_LOCK_MODE = "x-amz-object-lock-mode";
String OBJECT_LOCK_RETAIN_UNTIL_DATE = "x-amz-object-lock-retain-until-date";
String OBJECT_LOCK_LEGAL_HOLD_STATUS = "x-amz-object-lock-legal-hold";
} | of |
java | quarkusio__quarkus | core/devmode-spi/src/main/java/io/quarkus/dev/testing/results/TestResultInterface.java | {
"start": 404,
"end": 474
} | enum ____ {
PASSED,
FAILED,
SKIPPED
}
}
| State |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/streaming/runtime/partitioner/StreamPartitioner.java | {
"start": 1337,
"end": 3254
} | class ____<T>
implements ChannelSelector<SerializationDelegate<StreamRecord<T>>>, Serializable {
private static final long serialVersionUID = 1L;
protected int numberOfChannels;
/**
* By default, all partitioner except {@link #isBroadcast()} or {@link #isPointwise()} support
* unaligned checkpoints. However, transformations may disable unaligned checkpoints for
* specific cases.
*/
private boolean supportsUnalignedCheckpoint = true;
@Override
public void setup(int numberOfChannels) {
this.numberOfChannels = numberOfChannels;
}
@Override
public boolean isBroadcast() {
return false;
}
public abstract StreamPartitioner<T> copy();
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
final StreamPartitioner<?> that = (StreamPartitioner<?>) o;
return numberOfChannels == that.numberOfChannels;
}
@Override
public int hashCode() {
return Objects.hash(numberOfChannels);
}
/**
* Defines the behavior of this partitioner, when upstream rescaled during recovery of in-flight
* data.
*/
public SubtaskStateMapper getUpstreamSubtaskStateMapper() {
return SubtaskStateMapper.ARBITRARY;
}
/**
* Defines the behavior of this partitioner, when downstream rescaled during recovery of
* in-flight data.
*/
public abstract SubtaskStateMapper getDownstreamSubtaskStateMapper();
public abstract boolean isPointwise();
public boolean isSupportsUnalignedCheckpoint() {
return supportsUnalignedCheckpoint && !isPointwise() && !isBroadcast();
}
public void disableUnalignedCheckpoints() {
this.supportsUnalignedCheckpoint = false;
}
}
| StreamPartitioner |
java | reactor__reactor-core | reactor-core/src/main/java/reactor/core/publisher/FluxConcatIterable.java | {
"start": 2096,
"end": 3976
} | class ____<T>
extends Operators.MultiSubscriptionSubscriber<T, T> {
final Iterator<? extends Publisher<? extends T>> it;
volatile int wip;
@SuppressWarnings("rawtypes")
static final AtomicIntegerFieldUpdater<ConcatIterableSubscriber> WIP =
AtomicIntegerFieldUpdater.newUpdater(ConcatIterableSubscriber.class,
"wip");
long produced;
ConcatIterableSubscriber(CoreSubscriber<? super T> actual,
Iterator<? extends Publisher<? extends T>> it) {
super(actual);
this.it = it;
}
@Override
public void onNext(T t) {
produced++;
actual.onNext(t);
}
@Override
public void onComplete() {
if (WIP.getAndIncrement(this) == 0) {
Iterator<? extends Publisher<? extends T>> a = this.it;
do {
if (isCancelled()) {
return;
}
boolean b;
try {
b = a.hasNext();
}
catch (Throwable e) {
onError(Operators.onOperatorError(this, e,
actual.currentContext()));
return;
}
if (isCancelled()) {
return;
}
if (!b) {
actual.onComplete();
return;
}
Publisher<? extends T> p;
try {
p = Objects.requireNonNull(it.next(),
"The Publisher returned by the iterator is null");
}
catch (Throwable e) {
actual.onError(Operators.onOperatorError(this, e,
actual.currentContext()));
return;
}
if (isCancelled()) {
return;
}
long c = produced;
if (c != 0L) {
produced = 0L;
produced(c);
}
p = Operators.toFluxOrMono(p);
p.subscribe(this);
if (isCancelled()) {
return;
}
}
while (WIP.decrementAndGet(this) != 0);
}
}
@Override
public @Nullable Object scanUnsafe(Attr key) {
if (key == Attr.RUN_STYLE) return Attr.RunStyle.SYNC;
return super.scanUnsafe(key);
}
}
}
| ConcatIterableSubscriber |
java | quarkusio__quarkus | integration-tests/picocli-native/src/test/java/io/quarkus/it/picocli/PicocliIT.java | {
"start": 130,
"end": 170
} | class ____ extends PicocliTest {
}
| PicocliIT |
java | quarkusio__quarkus | integration-tests/main/src/main/java/io/quarkus/it/context/RequestBean.java | {
"start": 104,
"end": 219
} | class ____ {
public String callMe() {
return "Hello " + System.identityHashCode(this);
}
}
| RequestBean |
java | alibaba__fastjson | src/main/java/com/alibaba/fastjson/parser/deserializer/ASMDeserializerFactory.java | {
"start": 1479,
"end": 86289
} | class ____ implements Opcodes {
public final ASMClassLoader classLoader;
protected final AtomicLong seed = new AtomicLong();
final static String DefaultJSONParser = type(DefaultJSONParser.class);
final static String JSONLexerBase = type(JSONLexerBase.class);
public ASMDeserializerFactory(ClassLoader parentClassLoader){
classLoader = parentClassLoader instanceof ASMClassLoader //
? (ASMClassLoader) parentClassLoader //
: new ASMClassLoader(parentClassLoader);
}
public ObjectDeserializer createJavaBeanDeserializer(ParserConfig config, JavaBeanInfo beanInfo) throws Exception {
Class<?> clazz = beanInfo.clazz;
if (clazz.isPrimitive()) {
throw new IllegalArgumentException("not support type :" + clazz.getName());
}
String className = "FastjsonASMDeserializer_" + seed.incrementAndGet() + "_" + clazz.getSimpleName();
String classNameType;
String classNameFull;
Package pkg = ASMDeserializerFactory.class.getPackage();
if (pkg != null) {
String packageName = pkg.getName();
classNameType = packageName.replace('.', '/') + "/" + className;
classNameFull = packageName + "." + className;
} else {
classNameType = className;
classNameFull = className;
}
ClassWriter cw = new ClassWriter();
cw.visit(V1_5, ACC_PUBLIC + ACC_SUPER, classNameType, type(JavaBeanDeserializer.class), null);
_init(cw, new Context(classNameType, config, beanInfo, 3));
_createInstance(cw, new Context(classNameType, config, beanInfo, 3));
_deserialze(cw, new Context(classNameType, config, beanInfo, 5));
_deserialzeArrayMapping(cw, new Context(classNameType, config, beanInfo, 4));
byte[] code = cw.toByteArray();
Class<?> deserClass = classLoader.defineClassPublic(classNameFull, code, 0, code.length);
Constructor<?> constructor = deserClass.getConstructor(ParserConfig.class, JavaBeanInfo.class);
Object instance = constructor.newInstance(config, beanInfo);
return (ObjectDeserializer) instance;
}
private void _setFlag(MethodVisitor mw, Context context, int i) {
String varName = "_asm_flag_" + (i / 32);
mw.visitVarInsn(ILOAD, context.var(varName));
mw.visitLdcInsn(1 << i);
mw.visitInsn(IOR);
mw.visitVarInsn(ISTORE, context.var(varName));
}
private void _isFlag(MethodVisitor mw, Context context, int i, Label label) {
mw.visitVarInsn(ILOAD, context.var("_asm_flag_" + (i / 32)));
mw.visitLdcInsn(1 << i);
mw.visitInsn(IAND);
mw.visitJumpInsn(IFEQ, label);
}
private void _deserialzeArrayMapping(ClassWriter cw, Context context) {
MethodVisitor mw = new MethodWriter(cw, ACC_PUBLIC, "deserialzeArrayMapping",
"(L" + DefaultJSONParser + ";Ljava/lang/reflect/Type;Ljava/lang/Object;Ljava/lang/Object;)Ljava/lang/Object;",
null, null);
defineVarLexer(context, mw);
mw.visitVarInsn(ALOAD, context.var("lexer"));
mw.visitVarInsn(ALOAD, 1);
mw.visitMethodInsn(INVOKEVIRTUAL, DefaultJSONParser, "getSymbolTable", "()" + desc(SymbolTable.class));
mw.visitMethodInsn(INVOKEVIRTUAL, JSONLexerBase, "scanTypeName", "(" + desc(SymbolTable.class) + ")Ljava/lang/String;");
mw.visitVarInsn(ASTORE, context.var("typeName"));
Label typeNameNotNull_ = new Label();
mw.visitVarInsn(ALOAD, context.var("typeName"));
mw.visitJumpInsn(IFNULL, typeNameNotNull_);
mw.visitVarInsn(ALOAD, 1);
mw.visitMethodInsn(INVOKEVIRTUAL, DefaultJSONParser, "getConfig", "()" + desc(ParserConfig.class));
mw.visitVarInsn(ALOAD, 0);
mw.visitFieldInsn(GETFIELD, type(JavaBeanDeserializer.class), "beanInfo", desc(JavaBeanInfo.class));
mw.visitVarInsn(ALOAD, context.var("typeName"));
mw.visitMethodInsn(INVOKESTATIC, type(JavaBeanDeserializer.class), "getSeeAlso"
, "(" + desc(ParserConfig.class) + desc(JavaBeanInfo.class) + "Ljava/lang/String;)" + desc(JavaBeanDeserializer.class));
mw.visitVarInsn(ASTORE, context.var("userTypeDeser"));
mw.visitVarInsn(ALOAD, context.var("userTypeDeser"));
mw.visitTypeInsn(INSTANCEOF, type(JavaBeanDeserializer.class));
mw.visitJumpInsn(IFEQ, typeNameNotNull_);
mw.visitVarInsn(ALOAD, context.var("userTypeDeser"));
mw.visitVarInsn(ALOAD, Context.parser);
mw.visitVarInsn(ALOAD, 2);
mw.visitVarInsn(ALOAD, 3);
mw.visitVarInsn(ALOAD, 4);
mw.visitMethodInsn(INVOKEVIRTUAL, //
type(JavaBeanDeserializer.class), //
"deserialzeArrayMapping", //
"(L" + DefaultJSONParser + ";Ljava/lang/reflect/Type;Ljava/lang/Object;Ljava/lang/Object;)Ljava/lang/Object;");
mw.visitInsn(ARETURN);
mw.visitLabel(typeNameNotNull_);
_createInstance(context, mw);
FieldInfo[] sortedFieldInfoList = context.beanInfo.sortedFields;
int fieldListSize = sortedFieldInfoList.length;
for (int i = 0; i < fieldListSize; ++i) {
final boolean last = (i == fieldListSize - 1);
final char seperator = last ? ']' : ',';
FieldInfo fieldInfo = sortedFieldInfoList[i];
Class<?> fieldClass = fieldInfo.fieldClass;
Type fieldType = fieldInfo.fieldType;
if (fieldClass == byte.class //
|| fieldClass == short.class //
|| fieldClass == int.class) {
mw.visitVarInsn(ALOAD, context.var("lexer"));
mw.visitVarInsn(BIPUSH, seperator);
mw.visitMethodInsn(INVOKEVIRTUAL, JSONLexerBase, "scanInt", "(C)I");
mw.visitVarInsn(ISTORE, context.var_asm(fieldInfo));
} else if (fieldClass == Byte.class) {
mw.visitVarInsn(ALOAD, context.var("lexer"));
mw.visitVarInsn(BIPUSH, seperator);
mw.visitMethodInsn(INVOKEVIRTUAL, JSONLexerBase, "scanInt", "(C)I");
mw.visitMethodInsn(INVOKESTATIC, "java/lang/Byte", "valueOf", "(B)Ljava/lang/Byte;");
mw.visitVarInsn(ASTORE, context.var_asm(fieldInfo));
Label valueNullEnd_ = new Label();
mw.visitVarInsn(ALOAD, context.var("lexer"));
mw.visitFieldInsn(GETFIELD, JSONLexerBase, "matchStat", "I");
mw.visitLdcInsn(com.alibaba.fastjson.parser.JSONLexerBase.VALUE_NULL);
mw.visitJumpInsn(IF_ICMPNE, valueNullEnd_);
mw.visitInsn(ACONST_NULL);
mw.visitVarInsn(ASTORE, context.var_asm(fieldInfo));
mw.visitLabel(valueNullEnd_);
} else if (fieldClass == Short.class) {
mw.visitVarInsn(ALOAD, context.var("lexer"));
mw.visitVarInsn(BIPUSH, seperator);
mw.visitMethodInsn(INVOKEVIRTUAL, JSONLexerBase, "scanInt", "(C)I");
mw.visitMethodInsn(INVOKESTATIC, "java/lang/Short", "valueOf", "(S)Ljava/lang/Short;");
mw.visitVarInsn(ASTORE, context.var_asm(fieldInfo));
Label valueNullEnd_ = new Label();
mw.visitVarInsn(ALOAD, context.var("lexer"));
mw.visitFieldInsn(GETFIELD, JSONLexerBase, "matchStat", "I");
mw.visitLdcInsn(com.alibaba.fastjson.parser.JSONLexerBase.VALUE_NULL);
mw.visitJumpInsn(IF_ICMPNE, valueNullEnd_);
mw.visitInsn(ACONST_NULL);
mw.visitVarInsn(ASTORE, context.var_asm(fieldInfo));
mw.visitLabel(valueNullEnd_);
} else if (fieldClass == Integer.class) {
mw.visitVarInsn(ALOAD, context.var("lexer"));
mw.visitVarInsn(BIPUSH, seperator);
mw.visitMethodInsn(INVOKEVIRTUAL, JSONLexerBase, "scanInt", "(C)I");
mw.visitMethodInsn(INVOKESTATIC, "java/lang/Integer", "valueOf", "(I)Ljava/lang/Integer;");
mw.visitVarInsn(ASTORE, context.var_asm(fieldInfo));
Label valueNullEnd_ = new Label();
mw.visitVarInsn(ALOAD, context.var("lexer"));
mw.visitFieldInsn(GETFIELD, JSONLexerBase, "matchStat", "I");
mw.visitLdcInsn(com.alibaba.fastjson.parser.JSONLexerBase.VALUE_NULL);
mw.visitJumpInsn(IF_ICMPNE, valueNullEnd_);
mw.visitInsn(ACONST_NULL);
mw.visitVarInsn(ASTORE, context.var_asm(fieldInfo));
mw.visitLabel(valueNullEnd_);
} else if (fieldClass == long.class) {
mw.visitVarInsn(ALOAD, context.var("lexer"));
mw.visitVarInsn(BIPUSH, seperator);
mw.visitMethodInsn(INVOKEVIRTUAL, JSONLexerBase, "scanLong", "(C)J");
mw.visitVarInsn(LSTORE, context.var_asm(fieldInfo, 2));
} else if (fieldClass == Long.class) {
mw.visitVarInsn(ALOAD, context.var("lexer"));
mw.visitVarInsn(BIPUSH, seperator);
mw.visitMethodInsn(INVOKEVIRTUAL, JSONLexerBase, "scanLong", "(C)J");
mw.visitMethodInsn(INVOKESTATIC, "java/lang/Long", "valueOf", "(J)Ljava/lang/Long;");
mw.visitVarInsn(ASTORE, context.var_asm(fieldInfo));
Label valueNullEnd_ = new Label();
mw.visitVarInsn(ALOAD, context.var("lexer"));
mw.visitFieldInsn(GETFIELD, JSONLexerBase, "matchStat", "I");
mw.visitLdcInsn(com.alibaba.fastjson.parser.JSONLexerBase.VALUE_NULL);
mw.visitJumpInsn(IF_ICMPNE, valueNullEnd_);
mw.visitInsn(ACONST_NULL);
mw.visitVarInsn(ASTORE, context.var_asm(fieldInfo));
mw.visitLabel(valueNullEnd_);
} else if (fieldClass == boolean.class) {
mw.visitVarInsn(ALOAD, context.var("lexer"));
mw.visitVarInsn(BIPUSH, seperator);
mw.visitMethodInsn(INVOKEVIRTUAL, JSONLexerBase, "scanBoolean", "(C)Z");
mw.visitVarInsn(ISTORE, context.var_asm(fieldInfo));
} else if (fieldClass == float.class) {
mw.visitVarInsn(ALOAD, context.var("lexer"));
mw.visitVarInsn(BIPUSH, seperator);
mw.visitMethodInsn(INVOKEVIRTUAL, JSONLexerBase, "scanFloat", "(C)F");
mw.visitVarInsn(FSTORE, context.var_asm(fieldInfo));
} else if (fieldClass == Float.class) {
mw.visitVarInsn(ALOAD, context.var("lexer"));
mw.visitVarInsn(BIPUSH, seperator);
mw.visitMethodInsn(INVOKEVIRTUAL, JSONLexerBase, "scanFloat", "(C)F");
mw.visitMethodInsn(INVOKESTATIC, "java/lang/Float", "valueOf", "(F)Ljava/lang/Float;");
mw.visitVarInsn(ASTORE, context.var_asm(fieldInfo));
Label valueNullEnd_ = new Label();
mw.visitVarInsn(ALOAD, context.var("lexer"));
mw.visitFieldInsn(GETFIELD, JSONLexerBase, "matchStat", "I");
mw.visitLdcInsn(com.alibaba.fastjson.parser.JSONLexerBase.VALUE_NULL);
mw.visitJumpInsn(IF_ICMPNE, valueNullEnd_);
mw.visitInsn(ACONST_NULL);
mw.visitVarInsn(ASTORE, context.var_asm(fieldInfo));
mw.visitLabel(valueNullEnd_);
} else if (fieldClass == double.class) {
mw.visitVarInsn(ALOAD, context.var("lexer"));
mw.visitVarInsn(BIPUSH, seperator);
mw.visitMethodInsn(INVOKEVIRTUAL, JSONLexerBase, "scanDouble", "(C)D");
mw.visitVarInsn(DSTORE, context.var_asm(fieldInfo, 2));
} else if (fieldClass == Double.class) {
mw.visitVarInsn(ALOAD, context.var("lexer"));
mw.visitVarInsn(BIPUSH, seperator);
mw.visitMethodInsn(INVOKEVIRTUAL, JSONLexerBase, "scanDouble", "(C)D");
mw.visitMethodInsn(INVOKESTATIC, "java/lang/Double", "valueOf", "(D)Ljava/lang/Double;");
mw.visitVarInsn(ASTORE, context.var_asm(fieldInfo));
Label valueNullEnd_ = new Label();
mw.visitVarInsn(ALOAD, context.var("lexer"));
mw.visitFieldInsn(GETFIELD, JSONLexerBase, "matchStat", "I");
mw.visitLdcInsn(com.alibaba.fastjson.parser.JSONLexerBase.VALUE_NULL);
mw.visitJumpInsn(IF_ICMPNE, valueNullEnd_);
mw.visitInsn(ACONST_NULL);
mw.visitVarInsn(ASTORE, context.var_asm(fieldInfo));
mw.visitLabel(valueNullEnd_);
} else if (fieldClass == char.class) {
mw.visitVarInsn(ALOAD, context.var("lexer"));
mw.visitVarInsn(BIPUSH, seperator);
mw.visitMethodInsn(INVOKEVIRTUAL, JSONLexerBase, "scanString", "(C)Ljava/lang/String;");
mw.visitInsn(ICONST_0);
mw.visitMethodInsn(INVOKEVIRTUAL, "java/lang/String", "charAt", "(I)C");
mw.visitVarInsn(ISTORE, context.var_asm(fieldInfo));
} else if (fieldClass == String.class) {
mw.visitVarInsn(ALOAD, context.var("lexer"));
mw.visitVarInsn(BIPUSH, seperator);
mw.visitMethodInsn(INVOKEVIRTUAL, JSONLexerBase, "scanString", "(C)Ljava/lang/String;");
mw.visitVarInsn(ASTORE, context.var_asm(fieldInfo));
} else if (fieldClass == BigDecimal.class) {
mw.visitVarInsn(ALOAD, context.var("lexer"));
mw.visitVarInsn(BIPUSH, seperator);
mw.visitMethodInsn(INVOKEVIRTUAL, JSONLexerBase, "scanDecimal", "(C)Ljava/math/BigDecimal;");
mw.visitVarInsn(ASTORE, context.var_asm(fieldInfo));
} else if (fieldClass == java.util.Date.class) {
mw.visitVarInsn(ALOAD, context.var("lexer"));
mw.visitVarInsn(BIPUSH, seperator);
mw.visitMethodInsn(INVOKEVIRTUAL, JSONLexerBase, "scanDate", "(C)Ljava/util/Date;");
mw.visitVarInsn(ASTORE, context.var_asm(fieldInfo));
} else if (fieldClass == java.util.UUID.class) {
mw.visitVarInsn(ALOAD, context.var("lexer"));
mw.visitVarInsn(BIPUSH, seperator);
mw.visitMethodInsn(INVOKEVIRTUAL, JSONLexerBase, "scanUUID", "(C)Ljava/util/UUID;");
mw.visitVarInsn(ASTORE, context.var_asm(fieldInfo));
} else if (fieldClass.isEnum()) {
Label enumNumIf_ = new Label();
Label enumNumErr_ = new Label();
Label enumStore_ = new Label();
Label enumQuote_ = new Label();
mw.visitVarInsn(ALOAD, context.var("lexer"));
mw.visitMethodInsn(INVOKEVIRTUAL, JSONLexerBase, "getCurrent", "()C");
mw.visitInsn(DUP);
mw.visitVarInsn(ISTORE, context.var("ch"));
mw.visitLdcInsn((int) 'n');
mw.visitJumpInsn(IF_ICMPEQ, enumQuote_);
mw.visitVarInsn(ILOAD, context.var("ch"));
mw.visitLdcInsn((int) '\"');
mw.visitJumpInsn(IF_ICMPNE, enumNumIf_);
mw.visitLabel(enumQuote_);
mw.visitVarInsn(ALOAD, context.var("lexer"));
mw.visitLdcInsn(com.alibaba.fastjson.asm.Type.getType(desc(fieldClass)));
mw.visitVarInsn(ALOAD, 1);
mw.visitMethodInsn(INVOKEVIRTUAL, DefaultJSONParser, "getSymbolTable", "()" + desc(SymbolTable.class));
mw.visitVarInsn(BIPUSH, seperator);
mw.visitMethodInsn(INVOKEVIRTUAL, JSONLexerBase, "scanEnum",
"(Ljava/lang/Class;" + desc(SymbolTable.class) + "C)Ljava/lang/Enum;");
mw.visitJumpInsn(GOTO, enumStore_);
// (ch >= '0' && ch <= '9') {
mw.visitLabel(enumNumIf_);
mw.visitVarInsn(ILOAD, context.var("ch"));
mw.visitLdcInsn((int) '0');
mw.visitJumpInsn(IF_ICMPLT, enumNumErr_);
mw.visitVarInsn(ILOAD, context.var("ch"));
mw.visitLdcInsn((int) '9');
mw.visitJumpInsn(IF_ICMPGT, enumNumErr_);
_getFieldDeser(context, mw, fieldInfo);
mw.visitTypeInsn(CHECKCAST, type(EnumDeserializer.class)); // cast
mw.visitVarInsn(ALOAD, context.var("lexer"));
mw.visitVarInsn(BIPUSH, seperator);
mw.visitMethodInsn(INVOKEVIRTUAL, JSONLexerBase, "scanInt", "(C)I");
mw.visitMethodInsn(INVOKEVIRTUAL, type(EnumDeserializer.class), "valueOf", "(I)Ljava/lang/Enum;");
mw.visitJumpInsn(GOTO, enumStore_);
mw.visitLabel(enumNumErr_);
mw.visitVarInsn(ALOAD, 0);
mw.visitVarInsn(ALOAD, context.var("lexer"));
mw.visitVarInsn(BIPUSH, seperator);
mw.visitMethodInsn(INVOKEVIRTUAL, type(JavaBeanDeserializer.class), "scanEnum",
"(L" + JSONLexerBase + ";C)Ljava/lang/Enum;");
mw.visitLabel(enumStore_);
mw.visitTypeInsn(CHECKCAST, type(fieldClass)); // cast
mw.visitVarInsn(ASTORE, context.var_asm(fieldInfo));
} else if (Collection.class.isAssignableFrom(fieldClass)) {
Class<?> itemClass = TypeUtils.getCollectionItemClass(fieldType);
if (itemClass == String.class) {
if (fieldClass == List.class
|| fieldClass == Collections.class
|| fieldClass == ArrayList.class
) {
mw.visitTypeInsn(NEW, type(ArrayList.class));
mw.visitInsn(DUP);
mw.visitMethodInsn(INVOKESPECIAL, type(ArrayList.class), "<init>", "()V");
} else {
mw.visitLdcInsn(com.alibaba.fastjson.asm.Type.getType(desc(fieldClass)));
mw.visitMethodInsn(INVOKESTATIC, type(TypeUtils.class), "createCollection",
"(Ljava/lang/Class;)Ljava/util/Collection;");
}
mw.visitVarInsn(ASTORE, context.var_asm(fieldInfo));
mw.visitVarInsn(ALOAD, context.var("lexer"));
mw.visitVarInsn(ALOAD, context.var_asm(fieldInfo));
mw.visitVarInsn(BIPUSH, seperator);
mw.visitMethodInsn(INVOKEVIRTUAL, JSONLexerBase, "scanStringArray", "(Ljava/util/Collection;C)V");
Label valueNullEnd_ = new Label();
mw.visitVarInsn(ALOAD, context.var("lexer"));
mw.visitFieldInsn(GETFIELD, JSONLexerBase, "matchStat", "I");
mw.visitLdcInsn(com.alibaba.fastjson.parser.JSONLexerBase.VALUE_NULL);
mw.visitJumpInsn(IF_ICMPNE, valueNullEnd_);
mw.visitInsn(ACONST_NULL);
mw.visitVarInsn(ASTORE, context.var_asm(fieldInfo));
mw.visitLabel(valueNullEnd_);
} else {
Label notError_ = new Label();
mw.visitVarInsn(ALOAD, context.var("lexer"));
mw.visitMethodInsn(INVOKEVIRTUAL, JSONLexerBase, "token", "()I");
mw.visitVarInsn(ISTORE, context.var("token"));
mw.visitVarInsn(ILOAD, context.var("token"));
int token = i == 0 ? JSONToken.LBRACKET : JSONToken.COMMA;
mw.visitLdcInsn(token);
mw.visitJumpInsn(IF_ICMPEQ, notError_);
mw.visitVarInsn(ALOAD, 1); // DefaultJSONParser
mw.visitLdcInsn(token);
mw.visitMethodInsn(INVOKEVIRTUAL, DefaultJSONParser, "throwException", "(I)V");
mw.visitLabel(notError_);
Label quickElse_ = new Label(), quickEnd_ = new Label();
mw.visitVarInsn(ALOAD, context.var("lexer"));
mw.visitMethodInsn(INVOKEVIRTUAL, JSONLexerBase, "getCurrent", "()C");
mw.visitVarInsn(BIPUSH, '[');
mw.visitJumpInsn(IF_ICMPNE, quickElse_);
mw.visitVarInsn(ALOAD, context.var("lexer"));
mw.visitMethodInsn(INVOKEVIRTUAL, JSONLexerBase, "next", "()C");
mw.visitInsn(POP);
mw.visitVarInsn(ALOAD, context.var("lexer"));
mw.visitLdcInsn(JSONToken.LBRACKET);
mw.visitMethodInsn(INVOKEVIRTUAL, JSONLexerBase, "setToken", "(I)V");
mw.visitJumpInsn(GOTO, quickEnd_);
mw.visitLabel(quickElse_);
mw.visitVarInsn(ALOAD, context.var("lexer"));
mw.visitLdcInsn(JSONToken.LBRACKET);
mw.visitMethodInsn(INVOKEVIRTUAL, JSONLexerBase, "nextToken", "(I)V");
mw.visitLabel(quickEnd_);
_newCollection(mw, fieldClass, i, false);
mw.visitInsn(DUP);
mw.visitVarInsn(ASTORE, context.var_asm(fieldInfo));
_getCollectionFieldItemDeser(context, mw, fieldInfo, itemClass);
mw.visitVarInsn(ALOAD, 1);
mw.visitLdcInsn(com.alibaba.fastjson.asm.Type.getType(desc(itemClass)));
mw.visitVarInsn(ALOAD, 3);
mw.visitMethodInsn(INVOKESTATIC, type(JavaBeanDeserializer.class),
"parseArray",
"(Ljava/util/Collection;" //
+ desc(ObjectDeserializer.class) //
+ "L" + DefaultJSONParser + ";" //
+ "Ljava/lang/reflect/Type;Ljava/lang/Object;)V");
}
} else if (fieldClass.isArray()) {
mw.visitVarInsn(ALOAD, context.var("lexer"));
mw.visitLdcInsn(com.alibaba.fastjson.parser.JSONToken.LBRACKET);
mw.visitMethodInsn(INVOKEVIRTUAL, JSONLexerBase, "nextToken", "(I)V");
mw.visitVarInsn(ALOAD, Context.parser);
mw.visitVarInsn(ALOAD, 0);
mw.visitLdcInsn(i);
mw.visitMethodInsn(INVOKEVIRTUAL, type(JavaBeanDeserializer.class), "getFieldType",
"(I)Ljava/lang/reflect/Type;");
mw.visitMethodInsn(INVOKEVIRTUAL, DefaultJSONParser, "parseObject",
"(Ljava/lang/reflect/Type;)Ljava/lang/Object;");
mw.visitTypeInsn(CHECKCAST, type(fieldClass)); // cast
mw.visitVarInsn(ASTORE, context.var_asm(fieldInfo));
} else {
Label objElseIf_ = new Label();
Label objEndIf_ = new Label();
if (fieldClass == java.util.Date.class) {
mw.visitVarInsn(ALOAD, context.var("lexer"));
mw.visitMethodInsn(INVOKEVIRTUAL, JSONLexerBase, "getCurrent", "()C");
mw.visitLdcInsn((int) '1');
mw.visitJumpInsn(IF_ICMPNE, objElseIf_);
mw.visitTypeInsn(NEW, type(java.util.Date.class));
mw.visitInsn(DUP);
mw.visitVarInsn(ALOAD, context.var("lexer"));
mw.visitVarInsn(BIPUSH, seperator);
mw.visitMethodInsn(INVOKEVIRTUAL, JSONLexerBase, "scanLong", "(C)J");
mw.visitMethodInsn(INVOKESPECIAL, type(java.util.Date.class), "<init>", "(J)V");
mw.visitVarInsn(ASTORE, context.var_asm(fieldInfo));
mw.visitJumpInsn(GOTO, objEndIf_);
}
mw.visitLabel(objElseIf_);
_quickNextToken(context, mw, JSONToken.LBRACKET);
_deserObject(context, mw, fieldInfo, fieldClass, i);
mw.visitVarInsn(ALOAD, context.var("lexer"));
mw.visitMethodInsn(INVOKEVIRTUAL, JSONLexerBase, "token", "()I");
mw.visitLdcInsn(JSONToken.RBRACKET);
mw.visitJumpInsn(IF_ICMPEQ, objEndIf_);
// mw.visitInsn(POP);
// mw.visitInsn(POP);
mw.visitVarInsn(ALOAD, 0);
mw.visitVarInsn(ALOAD, context.var("lexer"));
if (!last) {
mw.visitLdcInsn(JSONToken.COMMA);
} else {
mw.visitLdcInsn(JSONToken.RBRACKET);
}
mw.visitMethodInsn(INVOKESPECIAL, //
type(JavaBeanDeserializer.class), //
"check", "(" + desc(JSONLexer.class) + "I)V");
mw.visitLabel(objEndIf_);
continue;
}
}
_batchSet(context, mw, false);
Label quickElse_ = new Label(), quickElseIf_ = new Label(), quickElseIfEOI_ = new Label(),
quickEnd_ = new Label();
mw.visitVarInsn(ALOAD, context.var("lexer"));
mw.visitMethodInsn(INVOKEVIRTUAL, JSONLexerBase, "getCurrent", "()C");
mw.visitInsn(DUP);
mw.visitVarInsn(ISTORE, context.var("ch"));
mw.visitVarInsn(BIPUSH, ',');
mw.visitJumpInsn(IF_ICMPNE, quickElseIf_);
mw.visitVarInsn(ALOAD, context.var("lexer"));
mw.visitMethodInsn(INVOKEVIRTUAL, JSONLexerBase, "next", "()C");
mw.visitInsn(POP);
mw.visitVarInsn(ALOAD, context.var("lexer"));
mw.visitLdcInsn(JSONToken.COMMA);
mw.visitMethodInsn(INVOKEVIRTUAL, JSONLexerBase, "setToken", "(I)V");
mw.visitJumpInsn(GOTO, quickEnd_);
mw.visitLabel(quickElseIf_);
mw.visitVarInsn(ILOAD, context.var("ch"));
mw.visitVarInsn(BIPUSH, ']');
mw.visitJumpInsn(IF_ICMPNE, quickElseIfEOI_);
mw.visitVarInsn(ALOAD, context.var("lexer"));
mw.visitMethodInsn(INVOKEVIRTUAL, JSONLexerBase, "next", "()C");
mw.visitInsn(POP);
mw.visitVarInsn(ALOAD, context.var("lexer"));
mw.visitLdcInsn(JSONToken.RBRACKET);
mw.visitMethodInsn(INVOKEVIRTUAL, JSONLexerBase, "setToken", "(I)V");
mw.visitJumpInsn(GOTO, quickEnd_);
mw.visitLabel(quickElseIfEOI_);
mw.visitVarInsn(ILOAD, context.var("ch"));
mw.visitVarInsn(BIPUSH, (char) JSONLexer.EOI);
mw.visitJumpInsn(IF_ICMPNE, quickElse_);
mw.visitVarInsn(ALOAD, context.var("lexer"));
mw.visitMethodInsn(INVOKEVIRTUAL, JSONLexerBase, "next", "()C");
mw.visitInsn(POP);
mw.visitVarInsn(ALOAD, context.var("lexer"));
mw.visitLdcInsn(JSONToken.EOF);
mw.visitMethodInsn(INVOKEVIRTUAL, JSONLexerBase, "setToken", "(I)V");
mw.visitJumpInsn(GOTO, quickEnd_);
mw.visitLabel(quickElse_);
mw.visitVarInsn(ALOAD, context.var("lexer"));
mw.visitLdcInsn(JSONToken.COMMA);
mw.visitMethodInsn(INVOKEVIRTUAL, JSONLexerBase, "nextToken", "(I)V");
mw.visitLabel(quickEnd_);
mw.visitVarInsn(ALOAD, context.var("instance"));
mw.visitInsn(ARETURN);
mw.visitMaxs(5, context.variantIndex);
mw.visitEnd();
}
private void _deserialze(ClassWriter cw, Context context) {
if (context.fieldInfoList.length == 0) {
return;
}
for (FieldInfo fieldInfo : context.fieldInfoList) {
Class<?> fieldClass = fieldInfo.fieldClass;
Type fieldType = fieldInfo.fieldType;
if (fieldClass == char.class) {
return;
}
if (Collection.class.isAssignableFrom(fieldClass)) {
if (fieldType instanceof ParameterizedType) {
Type itemType = ((ParameterizedType) fieldType).getActualTypeArguments()[0];
if (itemType instanceof Class) {
continue;
} else {
return;
}
} else {
return;
}
}
}
JavaBeanInfo beanInfo = context.beanInfo;
context.fieldInfoList = beanInfo.sortedFields;
MethodVisitor mw = new MethodWriter(cw, ACC_PUBLIC, "deserialze",
"(L" + DefaultJSONParser + ";Ljava/lang/reflect/Type;Ljava/lang/Object;I)Ljava/lang/Object;",
null, null);
Label reset_ = new Label();
Label super_ = new Label();
Label return_ = new Label();
Label end_ = new Label();
defineVarLexer(context, mw);
{
Label next_ = new Label();
// isSupportArrayToBean
mw.visitVarInsn(ALOAD, context.var("lexer"));
mw.visitMethodInsn(INVOKEVIRTUAL, JSONLexerBase, "token", "()I");
mw.visitLdcInsn(JSONToken.LBRACKET);
mw.visitJumpInsn(IF_ICMPNE, next_);
if ((beanInfo.parserFeatures & Feature.SupportArrayToBean.mask) == 0) {
mw.visitVarInsn(ALOAD, context.var("lexer"));
mw.visitVarInsn(ILOAD, 4);
mw.visitLdcInsn(Feature.SupportArrayToBean.mask);
mw.visitMethodInsn(INVOKEVIRTUAL, JSONLexerBase, "isEnabled", "(II)Z");
mw.visitJumpInsn(IFEQ, next_);
}
mw.visitVarInsn(ALOAD, 0);
mw.visitVarInsn(ALOAD, Context.parser);
mw.visitVarInsn(ALOAD, 2);
mw.visitVarInsn(ALOAD, 3);
mw.visitInsn(ACONST_NULL); //mw.visitVarInsn(ALOAD, 5);
mw.visitMethodInsn(INVOKESPECIAL, //
context.className, //
"deserialzeArrayMapping", //
"(L" + DefaultJSONParser + ";Ljava/lang/reflect/Type;Ljava/lang/Object;Ljava/lang/Object;)Ljava/lang/Object;");
mw.visitInsn(ARETURN);
mw.visitLabel(next_);
// deserialzeArrayMapping
}
mw.visitVarInsn(ALOAD, context.var("lexer"));
mw.visitLdcInsn(Feature.SortFeidFastMatch.mask);
mw.visitMethodInsn(INVOKEVIRTUAL, JSONLexerBase, "isEnabled", "(I)Z");
Label continue_ = new Label();
mw.visitJumpInsn(IFNE, continue_);
mw.visitJumpInsn(GOTO_W, super_);
mw.visitLabel(continue_);
mw.visitVarInsn(ALOAD, context.var("lexer"));
mw.visitLdcInsn(context.clazz.getName());
mw.visitMethodInsn(INVOKEVIRTUAL, JSONLexerBase, "scanType", "(Ljava/lang/String;)I");
mw.visitLdcInsn(com.alibaba.fastjson.parser.JSONLexerBase.NOT_MATCH);
Label continue_2 = new Label();
mw.visitJumpInsn(IF_ICMPNE, continue_2);
mw.visitJumpInsn(GOTO_W, super_);
mw.visitLabel(continue_2);
mw.visitVarInsn(ALOAD, 1); // parser
mw.visitMethodInsn(INVOKEVIRTUAL, DefaultJSONParser, "getContext", "()" + desc(ParseContext.class));
mw.visitVarInsn(ASTORE, context.var("mark_context"));
// ParseContext context = parser.getContext();
mw.visitInsn(ICONST_0);
mw.visitVarInsn(ISTORE, context.var("matchedCount"));
_createInstance(context, mw);
{
mw.visitVarInsn(ALOAD, 1); // parser
mw.visitMethodInsn(INVOKEVIRTUAL, DefaultJSONParser, "getContext", "()" + desc(ParseContext.class));
mw.visitVarInsn(ASTORE, context.var("context"));
mw.visitVarInsn(ALOAD, 1); // parser
mw.visitVarInsn(ALOAD, context.var("context"));
mw.visitVarInsn(ALOAD, context.var("instance"));
mw.visitVarInsn(ALOAD, 3); // fieldName
mw.visitMethodInsn(INVOKEVIRTUAL, DefaultJSONParser, "setContext", //
"(" + desc(ParseContext.class) + "Ljava/lang/Object;Ljava/lang/Object;)"
+ desc(ParseContext.class));
mw.visitVarInsn(ASTORE, context.var("childContext"));
}
mw.visitVarInsn(ALOAD, context.var("lexer"));
mw.visitFieldInsn(GETFIELD, JSONLexerBase, "matchStat", "I");
mw.visitLdcInsn(com.alibaba.fastjson.parser.JSONLexerBase.END);
//mw.visitJumpInsn(IF_ICMPEQ, return_);
Label continue_3 = new Label();
mw.visitJumpInsn(IF_ICMPNE, continue_3);
mw.visitJumpInsn(GOTO_W, return_);
mw.visitLabel(continue_3);
mw.visitInsn(ICONST_0); // UNKOWN
mw.visitIntInsn(ISTORE, context.var("matchStat"));
int fieldListSize = context.fieldInfoList.length;
for (int i = 0; i < fieldListSize; i += 32) {
mw.visitInsn(ICONST_0);
mw.visitVarInsn(ISTORE, context.var("_asm_flag_" + (i / 32)));
}
mw.visitVarInsn(ALOAD, context.var("lexer"));
mw.visitLdcInsn(Feature.InitStringFieldAsEmpty.mask);
mw.visitMethodInsn(INVOKEVIRTUAL, JSONLexerBase, "isEnabled", "(I)Z");
mw.visitIntInsn(ISTORE, context.var("initStringFieldAsEmpty"));
// declare and init
for (int i = 0; i < fieldListSize; ++i) {
FieldInfo fieldInfo = context.fieldInfoList[i];
Class<?> fieldClass = fieldInfo.fieldClass;
if (fieldClass == boolean.class //
|| fieldClass == byte.class //
|| fieldClass == short.class //
|| fieldClass == int.class) {
mw.visitInsn(ICONST_0);
mw.visitVarInsn(ISTORE, context.var_asm(fieldInfo));
} else if (fieldClass == long.class) {
mw.visitInsn(LCONST_0);
mw.visitVarInsn(LSTORE, context.var_asm(fieldInfo, 2));
} else if (fieldClass == float.class) {
mw.visitInsn(FCONST_0);
mw.visitVarInsn(FSTORE, context.var_asm(fieldInfo));
} else if (fieldClass == double.class) {
mw.visitInsn(DCONST_0);
mw.visitVarInsn(DSTORE, context.var_asm(fieldInfo, 2));
} else {
if (fieldClass == String.class) {
Label flagEnd_ = new Label();
Label flagElse_ = new Label();
mw.visitVarInsn(ILOAD, context.var("initStringFieldAsEmpty"));
mw.visitJumpInsn(IFEQ, flagElse_);
_setFlag(mw, context, i);
mw.visitVarInsn(ALOAD, context.var("lexer"));
mw.visitMethodInsn(INVOKEVIRTUAL, JSONLexerBase, "stringDefaultValue", "()Ljava/lang/String;");
mw.visitJumpInsn(GOTO, flagEnd_);
mw.visitLabel(flagElse_);
mw.visitInsn(ACONST_NULL);
mw.visitLabel(flagEnd_);
} else {
mw.visitInsn(ACONST_NULL);
}
mw.visitTypeInsn(CHECKCAST, type(fieldClass)); // cast
mw.visitVarInsn(ASTORE, context.var_asm(fieldInfo));
}
}
for (int i = 0; i < fieldListSize; ++i) {
FieldInfo fieldInfo = context.fieldInfoList[i];
Class<?> fieldClass = fieldInfo.fieldClass;
Type fieldType = fieldInfo.fieldType;
Label notMatch_ = new Label();
if (fieldClass == boolean.class) {
mw.visitVarInsn(ALOAD, context.var("lexer"));
mw.visitVarInsn(ALOAD, 0);
mw.visitFieldInsn(GETFIELD, context.className, context.fieldName(fieldInfo), "[C");
mw.visitMethodInsn(INVOKEVIRTUAL, JSONLexerBase, "scanFieldBoolean", "([C)Z");
mw.visitVarInsn(ISTORE, context.var_asm(fieldInfo));
} else if (fieldClass == byte.class) {
mw.visitVarInsn(ALOAD, context.var("lexer"));
mw.visitVarInsn(ALOAD, 0);
mw.visitFieldInsn(GETFIELD, context.className, context.fieldName(fieldInfo), "[C");
mw.visitMethodInsn(INVOKEVIRTUAL, JSONLexerBase, "scanFieldInt", "([C)I");
mw.visitVarInsn(ISTORE, context.var_asm(fieldInfo));
} else if (fieldClass == Byte.class) {
mw.visitVarInsn(ALOAD, context.var("lexer"));
mw.visitVarInsn(ALOAD, 0);
mw.visitFieldInsn(GETFIELD, context.className, context.fieldName(fieldInfo), "[C");
mw.visitMethodInsn(INVOKEVIRTUAL, JSONLexerBase, "scanFieldInt", "([C)I");
mw.visitMethodInsn(INVOKESTATIC, "java/lang/Byte", "valueOf", "(B)Ljava/lang/Byte;");
mw.visitVarInsn(ASTORE, context.var_asm(fieldInfo));
Label valueNullEnd_ = new Label();
mw.visitVarInsn(ALOAD, context.var("lexer"));
mw.visitFieldInsn(GETFIELD, JSONLexerBase, "matchStat", "I");
mw.visitLdcInsn(com.alibaba.fastjson.parser.JSONLexerBase.VALUE_NULL);
mw.visitJumpInsn(IF_ICMPNE, valueNullEnd_);
mw.visitInsn(ACONST_NULL);
mw.visitVarInsn(ASTORE, context.var_asm(fieldInfo));
mw.visitLabel(valueNullEnd_);
} else if (fieldClass == short.class) {
mw.visitVarInsn(ALOAD, context.var("lexer"));
mw.visitVarInsn(ALOAD, 0);
mw.visitFieldInsn(GETFIELD, context.className, context.fieldName(fieldInfo), "[C");
mw.visitMethodInsn(INVOKEVIRTUAL, JSONLexerBase, "scanFieldInt", "([C)I");
mw.visitVarInsn(ISTORE, context.var_asm(fieldInfo));
} else if (fieldClass == Short.class) {
mw.visitVarInsn(ALOAD, context.var("lexer"));
mw.visitVarInsn(ALOAD, 0);
mw.visitFieldInsn(GETFIELD, context.className, context.fieldName(fieldInfo), "[C");
mw.visitMethodInsn(INVOKEVIRTUAL, JSONLexerBase, "scanFieldInt", "([C)I");
mw.visitMethodInsn(INVOKESTATIC, "java/lang/Short", "valueOf", "(S)Ljava/lang/Short;");
mw.visitVarInsn(ASTORE, context.var_asm(fieldInfo));
Label valueNullEnd_ = new Label();
mw.visitVarInsn(ALOAD, context.var("lexer"));
mw.visitFieldInsn(GETFIELD, JSONLexerBase, "matchStat", "I");
mw.visitLdcInsn(com.alibaba.fastjson.parser.JSONLexerBase.VALUE_NULL);
mw.visitJumpInsn(IF_ICMPNE, valueNullEnd_);
mw.visitInsn(ACONST_NULL);
mw.visitVarInsn(ASTORE, context.var_asm(fieldInfo));
mw.visitLabel(valueNullEnd_);
} else if (fieldClass == int.class) {
mw.visitVarInsn(ALOAD, context.var("lexer"));
mw.visitVarInsn(ALOAD, 0);
mw.visitFieldInsn(GETFIELD, context.className, context.fieldName(fieldInfo), "[C");
mw.visitMethodInsn(INVOKEVIRTUAL, JSONLexerBase, "scanFieldInt", "([C)I");
mw.visitVarInsn(ISTORE, context.var_asm(fieldInfo));
} else if (fieldClass == Integer.class) {
mw.visitVarInsn(ALOAD, context.var("lexer"));
mw.visitVarInsn(ALOAD, 0);
mw.visitFieldInsn(GETFIELD, context.className, context.fieldName(fieldInfo), "[C");
mw.visitMethodInsn(INVOKEVIRTUAL, JSONLexerBase, "scanFieldInt", "([C)I");
mw.visitMethodInsn(INVOKESTATIC, "java/lang/Integer", "valueOf", "(I)Ljava/lang/Integer;");
mw.visitVarInsn(ASTORE, context.var_asm(fieldInfo));
Label valueNullEnd_ = new Label();
mw.visitVarInsn(ALOAD, context.var("lexer"));
mw.visitFieldInsn(GETFIELD, JSONLexerBase, "matchStat", "I");
mw.visitLdcInsn(com.alibaba.fastjson.parser.JSONLexerBase.VALUE_NULL);
mw.visitJumpInsn(IF_ICMPNE, valueNullEnd_);
mw.visitInsn(ACONST_NULL);
mw.visitVarInsn(ASTORE, context.var_asm(fieldInfo));
mw.visitLabel(valueNullEnd_);
} else if (fieldClass == long.class) {
mw.visitVarInsn(ALOAD, context.var("lexer"));
mw.visitVarInsn(ALOAD, 0);
mw.visitFieldInsn(GETFIELD, context.className, context.fieldName(fieldInfo), "[C");
mw.visitMethodInsn(INVOKEVIRTUAL, JSONLexerBase, "scanFieldLong", "([C)J");
mw.visitVarInsn(LSTORE, context.var_asm(fieldInfo, 2));
} else if (fieldClass == Long.class) {
mw.visitVarInsn(ALOAD, context.var("lexer"));
mw.visitVarInsn(ALOAD, 0);
mw.visitFieldInsn(GETFIELD, context.className, context.fieldName(fieldInfo), "[C");
mw.visitMethodInsn(INVOKEVIRTUAL, JSONLexerBase, "scanFieldLong", "([C)J");
mw.visitMethodInsn(INVOKESTATIC, "java/lang/Long", "valueOf", "(J)Ljava/lang/Long;");
mw.visitVarInsn(ASTORE, context.var_asm(fieldInfo));
Label valueNullEnd_ = new Label();
mw.visitVarInsn(ALOAD, context.var("lexer"));
mw.visitFieldInsn(GETFIELD, JSONLexerBase, "matchStat", "I");
mw.visitLdcInsn(com.alibaba.fastjson.parser.JSONLexerBase.VALUE_NULL);
mw.visitJumpInsn(IF_ICMPNE, valueNullEnd_);
mw.visitInsn(ACONST_NULL);
mw.visitVarInsn(ASTORE, context.var_asm(fieldInfo));
mw.visitLabel(valueNullEnd_);
} else if (fieldClass == float.class) {
mw.visitVarInsn(ALOAD, context.var("lexer"));
mw.visitVarInsn(ALOAD, 0);
mw.visitFieldInsn(GETFIELD, context.className, context.fieldName(fieldInfo), "[C");
mw.visitMethodInsn(INVOKEVIRTUAL, JSONLexerBase, "scanFieldFloat", "([C)F");
mw.visitVarInsn(FSTORE, context.var_asm(fieldInfo));
} else if (fieldClass == Float.class) {
mw.visitVarInsn(ALOAD, context.var("lexer"));
mw.visitVarInsn(ALOAD, 0);
mw.visitFieldInsn(GETFIELD, context.className, context.fieldName(fieldInfo), "[C");
mw.visitMethodInsn(INVOKEVIRTUAL, JSONLexerBase, "scanFieldFloat", "([C)F");
mw.visitMethodInsn(INVOKESTATIC, "java/lang/Float", "valueOf", "(F)Ljava/lang/Float;");
mw.visitVarInsn(ASTORE, context.var_asm(fieldInfo));
Label valueNullEnd_ = new Label();
mw.visitVarInsn(ALOAD, context.var("lexer"));
mw.visitFieldInsn(GETFIELD, JSONLexerBase, "matchStat", "I");
mw.visitLdcInsn(com.alibaba.fastjson.parser.JSONLexerBase.VALUE_NULL);
mw.visitJumpInsn(IF_ICMPNE, valueNullEnd_);
mw.visitInsn(ACONST_NULL);
mw.visitVarInsn(ASTORE, context.var_asm(fieldInfo));
mw.visitLabel(valueNullEnd_);
} else if (fieldClass == double.class) {
mw.visitVarInsn(ALOAD, context.var("lexer"));
mw.visitVarInsn(ALOAD, 0);
mw.visitFieldInsn(GETFIELD, context.className, context.fieldName(fieldInfo), "[C");
mw.visitMethodInsn(INVOKEVIRTUAL, JSONLexerBase, "scanFieldDouble", "([C)D");
mw.visitVarInsn(DSTORE, context.var_asm(fieldInfo, 2));
} else if (fieldClass == Double.class) {
mw.visitVarInsn(ALOAD, context.var("lexer"));
mw.visitVarInsn(ALOAD, 0);
mw.visitFieldInsn(GETFIELD, context.className, context.fieldName(fieldInfo), "[C");
mw.visitMethodInsn(INVOKEVIRTUAL, JSONLexerBase, "scanFieldDouble", "([C)D");
mw.visitMethodInsn(INVOKESTATIC, "java/lang/Double", "valueOf", "(D)Ljava/lang/Double;");
mw.visitVarInsn(ASTORE, context.var_asm(fieldInfo));
Label valueNullEnd_ = new Label();
mw.visitVarInsn(ALOAD, context.var("lexer"));
mw.visitFieldInsn(GETFIELD, JSONLexerBase, "matchStat", "I");
mw.visitLdcInsn(com.alibaba.fastjson.parser.JSONLexerBase.VALUE_NULL);
mw.visitJumpInsn(IF_ICMPNE, valueNullEnd_);
mw.visitInsn(ACONST_NULL);
mw.visitVarInsn(ASTORE, context.var_asm(fieldInfo));
mw.visitLabel(valueNullEnd_);
} else if (fieldClass == String.class) {
mw.visitVarInsn(ALOAD, context.var("lexer"));
mw.visitVarInsn(ALOAD, 0);
mw.visitFieldInsn(GETFIELD, context.className, context.fieldName(fieldInfo), "[C");
mw.visitMethodInsn(INVOKEVIRTUAL, JSONLexerBase, "scanFieldString", "([C)Ljava/lang/String;");
mw.visitVarInsn(ASTORE, context.var_asm(fieldInfo));
} else if (fieldClass == java.util.Date.class) {
mw.visitVarInsn(ALOAD, context.var("lexer"));
mw.visitVarInsn(ALOAD, 0);
mw.visitFieldInsn(GETFIELD, context.className, context.fieldName(fieldInfo), "[C");
mw.visitMethodInsn(INVOKEVIRTUAL, JSONLexerBase, "scanFieldDate", "([C)Ljava/util/Date;");
mw.visitVarInsn(ASTORE, context.var_asm(fieldInfo));
} else if (fieldClass == java.util.UUID.class) {
mw.visitVarInsn(ALOAD, context.var("lexer"));
mw.visitVarInsn(ALOAD, 0);
mw.visitFieldInsn(GETFIELD, context.className, context.fieldName(fieldInfo), "[C");
mw.visitMethodInsn(INVOKEVIRTUAL, JSONLexerBase, "scanFieldUUID", "([C)Ljava/util/UUID;");
mw.visitVarInsn(ASTORE, context.var_asm(fieldInfo));
} else if (fieldClass == BigDecimal.class) {
mw.visitVarInsn(ALOAD, context.var("lexer"));
mw.visitVarInsn(ALOAD, 0);
mw.visitFieldInsn(GETFIELD, context.className, context.fieldName(fieldInfo), "[C");
mw.visitMethodInsn(INVOKEVIRTUAL, JSONLexerBase, "scanFieldDecimal", "([C)Ljava/math/BigDecimal;");
mw.visitVarInsn(ASTORE, context.var_asm(fieldInfo));
} else if (fieldClass == BigInteger.class) {
mw.visitVarInsn(ALOAD, context.var("lexer"));
mw.visitVarInsn(ALOAD, 0);
mw.visitFieldInsn(GETFIELD, context.className, context.fieldName(fieldInfo), "[C");
mw.visitMethodInsn(INVOKEVIRTUAL, JSONLexerBase, "scanFieldBigInteger", "([C)Ljava/math/BigInteger;");
mw.visitVarInsn(ASTORE, context.var_asm(fieldInfo));
} else if (fieldClass == int[].class) {
mw.visitVarInsn(ALOAD, context.var("lexer"));
mw.visitVarInsn(ALOAD, 0);
mw.visitFieldInsn(GETFIELD, context.className, context.fieldName(fieldInfo), "[C");
mw.visitMethodInsn(INVOKEVIRTUAL, JSONLexerBase, "scanFieldIntArray", "([C)[I");
mw.visitVarInsn(ASTORE, context.var_asm(fieldInfo));
} else if (fieldClass == float[].class) {
mw.visitVarInsn(ALOAD, context.var("lexer"));
mw.visitVarInsn(ALOAD, 0);
mw.visitFieldInsn(GETFIELD, context.className, context.fieldName(fieldInfo), "[C");
mw.visitMethodInsn(INVOKEVIRTUAL, JSONLexerBase, "scanFieldFloatArray", "([C)[F");
mw.visitVarInsn(ASTORE, context.var_asm(fieldInfo));
} else if (fieldClass == float[][].class) {
mw.visitVarInsn(ALOAD, context.var("lexer"));
mw.visitVarInsn(ALOAD, 0);
mw.visitFieldInsn(GETFIELD, context.className, context.fieldName(fieldInfo), "[C");
mw.visitMethodInsn(INVOKEVIRTUAL, JSONLexerBase, "scanFieldFloatArray2", "([C)[[F");
mw.visitVarInsn(ASTORE, context.var_asm(fieldInfo));
} else if (fieldClass.isEnum()) {
mw.visitVarInsn(ALOAD, 0);
mw.visitVarInsn(ALOAD, context.var("lexer"));
mw.visitVarInsn(ALOAD, 0);
mw.visitFieldInsn(GETFIELD, context.className, context.fieldName(fieldInfo), "[C");
_getFieldDeser(context, mw, fieldInfo);
mw.visitMethodInsn(INVOKEVIRTUAL, type(JavaBeanDeserializer.class), "scanEnum"
, "(L" + JSONLexerBase + ";[C" + desc(ObjectDeserializer.class) + ")Ljava/lang/Enum;");
mw.visitTypeInsn(CHECKCAST, type(fieldClass)); // cast
mw.visitVarInsn(ASTORE, context.var_asm(fieldInfo));
// } else if (fieldClass.isEnum()) {
// mw.visitVarInsn(ALOAD, context.var("lexer"));
// mw.visitVarInsn(ALOAD, 0);
// mw.visitFieldInsn(GETFIELD, context.className, context.fieldName(fieldInfo), "[C");
// Label enumNull_ = new Label();
// mw.visitInsn(ACONST_NULL);
// mw.visitTypeInsn(CHECKCAST, type(fieldClass)); // cast
// mw.visitVarInsn(ASTORE, context.var_asm(fieldInfo));
//
// mw.visitVarInsn(ALOAD, 1);
//
// mw.visitMethodInsn(INVOKEVIRTUAL, DefaultJSONParser, "getSymbolTable", "()" + desc(SymbolTable.class));
//
// mw.visitMethodInsn(INVOKEVIRTUAL, JSONLexerBase, "scanFieldSymbol",
// "([C" + desc(SymbolTable.class) + ")Ljava/lang/String;");
// mw.visitInsn(DUP);
// mw.visitVarInsn(ASTORE, context.var(fieldInfo.name + "_asm_enumName"));
//
// mw.visitJumpInsn(IFNULL, enumNull_);
//
// mw.visitVarInsn(ALOAD, context.var(fieldInfo.name + "_asm_enumName"));
// mw.visitMethodInsn(INVOKEVIRTUAL, type(String.class), "length", "()I");
// mw.visitJumpInsn(IFEQ, enumNull_);
//
// mw.visitVarInsn(ALOAD, context.var(fieldInfo.name + "_asm_enumName"));
// mw.visitMethodInsn(INVOKESTATIC, type(fieldClass), "valueOf",
// "(Ljava/lang/String;)" + desc(fieldClass));
// mw.visitVarInsn(ASTORE, context.var_asm(fieldInfo));
// mw.visitLabel(enumNull_);
} else if (Collection.class.isAssignableFrom(fieldClass)) {
mw.visitVarInsn(ALOAD, context.var("lexer"));
mw.visitVarInsn(ALOAD, 0);
mw.visitFieldInsn(GETFIELD, context.className, context.fieldName(fieldInfo), "[C");
Class<?> itemClass = TypeUtils.getCollectionItemClass(fieldType);
if (itemClass == String.class) {
mw.visitLdcInsn(com.alibaba.fastjson.asm.Type.getType(desc(fieldClass))); // cast
mw.visitMethodInsn(INVOKEVIRTUAL, JSONLexerBase, "scanFieldStringArray",
"([CLjava/lang/Class;)" + desc(Collection.class));
mw.visitVarInsn(ASTORE, context.var_asm(fieldInfo));
} else {
_deserialze_list_obj(context, mw, reset_, fieldInfo, fieldClass, itemClass, i);
if (i == fieldListSize - 1) {
_deserialize_endCheck(context, mw, reset_);
}
continue;
}
} else {
_deserialze_obj(context, mw, reset_, fieldInfo, fieldClass, i);
if (i == fieldListSize - 1) {
_deserialize_endCheck(context, mw, reset_);
}
continue;
}
mw.visitVarInsn(ALOAD, context.var("lexer"));
mw.visitFieldInsn(GETFIELD, JSONLexerBase, "matchStat", "I");
Label flag_ = new Label();
// mw.visitInsn(DUP);
mw.visitJumpInsn(IFLE, flag_);
_setFlag(mw, context, i);
mw.visitLabel(flag_);
mw.visitVarInsn(ALOAD, context.var("lexer"));
mw.visitFieldInsn(GETFIELD, JSONLexerBase, "matchStat", "I");
mw.visitInsn(DUP);
mw.visitVarInsn(ISTORE, context.var("matchStat"));
mw.visitLdcInsn(com.alibaba.fastjson.parser.JSONLexerBase.NOT_MATCH);
mw.visitJumpInsn(IF_ICMPEQ, reset_);
mw.visitVarInsn(ALOAD, context.var("lexer"));
mw.visitFieldInsn(GETFIELD, JSONLexerBase, "matchStat", "I");
mw.visitJumpInsn(IFLE, notMatch_);
// increment matchedCount
mw.visitVarInsn(ILOAD, context.var("matchedCount"));
mw.visitInsn(ICONST_1);
mw.visitInsn(IADD);
mw.visitVarInsn(ISTORE, context.var("matchedCount"));
mw.visitVarInsn(ALOAD, context.var("lexer"));
mw.visitFieldInsn(GETFIELD, JSONLexerBase, "matchStat", "I");
mw.visitLdcInsn(com.alibaba.fastjson.parser.JSONLexerBase.END);
mw.visitJumpInsn(IF_ICMPEQ, end_);
mw.visitLabel(notMatch_);
if (i == fieldListSize - 1) {
mw.visitVarInsn(ALOAD, context.var("lexer"));
mw.visitFieldInsn(GETFIELD, JSONLexerBase, "matchStat", "I");
mw.visitLdcInsn(com.alibaba.fastjson.parser.JSONLexerBase.END);
mw.visitJumpInsn(IF_ICMPNE, reset_);
}
} // endFor
mw.visitLabel(end_);
if (!context.clazz.isInterface() && !Modifier.isAbstract(context.clazz.getModifiers())) {
_batchSet(context, mw);
}
mw.visitLabel(return_);
_setContext(context, mw);
mw.visitVarInsn(ALOAD, context.var("instance"));
Method buildMethod = context.beanInfo.buildMethod;
if (buildMethod != null) {
mw.visitMethodInsn(INVOKEVIRTUAL, type(context.getInstClass()), buildMethod.getName(),
"()" + desc(buildMethod.getReturnType()));
}
mw.visitInsn(ARETURN);
mw.visitLabel(reset_);
_batchSet(context, mw);
mw.visitVarInsn(ALOAD, 0);
mw.visitVarInsn(ALOAD, 1);
mw.visitVarInsn(ALOAD, 2);
mw.visitVarInsn(ALOAD, 3);
mw.visitVarInsn(ALOAD, context.var("instance"));
mw.visitVarInsn(ILOAD, 4);
int flagSize = (fieldListSize / 32);
if (fieldListSize != 0 && (fieldListSize % 32) != 0) {
flagSize += 1;
}
if (flagSize == 1) {
mw.visitInsn(ICONST_1);
} else {
mw.visitIntInsn(BIPUSH, flagSize);
}
mw.visitIntInsn(NEWARRAY, T_INT);
for (int i = 0; i < flagSize; ++i) {
mw.visitInsn(DUP);
if (i == 0) {
mw.visitInsn(ICONST_0);
} else if (i == 1) {
mw.visitInsn(ICONST_1);
} else {
mw.visitIntInsn(BIPUSH, i);
}
mw.visitVarInsn(ILOAD, context.var("_asm_flag_" + i));
mw.visitInsn(IASTORE);
}
mw.visitMethodInsn(INVOKEVIRTUAL, type(JavaBeanDeserializer.class),
"parseRest", "(L" + DefaultJSONParser
+ ";Ljava/lang/reflect/Type;Ljava/lang/Object;Ljava/lang/Object;I[I)Ljava/lang/Object;");
mw.visitTypeInsn(CHECKCAST, type(context.clazz)); // cast
mw.visitInsn(ARETURN);
mw.visitLabel(super_);
mw.visitVarInsn(ALOAD, 0);
mw.visitVarInsn(ALOAD, 1);
mw.visitVarInsn(ALOAD, 2);
mw.visitVarInsn(ALOAD, 3);
mw.visitVarInsn(ILOAD, 4);
mw.visitMethodInsn(INVOKESPECIAL, type(JavaBeanDeserializer.class), //
"deserialze", //
"(L" + DefaultJSONParser + ";Ljava/lang/reflect/Type;Ljava/lang/Object;I)Ljava/lang/Object;");
mw.visitInsn(ARETURN);
mw.visitMaxs(10, context.variantIndex);
mw.visitEnd();
}
/**
 * Emits bytecode equivalent to: {@code JSONLexerBase lexer = (JSONLexerBase) parser.lexer;}
 * storing the lexer into the local-variable slot registered as "lexer" in the context.
 * Local slot 1 is the DefaultJSONParser parameter of the generated method.
 */
private void defineVarLexer(Context context, MethodVisitor mw) {
    mw.visitVarInsn(ALOAD, 1); // load parser (arg 1)
    mw.visitFieldInsn(GETFIELD, DefaultJSONParser, "lexer", desc(JSONLexer.class));
    mw.visitTypeInsn(CHECKCAST, JSONLexerBase); // cast JSONLexer -> JSONLexerBase
    mw.visitVarInsn(ASTORE, context.var("lexer"));
}
/**
 * Emits bytecode that instantiates the target bean and stores it in the "instance" local.
 * <p>
 * If the bean has a public default constructor the generated code is a direct
 * {@code new T()} (NEW/DUP/INVOKESPECIAL). Otherwise it falls back to calling
 * {@code JavaBeanDeserializer.createInstance(parser, clazz)} on the generated
 * deserializer itself (local slot 0 = this, slot 1 = parser) and casts the result.
 * <p>
 * NOTE(review): {@code beanInfo.defaultConstructor} is dereferenced unconditionally;
 * presumably callers only reach this path when a default constructor exists — confirm.
 */
private void _createInstance(Context context, MethodVisitor mw) {
    JavaBeanInfo beanInfo = context.beanInfo;
    Constructor<?> defaultConstructor = beanInfo.defaultConstructor;
    if (Modifier.isPublic(defaultConstructor.getModifiers())) {
        // Emit: instance = new T();
        mw.visitTypeInsn(NEW, type(context.getInstClass()));
        mw.visitInsn(DUP);
        mw.visitMethodInsn(INVOKESPECIAL, type(defaultConstructor.getDeclaringClass()), "<init>", "()V");
    } else {
        // Emit: instance = (T) this.createInstance(parser, this.clazz);
        mw.visitVarInsn(ALOAD, 0);
        mw.visitVarInsn(ALOAD, 1);
        mw.visitVarInsn(ALOAD, 0);
        mw.visitFieldInsn(GETFIELD, type(JavaBeanDeserializer.class), "clazz", "Ljava/lang/Class;");
        mw.visitMethodInsn(INVOKESPECIAL, type(JavaBeanDeserializer.class), "createInstance",
                           "(L" + DefaultJSONParser + ";Ljava/lang/reflect/Type;)Ljava/lang/Object;");
        mw.visitTypeInsn(CHECKCAST, type(context.getInstClass())); // cast
    }
    mw.visitVarInsn(ASTORE, context.var("instance"));
}
/**
 * Convenience overload: emits the field batch-set sequence with per-field
 * "was matched" flag checks enabled (flag = true).
 */
private void _batchSet(Context context, MethodVisitor mw) {
    _batchSet(context, mw, true);
}
/**
 * Emits bytecode that writes every parsed field value from its local-variable slot
 * into the bean instance.
 *
 * @param flag when true, each field write is guarded by its corresponding bit in the
 *             generated {@code _asm_flag_N} locals, so only fields that were actually
 *             matched during scanning get written; when false all fields are written
 *             unconditionally.
 */
private void _batchSet(Context context, MethodVisitor mw, boolean flag) {
    for (int i = 0, size = context.fieldInfoList.length; i < size; ++i) {
        Label notSet_ = new Label();
        if (flag) {
            // skip the write when the field's matched-flag bit is clear
            _isFlag(mw, context, i, notSet_);
        }
        FieldInfo fieldInfo = context.fieldInfoList[i];
        _loadAndSet(context, mw, fieldInfo);
        if (flag) {
            mw.visitLabel(notSet_);
        }
    }
}
/**
 * Emits bytecode that loads the bean instance plus one field's parsed value from its
 * local slot (using the load instruction appropriate to the field's JVM type — ILOAD
 * for int-category primitives, LLOAD/DLOAD for the two-slot long/double, FLOAD for
 * float, ALOAD for references) and then stores it into the bean via {@link #_set}.
 * <p>
 * NOTE(review): the {@code long.class} branch inlines the setter/PUTFIELD logic
 * instead of delegating to {@code _set} like every other branch — the visible code
 * is equivalent to {@code _set}'s body; presumably historical, not a behavioral
 * difference. Confirm before unifying.
 */
private void _loadAndSet(Context context, MethodVisitor mw, FieldInfo fieldInfo) {
    Class<?> fieldClass = fieldInfo.fieldClass;
    Type fieldType = fieldInfo.fieldType;
    if (fieldClass == boolean.class) {
        mw.visitVarInsn(ALOAD, context.var("instance"));
        mw.visitVarInsn(ILOAD, context.var_asm(fieldInfo));
        _set(context, mw, fieldInfo);
    } else if (fieldClass == byte.class //
               || fieldClass == short.class //
               || fieldClass == int.class //
               || fieldClass == char.class) {
        // all int-category primitives live in a single ILOAD slot
        mw.visitVarInsn(ALOAD, context.var("instance"));
        mw.visitVarInsn(ILOAD, context.var_asm(fieldInfo));
        _set(context, mw, fieldInfo);
    } else if (fieldClass == long.class) {
        mw.visitVarInsn(ALOAD, context.var("instance"));
        mw.visitVarInsn(LLOAD, context.var_asm(fieldInfo, 2)); // long occupies 2 slots
        if (fieldInfo.method != null) {
            mw.visitMethodInsn(INVOKEVIRTUAL, type(context.getInstClass()), fieldInfo.method.getName(),
                               desc(fieldInfo.method));
            if (!fieldInfo.method.getReturnType().equals(Void.TYPE)) {
                // discard a fluent setter's return value
                mw.visitInsn(POP);
            }
        } else {
            mw.visitFieldInsn(PUTFIELD, type(fieldInfo.declaringClass), fieldInfo.field.getName(),
                              desc(fieldInfo.fieldClass));
        }
    } else if (fieldClass == float.class) {
        mw.visitVarInsn(ALOAD, context.var("instance"));
        mw.visitVarInsn(FLOAD, context.var_asm(fieldInfo));
        _set(context, mw, fieldInfo);
    } else if (fieldClass == double.class) {
        mw.visitVarInsn(ALOAD, context.var("instance"));
        mw.visitVarInsn(DLOAD, context.var_asm(fieldInfo, 2)); // double occupies 2 slots
        _set(context, mw, fieldInfo);
    } else if (fieldClass == String.class) {
        mw.visitVarInsn(ALOAD, context.var("instance"));
        mw.visitVarInsn(ALOAD, context.var_asm(fieldInfo));
        _set(context, mw, fieldInfo);
    } else if (fieldClass.isEnum()) {
        mw.visitVarInsn(ALOAD, context.var("instance"));
        mw.visitVarInsn(ALOAD, context.var_asm(fieldInfo));
        _set(context, mw, fieldInfo);
    } else if (Collection.class.isAssignableFrom(fieldClass)) {
        mw.visitVarInsn(ALOAD, context.var("instance"));
        Type itemType = TypeUtils.getCollectionItemClass(fieldType);
        if (itemType == String.class) {
            // scanFieldStringArray returned a raw Collection; cast to the declared type
            mw.visitVarInsn(ALOAD, context.var_asm(fieldInfo));
            mw.visitTypeInsn(CHECKCAST, type(fieldClass)); // cast
        } else {
            mw.visitVarInsn(ALOAD, context.var_asm(fieldInfo));
        }
        _set(context, mw, fieldInfo);
    } else {
        // any other reference type
        mw.visitVarInsn(ALOAD, context.var("instance"));
        mw.visitVarInsn(ALOAD, context.var_asm(fieldInfo));
        _set(context, mw, fieldInfo);
    }
}
/**
 * Emits the final store for a field, assuming the instance and the value are already
 * on the operand stack: either an invocation of the setter method (INVOKEINTERFACE
 * when the declaring class is an interface, INVOKEVIRTUAL otherwise, popping a
 * non-void fluent-setter return), or a direct PUTFIELD when no setter exists.
 */
private void _set(Context context, MethodVisitor mw, FieldInfo fieldInfo) {
    Method method = fieldInfo.method;
    if (method != null) {
        Class<?> declaringClass = method.getDeclaringClass();
        mw.visitMethodInsn(declaringClass.isInterface() ? INVOKEINTERFACE : INVOKEVIRTUAL, type(fieldInfo.declaringClass), method.getName(), desc(method));
        if (!fieldInfo.method.getReturnType().equals(Void.TYPE)) {
            // discard a fluent setter's return value
            mw.visitInsn(POP);
        }
    } else {
        mw.visitFieldInsn(PUTFIELD, type(fieldInfo.declaringClass), fieldInfo.field.getName(),
                          desc(fieldInfo.fieldClass));
    }
}
/**
 * Emits bytecode equivalent to:
 * <pre>
 *   parser.setContext(context);
 *   if (childContext != null) { childContext.object = instance; }
 * </pre>
 * i.e. restores the parse context captured before the bean was deserialized, and
 * back-fills the child context's {@code object} field so reference resolution
 * ("$ref") can find the finished instance.
 */
private void _setContext(Context context, MethodVisitor mw) {
    mw.visitVarInsn(ALOAD, 1); // parser
    mw.visitVarInsn(ALOAD, context.var("context"));
    mw.visitMethodInsn(INVOKEVIRTUAL, DefaultJSONParser, "setContext", "(" + desc(ParseContext.class) + ")V");
    Label endIf_ = new Label();
    mw.visitVarInsn(ALOAD, context.var("childContext"));
    mw.visitJumpInsn(IFNULL, endIf_); // skip when no child context was created
    mw.visitVarInsn(ALOAD, context.var("childContext"));
    mw.visitVarInsn(ALOAD, context.var("instance"));
    mw.visitFieldInsn(PUTFIELD, type(ParseContext.class), "object", "Ljava/lang/Object;");
    mw.visitLabel(endIf_);
}
/**
 * Emits the end-of-object validation placed after the last field scan:
 * if no field matched ({@code matchedCount <= 0}) or the current token is not
 * {@code '}'} (RBRACE), jump to {@code reset_} to fall back to the generic
 * reflective parse path; otherwise consume the closing brace via the fast
 * comma/brace token shortcut.
 */
private void _deserialize_endCheck(Context context, MethodVisitor mw, Label reset_) {
    mw.visitIntInsn(ILOAD, context.var("matchedCount"));
    mw.visitJumpInsn(IFLE, reset_); // nothing matched -> bail out to parseRest
    mw.visitVarInsn(ALOAD, context.var("lexer"));
    mw.visitMethodInsn(INVOKEVIRTUAL, JSONLexerBase, "token", "()I");
    mw.visitLdcInsn(JSONToken.RBRACE);
    mw.visitJumpInsn(IF_ICMPNE, reset_); // object not properly closed -> bail out
    // mw.visitLabel(nextToken_);
    _quickNextTokenComma(context, mw);
}
/**
 * Emits the parsing sequence for a Collection-typed field whose item type is not
 * String (String-item collections use the lexer's scanFieldStringArray fast path
 * instead). The generated code, in order:
 * <ol>
 *   <li>matches the field name prefix ({@code lexer.matchField}); on mismatch skips
 *       the whole section;</li>
 *   <li>handles a {@code null} value by consuming it and leaving the field unset;</li>
 *   <li>handles the {@code Set(...)} token form, a plain {@code [} array, or a single
 *       {@code {} } object wrapped into a one-element collection; anything else jumps
 *       to {@code reset_} (fallback to the generic parser);</li>
 *   <li>creates the concrete collection, resolves/caches the item deserializer,
 *       then loops deserializing elements until {@code ]}, registering each with
 *       {@code parser.checkListResolve} for $ref resolution;</li>
 *   <li>saves and restores the parse context around the element loop.</li>
 * </ol>
 * Precondition: the caller has already emitted code leaving the lexer and the field
 * name char[] on the operand stack for the initial {@code matchField} call.
 *
 * @param reset_ jump target for falling back to the reflective parse path
 * @param i      field index, used for the matched-flag bit
 */
private void _deserialze_list_obj(Context context, MethodVisitor mw, Label reset_, FieldInfo fieldInfo,
                                  Class<?> fieldClass, Class<?> itemType, int i) {
    Label _end_if = new Label();
    // stack already holds (lexer, fieldNameChars) from the caller
    mw.visitMethodInsn(INVOKEVIRTUAL, JSONLexerBase, "matchField", "([C)Z");
    mw.visitJumpInsn(IFEQ, _end_if); // field name not present -> skip
    _setFlag(mw, context, i); // mark this field as matched
    // if (lexer.token() == NULL) { lexer.nextToken(COMMA); goto end; }
    Label valueNotNull_ = new Label();
    mw.visitVarInsn(ALOAD, context.var("lexer"));
    mw.visitMethodInsn(INVOKEVIRTUAL, JSONLexerBase, "token", "()I");
    mw.visitLdcInsn(JSONToken.NULL);
    mw.visitJumpInsn(IF_ICMPNE, valueNotNull_);
    mw.visitVarInsn(ALOAD, context.var("lexer"));
    mw.visitLdcInsn(JSONToken.COMMA);
    mw.visitMethodInsn(INVOKEVIRTUAL, JSONLexerBase, "nextToken", "(I)V");
    mw.visitJumpInsn(GOTO, _end_if);
    // loop_end_
    mw.visitLabel(valueNotNull_);
    Label storeCollection_ = new Label(), endSet_ = new Label(), lbacketNormal_ = new Label();
    // "Set(" token form: consume and force a HashSet-style collection
    mw.visitVarInsn(ALOAD, context.var("lexer"));
    mw.visitMethodInsn(INVOKEVIRTUAL, JSONLexerBase, "token", "()I");
    mw.visitLdcInsn(JSONToken.SET);
    mw.visitJumpInsn(IF_ICMPNE, endSet_);
    mw.visitVarInsn(ALOAD, context.var("lexer"));
    mw.visitLdcInsn(JSONToken.LBRACKET);
    mw.visitMethodInsn(INVOKEVIRTUAL, JSONLexerBase, "nextToken", "(I)V");
    _newCollection(mw, fieldClass, i, true);
    mw.visitJumpInsn(GOTO, storeCollection_);
    mw.visitLabel(endSet_);
    // if (lexer.token() != JSONToken.LBRACKET) reset
    mw.visitVarInsn(ALOAD, context.var("lexer"));
    mw.visitMethodInsn(INVOKEVIRTUAL, JSONLexerBase, "token", "()I");
    mw.visitLdcInsn(JSONToken.LBRACKET);
    mw.visitJumpInsn(IF_ICMPEQ, lbacketNormal_);
    // if (lexer.token() == JSONToken.LBRACE) reset
    mw.visitVarInsn(ALOAD, context.var("lexer"));
    mw.visitMethodInsn(INVOKEVIRTUAL, JSONLexerBase, "token", "()I");
    mw.visitLdcInsn(JSONToken.LBRACE);
    mw.visitJumpInsn(IF_ICMPNE, reset_); // neither '[' nor '{' -> fall back
    // single-object form: wrap one deserialized item into a new collection
    _newCollection(mw, fieldClass, i, false);
    mw.visitVarInsn(ASTORE, context.var_asm(fieldInfo));
    _getCollectionFieldItemDeser(context, mw, fieldInfo, itemType);
    mw.visitVarInsn(ALOAD, 1);
    mw.visitLdcInsn(com.alibaba.fastjson.asm.Type.getType(desc(itemType)));
    mw.visitInsn(ICONST_0);
    mw.visitMethodInsn(INVOKESTATIC, "java/lang/Integer", "valueOf", "(I)Ljava/lang/Integer;");
    mw.visitMethodInsn(INVOKEINTERFACE, type(ObjectDeserializer.class), "deserialze",
                       "(L" + DefaultJSONParser + ";Ljava/lang/reflect/Type;Ljava/lang/Object;)Ljava/lang/Object;");
    mw.visitVarInsn(ASTORE, context.var("list_item_value"));
    mw.visitVarInsn(ALOAD, context.var_asm(fieldInfo));
    mw.visitVarInsn(ALOAD, context.var("list_item_value"));
    if (fieldClass.isInterface()) {
        mw.visitMethodInsn(INVOKEINTERFACE, type(fieldClass), "add", "(Ljava/lang/Object;)Z");
    } else {
        mw.visitMethodInsn(INVOKEVIRTUAL, type(fieldClass), "add", "(Ljava/lang/Object;)Z");
    }
    mw.visitInsn(POP); // discard add()'s boolean result
    mw.visitJumpInsn(GOTO, _end_if);
    // normal '[' array form
    mw.visitLabel(lbacketNormal_);
    _newCollection(mw, fieldClass, i, false);
    mw.visitLabel(storeCollection_);
    mw.visitVarInsn(ASTORE, context.var_asm(fieldInfo));
    boolean isPrimitive = ParserConfig.isPrimitive2(fieldInfo.fieldClass);
    _getCollectionFieldItemDeser(context, mw, fieldInfo, itemType);
    if (isPrimitive) {
        // primitive items: advance the lexer with the item deserializer's fast-match token
        mw.visitMethodInsn(INVOKEINTERFACE, type(ObjectDeserializer.class), "getFastMatchToken", "()I");
        mw.visitVarInsn(ISTORE, context.var("fastMatchToken"));
        mw.visitVarInsn(ALOAD, context.var("lexer"));
        mw.visitVarInsn(ILOAD, context.var("fastMatchToken"));
        mw.visitMethodInsn(INVOKEVIRTUAL, JSONLexerBase, "nextToken", "(I)V");
    } else {
        // object items: expect '{' and use the single-char fast path
        mw.visitInsn(POP); // the cached deserializer reference is not needed here
        mw.visitLdcInsn(JSONToken.LBRACE);
        mw.visitVarInsn(ISTORE, context.var("fastMatchToken"));
        _quickNextToken(context, mw, JSONToken.LBRACE);
    }
    { // setContext: push a child context keyed by the field name around the element loop
        mw.visitVarInsn(ALOAD, 1);
        mw.visitMethodInsn(INVOKEVIRTUAL, DefaultJSONParser, "getContext", "()" + desc(ParseContext.class));
        mw.visitVarInsn(ASTORE, context.var("listContext"));
        mw.visitVarInsn(ALOAD, 1); // parser
        mw.visitVarInsn(ALOAD, context.var_asm(fieldInfo));
        mw.visitLdcInsn(fieldInfo.name);
        mw.visitMethodInsn(INVOKEVIRTUAL, DefaultJSONParser, "setContext",
                           "(Ljava/lang/Object;Ljava/lang/Object;)" + desc(ParseContext.class));
        mw.visitInsn(POP);
    }
    Label loop_ = new Label();
    Label loop_end_ = new Label();
    // for (;;) {
    mw.visitInsn(ICONST_0);
    mw.visitVarInsn(ISTORE, context.var("i")); // element index, passed as fieldName hint
    mw.visitLabel(loop_);
    mw.visitVarInsn(ALOAD, context.var("lexer"));
    mw.visitMethodInsn(INVOKEVIRTUAL, JSONLexerBase, "token", "()I");
    mw.visitLdcInsn(JSONToken.RBRACKET);
    mw.visitJumpInsn(IF_ICMPEQ, loop_end_); // ']' ends the array
    // Object value = itemDeserializer.deserialze(parser, null);
    // array.add(value);
    mw.visitVarInsn(ALOAD, 0);
    mw.visitFieldInsn(GETFIELD, context.className, fieldInfo.name + "_asm_list_item_deser__",
                      desc(ObjectDeserializer.class));
    mw.visitVarInsn(ALOAD, 1);
    mw.visitLdcInsn(com.alibaba.fastjson.asm.Type.getType(desc(itemType)));
    mw.visitVarInsn(ILOAD, context.var("i"));
    mw.visitMethodInsn(INVOKESTATIC, "java/lang/Integer", "valueOf", "(I)Ljava/lang/Integer;");
    mw.visitMethodInsn(INVOKEINTERFACE, type(ObjectDeserializer.class), "deserialze",
                       "(L" + DefaultJSONParser + ";Ljava/lang/reflect/Type;Ljava/lang/Object;)Ljava/lang/Object;");
    mw.visitVarInsn(ASTORE, context.var("list_item_value"));
    mw.visitIincInsn(context.var("i"), 1);
    mw.visitVarInsn(ALOAD, context.var_asm(fieldInfo));
    mw.visitVarInsn(ALOAD, context.var("list_item_value"));
    if (fieldClass.isInterface()) {
        mw.visitMethodInsn(INVOKEINTERFACE, type(fieldClass), "add", "(Ljava/lang/Object;)Z");
    } else {
        mw.visitMethodInsn(INVOKEVIRTUAL, type(fieldClass), "add", "(Ljava/lang/Object;)Z");
    }
    mw.visitInsn(POP); // discard add()'s boolean result
    // register the collection for $ref list resolution
    mw.visitVarInsn(ALOAD, 1);
    mw.visitVarInsn(ALOAD, context.var_asm(fieldInfo));
    mw.visitMethodInsn(INVOKEVIRTUAL, DefaultJSONParser, "checkListResolve", "(Ljava/util/Collection;)V");
    mw.visitVarInsn(ALOAD, context.var("lexer"));
    mw.visitMethodInsn(INVOKEVIRTUAL, JSONLexerBase, "token", "()I");
    mw.visitLdcInsn(JSONToken.COMMA);
    mw.visitJumpInsn(IF_ICMPNE, loop_); // no comma -> re-test token at loop head
    if (isPrimitive) {
        mw.visitVarInsn(ALOAD, context.var("lexer"));
        mw.visitVarInsn(ILOAD, context.var("fastMatchToken"));
        mw.visitMethodInsn(INVOKEVIRTUAL, JSONLexerBase, "nextToken", "(I)V");
    } else {
        _quickNextToken(context, mw, JSONToken.LBRACE);
    }
    mw.visitJumpInsn(GOTO, loop_);
    mw.visitLabel(loop_end_);
    // mw.visitVarInsn(ASTORE, context.var("context"));
    // parser.setContext(context);
    { // setContext: restore the context captured before the loop
        mw.visitVarInsn(ALOAD, 1); // parser
        mw.visitVarInsn(ALOAD, context.var("listContext"));
        mw.visitMethodInsn(INVOKEVIRTUAL, DefaultJSONParser, "setContext", "(" + desc(ParseContext.class) + ")V");
    }
    // require a closing ']' then consume it via the comma fast path
    mw.visitVarInsn(ALOAD, context.var("lexer"));
    mw.visitMethodInsn(INVOKEVIRTUAL, JSONLexerBase, "token", "()I");
    mw.visitLdcInsn(JSONToken.RBRACKET);
    mw.visitJumpInsn(IF_ICMPNE, reset_);
    _quickNextTokenComma(context, mw);
    // lexer.nextToken(JSONToken.COMMA);
    mw.visitLabel(_end_if);
}
/**
 * Emits a single-character fast path for advancing the lexer to an expected token:
 * if the current character is the literal {@code '{'} or {@code '['} matching
 * {@code token}, consume it with {@code next()} and stamp the token directly via
 * {@code setToken(token)}; otherwise fall back to the full {@code nextToken(token)}.
 * Only LBRACE and LBRACKET are supported; any other token is a caller bug.
 * <p>
 * NOTE(review): the expected character is pushed with {@code visitVarInsn(BIPUSH, ch)}
 * rather than {@code visitIntInsn} — presumably fastjson's bundled ASM encodes both
 * forms identically for this opcode; confirm against the bundled MethodWriter before
 * changing.
 */
private void _quickNextToken(Context context, MethodVisitor mw, int token) {
    Label quickElse_ = new Label(), quickEnd_ = new Label();
    mw.visitVarInsn(ALOAD, context.var("lexer"));
    mw.visitMethodInsn(INVOKEVIRTUAL, JSONLexerBase, "getCurrent", "()C");
    if (token == JSONToken.LBRACE) {
        mw.visitVarInsn(BIPUSH, '{');
    } else if (token == JSONToken.LBRACKET) {
        mw.visitVarInsn(BIPUSH, '[');
    } else {
        throw new IllegalStateException();
    }
    mw.visitJumpInsn(IF_ICMPNE, quickElse_);
    // fast path: consume the char and set the token without a full scan
    mw.visitVarInsn(ALOAD, context.var("lexer"));
    mw.visitMethodInsn(INVOKEVIRTUAL, JSONLexerBase, "next", "()C");
    mw.visitInsn(POP);
    mw.visitVarInsn(ALOAD, context.var("lexer"));
    mw.visitLdcInsn(token);
    mw.visitMethodInsn(INVOKEVIRTUAL, JSONLexerBase, "setToken", "(I)V");
    mw.visitJumpInsn(GOTO, quickEnd_);
    mw.visitLabel(quickElse_);
    // slow path: full tokenizer advance expecting `token`
    mw.visitVarInsn(ALOAD, context.var("lexer"));
    mw.visitLdcInsn(token);
    mw.visitMethodInsn(INVOKEVIRTUAL, JSONLexerBase, "nextToken", "(I)V");
    mw.visitLabel(quickEnd_);
}
/**
 * Emits a fast token-advance used after a value: dispatches on the lexer's current
 * character — {@code ','} / {@code '}'} / {@code ']'} are consumed with {@code next()}
 * and stamped directly as COMMA/RBRACE/RBRACKET via {@code setToken}; the EOI
 * sentinel sets EOF without consuming; any other character falls back to the full
 * {@code nextToken()} scan. The current char is cached in the "ch" local so each
 * branch can reload it.
 */
private void _quickNextTokenComma(Context context, MethodVisitor mw) {
    Label quickElse_ = new Label(), quickElseIf0_ = new Label(), quickElseIf1_ = new Label(), quickElseIf2_ = new Label(), quickEnd_ = new Label();
    mw.visitVarInsn(ALOAD, context.var("lexer"));
    mw.visitMethodInsn(INVOKEVIRTUAL, JSONLexerBase, "getCurrent", "()C");
    mw.visitInsn(DUP);
    mw.visitVarInsn(ISTORE, context.var("ch")); // cache current char for the later branches
    // case ',': next(); setToken(COMMA)
    mw.visitVarInsn(BIPUSH, ',');
    mw.visitJumpInsn(IF_ICMPNE, quickElseIf0_);
    mw.visitVarInsn(ALOAD, context.var("lexer"));
    mw.visitMethodInsn(INVOKEVIRTUAL, JSONLexerBase, "next", "()C");
    mw.visitInsn(POP);
    mw.visitVarInsn(ALOAD, context.var("lexer"));
    mw.visitLdcInsn(JSONToken.COMMA);
    mw.visitMethodInsn(INVOKEVIRTUAL, JSONLexerBase, "setToken", "(I)V");
    mw.visitJumpInsn(GOTO, quickEnd_);
    // case '}': next(); setToken(RBRACE)
    mw.visitLabel(quickElseIf0_);
    mw.visitVarInsn(ILOAD, context.var("ch"));
    mw.visitVarInsn(BIPUSH, '}');
    mw.visitJumpInsn(IF_ICMPNE, quickElseIf1_);
    mw.visitVarInsn(ALOAD, context.var("lexer"));
    mw.visitMethodInsn(INVOKEVIRTUAL, JSONLexerBase, "next", "()C");
    mw.visitInsn(POP);
    mw.visitVarInsn(ALOAD, context.var("lexer"));
    mw.visitLdcInsn(JSONToken.RBRACE);
    mw.visitMethodInsn(INVOKEVIRTUAL, JSONLexerBase, "setToken", "(I)V");
    mw.visitJumpInsn(GOTO, quickEnd_);
    // case ']': next(); setToken(RBRACKET)
    mw.visitLabel(quickElseIf1_);
    mw.visitVarInsn(ILOAD, context.var("ch"));
    mw.visitVarInsn(BIPUSH, ']');
    mw.visitJumpInsn(IF_ICMPNE, quickElseIf2_);
    mw.visitVarInsn(ALOAD, context.var("lexer"));
    mw.visitMethodInsn(INVOKEVIRTUAL, JSONLexerBase, "next", "()C");
    mw.visitInsn(POP);
    mw.visitVarInsn(ALOAD, context.var("lexer"));
    mw.visitLdcInsn(JSONToken.RBRACKET);
    mw.visitMethodInsn(INVOKEVIRTUAL, JSONLexerBase, "setToken", "(I)V");
    mw.visitJumpInsn(GOTO, quickEnd_);
    // case EOI: setToken(EOF) without consuming
    mw.visitLabel(quickElseIf2_);
    mw.visitVarInsn(ILOAD, context.var("ch"));
    mw.visitVarInsn(BIPUSH, JSONLexer.EOI);
    mw.visitJumpInsn(IF_ICMPNE, quickElse_);
    mw.visitVarInsn(ALOAD, context.var("lexer"));
    mw.visitLdcInsn(JSONToken.EOF);
    mw.visitMethodInsn(INVOKEVIRTUAL, JSONLexerBase, "setToken", "(I)V");
    mw.visitJumpInsn(GOTO, quickEnd_);
    // default: full tokenizer advance
    mw.visitLabel(quickElse_);
    mw.visitVarInsn(ALOAD, context.var("lexer"));
    mw.visitMethodInsn(INVOKEVIRTUAL, JSONLexerBase, "nextToken", "()V");
    mw.visitLabel(quickEnd_);
}
private void _getCollectionFieldItemDeser(Context context, MethodVisitor mw, FieldInfo fieldInfo,
Class<?> itemType) {
Label notNull_ = new Label();
mw.visitVarInsn(ALOAD, 0);
mw.visitFieldInsn(GETFIELD, context.className, fieldInfo.name + "_asm_list_item_deser__",
desc(ObjectDeserializer.class));
mw.visitJumpInsn(IFNONNULL, notNull_);
mw.visitVarInsn(ALOAD, 0);
mw.visitVarInsn(ALOAD, 1);
mw.visitMethodInsn(INVOKEVIRTUAL, DefaultJSONParser, "getConfig", "()" + desc(ParserConfig.class));
mw.visitLdcInsn(com.alibaba.fastjson.asm.Type.getType(desc(itemType)));
mw.visitMethodInsn(INVOKEVIRTUAL, type(ParserConfig.class), "getDeserializer",
"(Ljava/lang/reflect/Type;)" + desc(ObjectDeserializer.class));
mw.visitFieldInsn(PUTFIELD, context.className, fieldInfo.name + "_asm_list_item_deser__",
desc(ObjectDeserializer.class));
mw.visitLabel(notNull_);
mw.visitVarInsn(ALOAD, 0);
mw.visitFieldInsn(GETFIELD, context.className, fieldInfo.name + "_asm_list_item_deser__",
desc(ObjectDeserializer.class));
}
private void _newCollection(MethodVisitor mw, Class<?> fieldClass, int i, boolean set) {
if (fieldClass.isAssignableFrom(ArrayList.class) && !set) {
mw.visitTypeInsn(NEW, "java/util/ArrayList");
mw.visitInsn(DUP);
mw.visitMethodInsn(INVOKESPECIAL, "java/util/ArrayList", "<init>", "()V");
} else if (fieldClass.isAssignableFrom(LinkedList.class) && !set) {
mw.visitTypeInsn(NEW, type(LinkedList.class));
mw.visitInsn(DUP);
mw.visitMethodInsn(INVOKESPECIAL, type(LinkedList.class), "<init>", "()V");
} else if (fieldClass.isAssignableFrom(HashSet.class)) {
mw.visitTypeInsn(NEW, type(HashSet.class));
mw.visitInsn(DUP);
mw.visitMethodInsn(INVOKESPECIAL, type(HashSet.class), "<init>", "()V");
} else if (fieldClass.isAssignableFrom(TreeSet.class)) {
mw.visitTypeInsn(NEW, type(TreeSet.class));
mw.visitInsn(DUP);
mw.visitMethodInsn(INVOKESPECIAL, type(TreeSet.class), "<init>", "()V");
} else if (fieldClass.isAssignableFrom(LinkedHashSet.class)) {
mw.visitTypeInsn(NEW, type(LinkedHashSet.class));
mw.visitInsn(DUP);
mw.visitMethodInsn(INVOKESPECIAL, type(LinkedHashSet.class), "<init>", "()V");
} else if (set) {
mw.visitTypeInsn(NEW, type(HashSet.class));
mw.visitInsn(DUP);
mw.visitMethodInsn(INVOKESPECIAL, type(HashSet.class), "<init>", "()V");
} else {
mw.visitVarInsn(ALOAD, 0);
mw.visitLdcInsn(i);
mw.visitMethodInsn(INVOKEVIRTUAL, type(JavaBeanDeserializer.class), "getFieldType",
"(I)Ljava/lang/reflect/Type;");
mw.visitMethodInsn(INVOKESTATIC, type(TypeUtils.class), "createCollection",
"(Ljava/lang/reflect/Type;)Ljava/util/Collection;");
}
mw.visitTypeInsn(CHECKCAST, type(fieldClass)); // cast
}
private void _deserialze_obj(Context context, MethodVisitor mw, Label reset_, FieldInfo fieldInfo,
Class<?> fieldClass, int i) {
Label matched_ = new Label();
Label _end_if = new Label();
mw.visitVarInsn(ALOAD, context.var("lexer"));
mw.visitVarInsn(ALOAD, 0);
mw.visitFieldInsn(GETFIELD, context.className, context.fieldName(fieldInfo), "[C");
mw.visitMethodInsn(INVOKEVIRTUAL, JSONLexerBase, "matchField", "([C)Z");
mw.visitJumpInsn(IFNE, matched_);
mw.visitInsn(ACONST_NULL);
mw.visitVarInsn(ASTORE, context.var_asm(fieldInfo));
mw.visitJumpInsn(GOTO, _end_if);
mw.visitLabel(matched_);
_setFlag(mw, context, i);
// increment matchedCount
mw.visitVarInsn(ILOAD, context.var("matchedCount"));
mw.visitInsn(ICONST_1);
mw.visitInsn(IADD);
mw.visitVarInsn(ISTORE, context.var("matchedCount"));
_deserObject(context, mw, fieldInfo, fieldClass, i);
mw.visitVarInsn(ALOAD, 1);
mw.visitMethodInsn(INVOKEVIRTUAL, DefaultJSONParser, "getResolveStatus", "()I");
mw.visitLdcInsn(com.alibaba.fastjson.parser.DefaultJSONParser.NeedToResolve);
mw.visitJumpInsn(IF_ICMPNE, _end_if);
mw.visitVarInsn(ALOAD, 1);
mw.visitMethodInsn(INVOKEVIRTUAL, DefaultJSONParser, "getLastResolveTask", "()" + desc(ResolveTask.class));
mw.visitVarInsn(ASTORE, context.var("resolveTask"));
mw.visitVarInsn(ALOAD, context.var("resolveTask"));
mw.visitVarInsn(ALOAD, 1);
mw.visitMethodInsn(INVOKEVIRTUAL, DefaultJSONParser, "getContext", "()" + desc(ParseContext.class));
mw.visitFieldInsn(PUTFIELD, type(ResolveTask.class), "ownerContext", desc(ParseContext.class));
mw.visitVarInsn(ALOAD, context.var("resolveTask"));
mw.visitVarInsn(ALOAD, 0);
mw.visitLdcInsn(fieldInfo.name);
mw.visitMethodInsn(INVOKEVIRTUAL, type(JavaBeanDeserializer.class), "getFieldDeserializer",
"(Ljava/lang/String;)" + desc(FieldDeserializer.class));
mw.visitFieldInsn(PUTFIELD, type(ResolveTask.class), "fieldDeserializer", desc(FieldDeserializer.class));
mw.visitVarInsn(ALOAD, 1);
mw.visitLdcInsn(com.alibaba.fastjson.parser.DefaultJSONParser.NONE);
mw.visitMethodInsn(INVOKEVIRTUAL, DefaultJSONParser, "setResolveStatus", "(I)V");
mw.visitLabel(_end_if);
}
private void _deserObject(Context context, MethodVisitor mw, FieldInfo fieldInfo, Class<?> fieldClass, int i) {
_getFieldDeser(context, mw, fieldInfo);
Label instanceOfElse_ = new Label(), instanceOfEnd_ = new Label();
if ((fieldInfo.parserFeatures & Feature.SupportArrayToBean.mask) != 0) {
mw.visitInsn(DUP);
mw.visitTypeInsn(INSTANCEOF, type(JavaBeanDeserializer.class));
mw.visitJumpInsn(IFEQ, instanceOfElse_);
mw.visitTypeInsn(CHECKCAST, type(JavaBeanDeserializer.class)); // cast
mw.visitVarInsn(ALOAD, 1);
if (fieldInfo.fieldType instanceof Class) {
mw.visitLdcInsn(com.alibaba.fastjson.asm.Type.getType(desc(fieldInfo.fieldClass)));
} else {
mw.visitVarInsn(ALOAD, 0);
mw.visitLdcInsn(i);
mw.visitMethodInsn(INVOKEVIRTUAL, type(JavaBeanDeserializer.class), "getFieldType",
"(I)Ljava/lang/reflect/Type;");
}
mw.visitLdcInsn(fieldInfo.name);
mw.visitLdcInsn(fieldInfo.parserFeatures);
mw.visitMethodInsn(INVOKEVIRTUAL, type(JavaBeanDeserializer.class), "deserialze",
"(L" + DefaultJSONParser + ";Ljava/lang/reflect/Type;Ljava/lang/Object;I)Ljava/lang/Object;");
mw.visitTypeInsn(CHECKCAST, type(fieldClass)); // cast
mw.visitVarInsn(ASTORE, context.var_asm(fieldInfo));
mw.visitJumpInsn(GOTO, instanceOfEnd_);
mw.visitLabel(instanceOfElse_);
}
mw.visitVarInsn(ALOAD, 1);
if (fieldInfo.fieldType instanceof Class) {
mw.visitLdcInsn(com.alibaba.fastjson.asm.Type.getType(desc(fieldInfo.fieldClass)));
} else {
mw.visitVarInsn(ALOAD, 0);
mw.visitLdcInsn(i);
mw.visitMethodInsn(INVOKEVIRTUAL, type(JavaBeanDeserializer.class), "getFieldType",
"(I)Ljava/lang/reflect/Type;");
}
mw.visitLdcInsn(fieldInfo.name);
mw.visitMethodInsn(INVOKEINTERFACE, type(ObjectDeserializer.class), "deserialze",
"(L" + DefaultJSONParser + ";Ljava/lang/reflect/Type;Ljava/lang/Object;)Ljava/lang/Object;");
mw.visitTypeInsn(CHECKCAST, type(fieldClass)); // cast
mw.visitVarInsn(ASTORE, context.var_asm(fieldInfo));
mw.visitLabel(instanceOfEnd_);
}
private void _getFieldDeser(Context context, MethodVisitor mw, FieldInfo fieldInfo) {
Label notNull_ = new Label();
mw.visitVarInsn(ALOAD, 0);
mw.visitFieldInsn(GETFIELD, context.className, context.fieldDeserName(fieldInfo), desc(ObjectDeserializer.class));
mw.visitJumpInsn(IFNONNULL, notNull_);
mw.visitVarInsn(ALOAD, 0);
mw.visitVarInsn(ALOAD, 1);
mw.visitMethodInsn(INVOKEVIRTUAL, DefaultJSONParser, "getConfig", "()" + desc(ParserConfig.class));
mw.visitLdcInsn(com.alibaba.fastjson.asm.Type.getType(desc(fieldInfo.fieldClass)));
mw.visitMethodInsn(INVOKEVIRTUAL, type(ParserConfig.class), "getDeserializer",
"(Ljava/lang/reflect/Type;)" + desc(ObjectDeserializer.class));
mw.visitFieldInsn(PUTFIELD, context.className, context.fieldDeserName(fieldInfo), desc(ObjectDeserializer.class));
mw.visitLabel(notNull_);
mw.visitVarInsn(ALOAD, 0);
mw.visitFieldInsn(GETFIELD, context.className, context.fieldDeserName(fieldInfo), desc(ObjectDeserializer.class));
}
static | ASMDeserializerFactory |
java | spring-projects__spring-framework | spring-expression/src/test/java/org/springframework/expression/spel/IndexingTests.java | {
"start": 21241,
"end": 30871
} | class ____ { // gh-26478
private final StandardEvaluationContext context = new StandardEvaluationContext();
private final SpelExpressionParser parser = new SpelExpressionParser();
@Test
void addingAndRemovingIndexAccessors() {
ObjectMapper objectMapper = new ObjectMapper();
IndexAccessor accessor1 = new JacksonArrayNodeIndexAccessor(objectMapper);
IndexAccessor accessor2 = new JacksonArrayNodeIndexAccessor(objectMapper);
List<IndexAccessor> indexAccessors = context.getIndexAccessors();
assertThat(indexAccessors).isEmpty();
context.addIndexAccessor(accessor1);
assertThat(indexAccessors).containsExactly(accessor1);
context.addIndexAccessor(accessor2);
assertThat(indexAccessors).containsExactly(accessor1, accessor2);
List<IndexAccessor> copy = new ArrayList<>(indexAccessors);
assertThat(context.removeIndexAccessor(accessor1)).isTrue();
assertThat(context.removeIndexAccessor(accessor1)).isFalse();
assertThat(indexAccessors).containsExactly(accessor2);
context.setIndexAccessors(copy);
assertThat(context.getIndexAccessors()).containsExactly(accessor1, accessor2);
}
@Test
void noSuitableIndexAccessorResultsInException() {
assertThat(context.getIndexAccessors()).isEmpty();
Expression expr = parser.parseExpression("[0]");
assertThatExceptionOfType(SpelEvaluationException.class)
.isThrownBy(() -> expr.getValue(context, this))
.withMessageEndingWith("Indexing into type '%s' is not supported", getClass().getName())
.extracting(SpelEvaluationException::getMessageCode).isEqualTo(INDEXING_NOT_SUPPORTED_FOR_TYPE);
}
@Test
void canReadThrowsException() throws Exception {
RuntimeException exception = new RuntimeException("Boom!");
IndexAccessor mock = mock();
given(mock.getSpecificTargetClasses()).willReturn(null);
given(mock.canRead(any(), eq(this), any())).willThrow(exception);
context.addIndexAccessor(mock);
Expression expr = parser.parseExpression("[0]");
assertThatExceptionOfType(SpelEvaluationException.class)
.isThrownBy(() -> expr.getValue(context, this))
.withMessageEndingWith("A problem occurred while attempting to read index '%d' in '%s'",
0, getClass().getName())
.withCause(exception)
.extracting(SpelEvaluationException::getMessageCode).isEqualTo(EXCEPTION_DURING_INDEX_READ);
verify(mock, times(1)).getSpecificTargetClasses();
verify(mock, times(1)).canRead(any(), any(), any());
verifyNoMoreInteractions(mock);
}
@Test
void readThrowsException() throws Exception {
RuntimeException exception = new RuntimeException("Boom!");
IndexAccessor mock = mock();
given(mock.getSpecificTargetClasses()).willReturn(null);
given(mock.canRead(any(), eq(this), any())).willReturn(true);
given(mock.read(any(), eq(this), any())).willThrow(exception);
context.addIndexAccessor(mock);
Expression expr = parser.parseExpression("[0]");
assertThatExceptionOfType(SpelEvaluationException.class)
.isThrownBy(() -> expr.getValue(context, this))
.withMessageEndingWith("A problem occurred while attempting to read index '%d' in '%s'",
0, getClass().getName())
.withCause(exception)
.extracting(SpelEvaluationException::getMessageCode).isEqualTo(EXCEPTION_DURING_INDEX_READ);
verify(mock, times(2)).getSpecificTargetClasses();
verify(mock, times(2)).canRead(any(), any(), any());
verify(mock, times(1)).read(any(), any(), any());
verifyNoMoreInteractions(mock);
}
@Test
void canWriteThrowsException() throws Exception {
RuntimeException exception = new RuntimeException("Boom!");
IndexAccessor mock = mock();
given(mock.getSpecificTargetClasses()).willReturn(null);
given(mock.canWrite(eq(context), eq(this), eq(0))).willThrow(exception);
context.addIndexAccessor(mock);
Expression expr = parser.parseExpression("[0]");
assertThatExceptionOfType(SpelEvaluationException.class)
.isThrownBy(() -> expr.setValue(context, this, 999))
.withMessageEndingWith("A problem occurred while attempting to write index '%d' in '%s'",
0, getClass().getName())
.withCause(exception)
.extracting(SpelEvaluationException::getMessageCode).isEqualTo(EXCEPTION_DURING_INDEX_WRITE);
verify(mock, times(1)).getSpecificTargetClasses();
verify(mock, times(1)).canWrite(any(), any(), any());
verifyNoMoreInteractions(mock);
}
@Test
void writeThrowsException() throws Exception {
RuntimeException exception = new RuntimeException("Boom!");
IndexAccessor mock = mock();
given(mock.getSpecificTargetClasses()).willReturn(null);
given(mock.canWrite(eq(context), eq(this), eq(0))).willReturn(true);
doThrow(exception).when(mock).write(any(), any(), any(), any());
context.addIndexAccessor(mock);
Expression expr = parser.parseExpression("[0]");
assertThatExceptionOfType(SpelEvaluationException.class)
.isThrownBy(() -> expr.setValue(context, this, 999))
.withMessageEndingWith("A problem occurred while attempting to write index '%d' in '%s'",
0, getClass().getName())
.withCause(exception)
.extracting(SpelEvaluationException::getMessageCode).isEqualTo(EXCEPTION_DURING_INDEX_WRITE);
verify(mock, times(2)).getSpecificTargetClasses();
verify(mock, times(2)).canWrite(any(), any(), any());
verify(mock, times(1)).write(any(), any(), any(), any());
verifyNoMoreInteractions(mock);
}
@Test
void readAndWriteIndex() {
ObjectMapper objectMapper = new ObjectMapper();
context.addIndexAccessor(new JacksonArrayNodeIndexAccessor(objectMapper));
TextNode node0 = new TextNode("node0");
TextNode node1 = new TextNode("node1");
ArrayNode arrayNode = objectMapper.createArrayNode();
arrayNode.addAll(List.of(node0, node1));
Expression expr = parser.parseExpression("[0]");
assertThat(expr.getValue(context, arrayNode)).isSameAs(node0);
TextNode nodeX = new TextNode("nodeX");
expr.setValue(context, arrayNode, nodeX);
// We use isEqualTo() instead of isSameAs(), since ObjectMapper.convertValue()
// converts the supplied TextNode to an equivalent JsonNode.
assertThat(expr.getValue(context, arrayNode)).isEqualTo(nodeX);
NullNode nullNode = NullNode.getInstance();
expr.setValue(context, arrayNode, nullNode);
assertThat(expr.getValue(context, arrayNode)).isSameAs(nullNode);
expr = parser.parseExpression("[1]");
assertThat(expr.getValue(context, arrayNode)).isSameAs(node1);
expr = parser.parseExpression("[-1]");
// Jackson's ArrayNode returns null for a non-existent index instead
// of throwing an ArrayIndexOutOfBoundsException or similar.
assertThat(expr.getValue(context, arrayNode)).isNull();
}
@Test
void readAndWriteIndexWithSimpleEvaluationContext() {
ObjectMapper objectMapper = new ObjectMapper();
SimpleEvaluationContext context = SimpleEvaluationContext.forReadWriteDataBinding()
.withIndexAccessors(new JacksonArrayNodeIndexAccessor(objectMapper))
.build();
TextNode node0 = new TextNode("node0");
TextNode node1 = new TextNode("node1");
ArrayNode arrayNode = objectMapper.createArrayNode();
arrayNode.addAll(List.of(node0, node1));
Expression expr = parser.parseExpression("[0]");
assertThat(expr.getValue(context, arrayNode)).isSameAs(node0);
TextNode nodeX = new TextNode("nodeX");
expr.setValue(context, arrayNode, nodeX);
// We use isEqualTo() instead of isSameAs(), since ObjectMapper.convertValue()
// converts the supplied TextNode to an equivalent JsonNode.
assertThat(expr.getValue(context, arrayNode)).isEqualTo(nodeX);
expr = parser.parseExpression("[1]");
assertThat(expr.getValue(context, arrayNode)).isSameAs(node1);
}
@Test // gh-32706
void readIndexWithStringIndexType() {
BirdNameToColorMappings birdNameMappings = new BirdNameToColorMappings();
// Without a registered BirdNameToColorMappingsIndexAccessor, we should
// be able to index into an object via a property name.
Expression propertyExpression = parser.parseExpression("['property']");
assertThat(propertyExpression.getValue(context, birdNameMappings)).isEqualTo("enigma");
context.addIndexAccessor(new BirdNameToColorMappingsIndexAccessor());
Expression expression = parser.parseExpression("['cardinal']");
assertThat(expression.getValue(context, birdNameMappings)).isEqualTo(Color.RED);
// With a registered BirdNameToColorMappingsIndexAccessor, an attempt
// to index into an object via a property name should fail.
assertThatExceptionOfType(SpelEvaluationException.class)
.isThrownBy(() -> propertyExpression.getValue(context, birdNameMappings))
.withMessageEndingWith("A problem occurred while attempting to read index '%s' in '%s'",
"property", BirdNameToColorMappings.class.getName())
.havingCause().withMessage("unknown bird: property");
}
@Test // gh-32736
void readIndexWithCollectionTargetType() {
context.addIndexAccessor(new ColorCollectionIndexAccessor());
Expression expression = parser.parseExpression("[0]");
// List.of() relies on built-in list support.
assertThat(expression.getValue(context, List.of(Color.RED))).isEqualTo(Color.RED);
ColorCollection colorCollection = new ColorCollection();
// Preconditions for this use case.
assertThat(colorCollection).isInstanceOf(Collection.class);
assertThat(colorCollection).isNotInstanceOf(List.class);
// ColorCollection relies on custom ColorCollectionIndexAccessor.
assertThat(expression.getValue(context, colorCollection)).isEqualTo(Color.RED);
}
static | IndexAccessorTests |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/time/DurationGetTemporalUnitTest.java | {
"start": 879,
"end": 1288
} | class ____ {
private final CompilationTestHelper helper =
CompilationTestHelper.newInstance(DurationGetTemporalUnit.class, getClass());
@Test
public void durationGetTemporalUnit() {
helper
.addSourceLines(
"TestClass.java",
"""
import java.time.Duration;
import java.time.temporal.ChronoUnit;
public | DurationGetTemporalUnitTest |
java | spring-projects__spring-framework | spring-core/src/test/java/org/springframework/core/type/AnnotationMetadataTests.java | {
"start": 31032,
"end": 31370
} | interface ____ {
@AliasFor("basePackages")
String[] value() default {};
@AliasFor("value")
String[] basePackages() default {};
Class<?>[] basePackageClasses() default {};
}
@TestConfiguration
@TestComponentScan(basePackages = "bogus")
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.TYPE)
public @ | TestComponentScan |
java | spring-projects__spring-framework | spring-core/src/main/java/org/springframework/core/io/ResourceLoader.java | {
"start": 1681,
"end": 1753
} | interface ____ {
/** Pseudo URL prefix for loading from the | ResourceLoader |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/engine/spi/ComparableExecutable.java | {
"start": 318,
"end": 647
} | interface ____ such union; this helps to simplify several generic signatures.
* Secondarily, it helps to avoid triggering type pollution by not needing to typecheck
* for a very specific Comparable type; we represent the common needs to resolve sorting
* by exposing primary and secondary sorting attributes.
*/
public | represents |
java | apache__flink | flink-table/flink-sql-parser/src/main/java/org/apache/flink/sql/parser/ddl/SqlDropFunction.java | {
"start": 1118,
"end": 2343
} | class ____ extends SqlDropObject {
private static final SqlSpecialOperator OPERATOR =
new SqlSpecialOperator("DROP FUNCTION", SqlKind.DROP_FUNCTION);
private final boolean isTemporary;
private final boolean isSystemFunction;
public SqlDropFunction(
SqlParserPos pos,
SqlIdentifier functionIdentifier,
boolean ifExists,
boolean isTemporary,
boolean isSystemFunction) {
super(OPERATOR, pos, functionIdentifier, ifExists);
this.isSystemFunction = isSystemFunction;
this.isTemporary = isTemporary;
}
@Override
public void unparse(SqlWriter writer, int leftPrec, int rightPrec) {
writer.keyword("DROP");
if (isTemporary) {
writer.keyword("TEMPORARY");
}
if (isSystemFunction) {
writer.keyword("SYSTEM");
}
writer.keyword("FUNCTION");
if (ifExists) {
writer.keyword("IF EXISTS");
}
name.unparse(writer, leftPrec, rightPrec);
}
public boolean isTemporary() {
return isTemporary;
}
public boolean isSystemFunction() {
return isSystemFunction;
}
}
| SqlDropFunction |
java | grpc__grpc-java | xds/src/main/java/io/grpc/xds/GcpAuthenticationFilter.java | {
"start": 8832,
"end": 9443
} | class ____<ReqT, RespT> extends ClientCall<ReqT, RespT> {
@VisibleForTesting
final Status error;
public FailingClientCall(Status error) {
this.error = error;
}
@Override
public void start(ClientCall.Listener<RespT> listener, Metadata headers) {
listener.onClose(error, new Metadata());
}
@Override
public void request(int numMessages) {}
@Override
public void cancel(String message, Throwable cause) {}
@Override
public void halfClose() {}
@Override
public void sendMessage(ReqT message) {}
}
private static final | FailingClientCall |
java | resilience4j__resilience4j | resilience4j-spring-boot3/src/test/java/io/github/resilience4j/springboot3/service/test/DummyServiceImpl.java | {
"start": 668,
"end": 1666
} | class ____ implements DummyService {
@Override
public void doSomething(boolean throwBackendTrouble) throws IOException {
if (throwBackendTrouble) {
throw new IOException("Test Message");
}
}
@Override
@TimeLimiter(name = BACKEND)
public CompletableFuture<String> doSomethingAsync(boolean throwBackendTrouble)
throws IOException {
if (throwBackendTrouble) {
CompletableFuture<String> future = new CompletableFuture<>();
future.completeExceptionally(new IOException("Test Message"));
return future;
}
return CompletableFuture.supplyAsync(() -> "Test result");
}
@Bulkhead(name = BulkheadDummyService.BACKEND_D, type = Bulkhead.Type.THREADPOOL)
@TimeLimiter(name = BACKEND_B)
public CompletableFuture<String> longDoSomethingAsync() {
Try.run(() -> Thread.sleep(2000));
return CompletableFuture.completedFuture("Test result");
}
}
| DummyServiceImpl |
java | apache__flink | flink-tests/src/test/java/org/apache/flink/test/checkpointing/SavepointITCase.java | {
"start": 65095,
"end": 66962
} | class ____ extends RichSourceFunction<Integer>
implements ListCheckpointed<Integer> {
private static final long serialVersionUID = 1L;
private volatile boolean running;
private volatile boolean isRestored;
private int emittedCount;
public IntegerStreamSource() {
this.running = true;
this.isRestored = false;
this.emittedCount = 0;
}
@Override
public void run(SourceContext<Integer> ctx) throws Exception {
while (running) {
synchronized (ctx.getCheckpointLock()) {
ctx.collect(emittedCount);
}
if (emittedCount < 100) {
++emittedCount;
} else {
emittedCount = 0;
}
Thread.sleep(1);
}
}
@Override
public void cancel() {
running = false;
}
@Override
public List<Integer> snapshotState(long checkpointId, long timestamp) throws Exception {
iterTestCheckpointVerify[getRuntimeContext().getTaskInfo().getIndexOfThisSubtask()] =
emittedCount;
return Collections.singletonList(emittedCount);
}
@Override
public void restoreState(List<Integer> state) throws Exception {
if (!state.isEmpty()) {
this.emittedCount = state.get(0);
}
Assert.assertEquals(
iterTestCheckpointVerify[
getRuntimeContext().getTaskInfo().getIndexOfThisSubtask()],
emittedCount);
iterTestRestoreWait[getRuntimeContext().getTaskInfo().getIndexOfThisSubtask()]
.trigger();
}
}
private static | IntegerStreamSource |
java | assertj__assertj-core | assertj-core/src/test/java/org/assertj/core/api/assumptions/BDDAssumptionsTest.java | {
"start": 16217,
"end": 16663
} | class ____ {
private final Yoda actual = new Yoda();
@Test
void should_run_test_when_assumption_passes() {
thenCode(() -> given(actual).hasNoNullFieldsOrProperties()).doesNotThrowAnyException();
}
@Test
void should_ignore_test_when_assumption_fails() {
expectAssumptionNotMetException(() -> given(actual).hasAllNullFieldsOrProperties());
}
}
@Nested
| BDDAssumptions_given_T_Test |
java | FasterXML__jackson-databind | src/main/java/tools/jackson/databind/ser/jdk/NumberSerializers.java | {
"start": 2180,
"end": 2283
} | class ____ actual primitive/wrapper value serializers.
*<p>
* NOTE: while you can extend this | for |
java | elastic__elasticsearch | x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene40/blocktree/CompressionAlgorithm.java | {
"start": 1180,
"end": 1357
} | class ____ same name shipped with Lucene, which is though package protected hence not accessible.
* We need to copy it because we have our own fork of {@link FieldReader}.
*/
| with |
java | spring-projects__spring-framework | spring-core/src/main/java/org/springframework/core/type/ClassMetadata.java | {
"start": 3089,
"end": 3442
} | class ____ by
* this ClassMetadata object. This includes public, protected, default (package)
* access, and private classes and interfaces declared by the class, but excludes
* inherited classes and interfaces. An empty array is returned if no member classes
* or interfaces exist.
* @since 3.1
*/
String[] getMemberClassNames();
}
| represented |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/annotations/Formula.java | {
"start": 2365,
"end": 2454
} | interface ____ {
/**
* The formula, written in native SQL.
*/
String value();
}
| Formula |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/type/descriptor/java/spi/EmbeddableAggregateJavaType.java | {
"start": 627,
"end": 3409
} | class ____<T> extends AbstractClassJavaType<T> {
private final String structName;
public EmbeddableAggregateJavaType(Class<T> type, String structName) {
super( type );
this.structName = structName;
}
public String getStructName() {
return structName;
}
@Override
public JdbcType getRecommendedJdbcType(JdbcTypeIndicators context) {
final var basicType = context.getTypeConfiguration().getBasicTypeForJavaType( getJavaType() );
if ( basicType != null ) {
return basicType.getJdbcType();
}
if ( structName != null ) {
final var jdbcTypeRegistry = context.getTypeConfiguration().getJdbcTypeRegistry();
final var aggregateDescriptor = jdbcTypeRegistry.findAggregateDescriptor( structName );
if ( aggregateDescriptor != null ) {
return aggregateDescriptor;
}
if ( jdbcTypeRegistry.findDescriptor( SqlTypes.STRUCT ) != null ) {
return new DelayedStructJdbcType( this, structName );
}
}
// When the column is mapped as XML array, the component type must be SQLXML
final Integer explicitJdbcTypeCode = context.getExplicitJdbcTypeCode();
if ( explicitJdbcTypeCode != null && explicitJdbcTypeCode == SqlTypes.XML_ARRAY
// Also prefer XML as the Dialect prefers XML arrays
|| context.getDialect().getPreferredSqlTypeCodeForArray() == SqlTypes.XML_ARRAY ) {
final var descriptor = context.getJdbcType( SqlTypes.SQLXML );
if ( descriptor != null ) {
return descriptor;
}
}
else {
// Otherwise use json by default for now
final var descriptor = context.getJdbcType( SqlTypes.JSON );
if ( descriptor != null ) {
return descriptor;
}
}
throw new JdbcTypeRecommendationException(
"Could not determine recommended JdbcType for `" + getTypeName() + "`"
);
}
@Override
public String toString(T value) {
return value.toString();
}
@Override
public T fromString(CharSequence string) {
throw new UnsupportedOperationException(
"Conversion from String strategy not known for this Java type: " + getTypeName()
);
}
@Override
public <X> X unwrap(T value, Class<X> type, WrapperOptions options) {
if ( type.isAssignableFrom( getJavaTypeClass() ) ) {
//noinspection unchecked
return (X) value;
}
throw new UnsupportedOperationException(
"Unwrap strategy not known for this Java type: " + getTypeName()
);
}
@Override
public <X> T wrap(X value, WrapperOptions options) {
if ( value == null ) {
return null;
}
if ( getJavaTypeClass().isInstance( value ) ) {
//noinspection unchecked
return (T) value;
}
throw new UnsupportedOperationException(
"Wrap strategy not known for this Java type: " + getTypeName()
);
}
@Override
public String toString() {
return "BasicJavaType(" + getTypeName() + ")";
}
}
| EmbeddableAggregateJavaType |
java | google__guava | android/guava-tests/test/com/google/common/util/concurrent/FuturesGetCheckedInputs.java | {
"start": 5810,
"end": 6123
} | class ____ extends Exception {
public ExceptionWithGoodAndBadConstructor(String message, Throwable cause) {
throw new RuntimeException("bad constructor");
}
public ExceptionWithGoodAndBadConstructor(Throwable cause) {
super(cause);
}
}
static final | ExceptionWithGoodAndBadConstructor |
java | apache__camel | components/camel-netty-http/src/test/java/org/apache/camel/component/netty/http/NettyHttpStreamDisabledStreamCachingTest.java | {
"start": 897,
"end": 1189
} | class ____ extends NettyHttpStreamTest {
@Override
protected CamelContext createCamelContext() throws Exception {
CamelContext context = super.createCamelContext();
context.setStreamCaching(false);
return context;
}
}
| NettyHttpStreamDisabledStreamCachingTest |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/service/ServiceBootstrappingTest.java | {
"start": 1175,
"end": 5234
} | class ____ {
@Test
public void testBasicBuild() throws Exception {
Field globalProperties = Environment.class.getDeclaredField( "GLOBAL_PROPERTIES" );
globalProperties.setAccessible( true );
Properties props = (Properties) globalProperties.get( null );
Object showSql = props.remove( Environment.SHOW_SQL );
// this test requires that SHOW_SQL property isn't passed from the outside (eg. via Gradle)
final String showSqlPropertyFromOutside = System.getProperty( Environment.SHOW_SQL );
assertThat( showSqlPropertyFromOutside ).isNotEqualTo( "true" );
final StandardServiceRegistryImpl serviceRegistry = ServiceRegistryUtil.serviceRegistry();
try {
final JdbcServices jdbcServices = serviceRegistry.getService( JdbcServices.class );
final JdbcConnectionAccess connectionAccess = jdbcServices.getBootstrapJdbcConnectionAccess();
assertThat( connectionAccess ).isInstanceOf( ConnectionProviderJdbcConnectionAccess.class );
ConnectionProviderJdbcConnectionAccess connectionProviderJdbcConnectionAccess = (ConnectionProviderJdbcConnectionAccess) connectionAccess;
assertThat( connectionProviderJdbcConnectionAccess.getConnectionProvider()
.isUnwrappableAs( DriverManagerConnectionProvider.class ) ).isTrue();
assertThat( jdbcServices.getSqlStatementLogger().isLogToStdout() ).isFalse();
}
finally {
if ( showSql != null ) {
props.put( Environment.SHOW_SQL, showSql );
}
serviceRegistry.destroy();
}
}
@Test
public void testBuildWithLogging() {
StandardServiceRegistryImpl serviceRegistry = (StandardServiceRegistryImpl) ServiceRegistryUtil.serviceRegistryBuilder()
.applySetting( Environment.SHOW_SQL, "true" )
.build();
try {
JdbcServices jdbcServices = serviceRegistry.getService( JdbcServices.class );
final JdbcConnectionAccess connectionAccess = jdbcServices.getBootstrapJdbcConnectionAccess();
assertThat( connectionAccess ).isInstanceOf( ConnectionProviderJdbcConnectionAccess.class );
ConnectionProviderJdbcConnectionAccess connectionProviderJdbcConnectionAccess = (ConnectionProviderJdbcConnectionAccess) connectionAccess;
assertThat( connectionProviderJdbcConnectionAccess.getConnectionProvider()
.isUnwrappableAs( DriverManagerConnectionProvider.class ) )
.isTrue();
assertThat( jdbcServices.getSqlStatementLogger().isLogToStdout() ).isTrue();
}
finally {
serviceRegistry.destroy();
}
}
@Test
public void testBuildWithServiceOverride() {
StandardServiceRegistryImpl serviceRegistry = ServiceRegistryUtil.serviceRegistry();
try {
JdbcServices jdbcServices = serviceRegistry.getService( JdbcServices.class );
final JdbcConnectionAccess connectionAccess = jdbcServices.getBootstrapJdbcConnectionAccess();
assertThat( connectionAccess ).isInstanceOf( ConnectionProviderJdbcConnectionAccess.class );
ConnectionProviderJdbcConnectionAccess connectionProviderJdbcConnectionAccess = (ConnectionProviderJdbcConnectionAccess) connectionAccess;
assertThat( connectionProviderJdbcConnectionAccess.getConnectionProvider()
.isUnwrappableAs( DriverManagerConnectionProvider.class ) )
.isTrue();
}
finally {
serviceRegistry.destroy();
}
try {
serviceRegistry = (StandardServiceRegistryImpl) ServiceRegistryUtil.serviceRegistryBuilder()
.addService( ConnectionProvider.class, new UserSuppliedConnectionProviderImpl() )
.build();
JdbcServices jdbcServices = serviceRegistry.getService( JdbcServices.class );
final JdbcConnectionAccess connectionAccess = jdbcServices.getBootstrapJdbcConnectionAccess();
assertThat( connectionAccess ).isInstanceOf( ConnectionProviderJdbcConnectionAccess.class );
ConnectionProviderJdbcConnectionAccess connectionProviderJdbcConnectionAccess = (ConnectionProviderJdbcConnectionAccess) connectionAccess;
assertThat( connectionProviderJdbcConnectionAccess.getConnectionProvider()
.isUnwrappableAs( UserSuppliedConnectionProviderImpl.class ) )
.isTrue();
}
finally {
serviceRegistry.destroy();
}
}
}
| ServiceBootstrappingTest |
java | apache__flink | flink-table/flink-table-runtime/src/main/java/org/apache/flink/table/runtime/operators/deduplicate/utils/DeduplicateFunctionHelper.java | {
"start": 1422,
"end": 13971
} | class ____ {
/**
* Processes element to deduplicate on keys with process time semantic, sends current element as
* last row, retracts previous element if needed.
*
* @param currentRow latest row received by deduplicate function
* @param generateUpdateBefore whether need to send UPDATE_BEFORE message for updates
* @param state state of function, null if generateUpdateBefore is false
* @param out underlying collector
* @param isStateTtlEnabled whether state ttl is disabled
* @param equaliser the record equaliser used to equal RowData.
*/
public static void processLastRowOnProcTime(
RowData currentRow,
boolean generateUpdateBefore,
boolean generateInsert,
ValueState<RowData> state,
Collector<RowData> out,
boolean isStateTtlEnabled,
RecordEqualiser equaliser)
throws Exception {
checkInsertOnly(currentRow);
if (generateUpdateBefore || generateInsert) {
// use state to keep the previous row content if we need to generate UPDATE_BEFORE
// or use to distinguish the first row, if we need to generate INSERT
RowData preRow = state.value();
state.update(currentRow);
if (preRow == null) {
// the first row, send INSERT message
currentRow.setRowKind(RowKind.INSERT);
out.collect(currentRow);
} else {
if (!isStateTtlEnabled && equaliser.equals(preRow, currentRow)) {
// currentRow is the same as preRow and state cleaning is not enabled.
// We do not emit retraction and update message.
// If state cleaning is enabled, we have to emit messages to prevent too early
// state eviction of downstream operators.
return;
} else {
if (generateUpdateBefore) {
preRow.setRowKind(RowKind.UPDATE_BEFORE);
out.collect(preRow);
}
currentRow.setRowKind(RowKind.UPDATE_AFTER);
out.collect(currentRow);
}
}
} else {
// always send UPDATE_AFTER if INSERT is not needed
currentRow.setRowKind(RowKind.UPDATE_AFTER);
out.collect(currentRow);
}
}
/**
* Processes element to deduplicate on keys, sends current element as last row, retracts
* previous element if needed.
*
* <p>Note: we don't support stateless mode yet. Because this is not safe for Kafka tombstone
* messages which doesn't contain full content. This can be a future improvement if the
* downstream (e.g. sink) doesn't require full content for DELETE messages.
*
* @param currentRow latest row received by deduplicate function
* @param generateUpdateBefore whether need to send UPDATE_BEFORE message for updates
* @param state state of function
* @param out underlying collector
*/
public static void processLastRowOnChangelog(
RowData currentRow,
boolean generateUpdateBefore,
ValueState<RowData> state,
Collector<RowData> out,
boolean isStateTtlEnabled,
RecordEqualiser equaliser)
throws Exception {
RowData preRow = state.value();
RowKind currentKind = currentRow.getRowKind();
if (currentKind == RowKind.INSERT || currentKind == RowKind.UPDATE_AFTER) {
if (preRow == null) {
// the first row, send INSERT message
currentRow.setRowKind(RowKind.INSERT);
out.collect(currentRow);
} else {
if (!isStateTtlEnabled && areRowsWithSameContent(equaliser, preRow, currentRow)) {
// currentRow is the same as preRow and state cleaning is not enabled.
// We do not emit retraction and update message.
// If state cleaning is enabled, we have to emit messages to prevent too early
// state eviction of downstream operators.
return;
} else {
if (generateUpdateBefore) {
preRow.setRowKind(RowKind.UPDATE_BEFORE);
out.collect(preRow);
}
currentRow.setRowKind(RowKind.UPDATE_AFTER);
out.collect(currentRow);
}
}
// normalize row kind
currentRow.setRowKind(RowKind.INSERT);
// save to state
state.update(currentRow);
} else {
// DELETE or UPDATER_BEFORE
if (preRow != null) {
// always set to DELETE because this row has been removed
// even the input is UPDATE_BEFORE, there may no UPDATE_AFTER after it.
preRow.setRowKind(RowKind.DELETE);
// output the preRow instead of currentRow,
// because preRow always contains the full content.
// currentRow may only contain key parts (e.g. Kafka tombstone records).
out.collect(preRow);
// clear state as the row has been removed
state.clear();
}
// nothing to do if removing a non-existed row
}
}
public static void processLastRowOnChangelogWithFilter(
FilterCondition.Context context,
RowData currentRow,
boolean generateUpdateBefore,
ValueState<RowData> state,
Collector<RowData> out,
boolean isStateTtlEnabled,
RecordEqualiser equaliser,
FilterCondition filterCondition)
throws Exception {
RowData preRow = state.value();
RowKind currentKind = currentRow.getRowKind();
if (currentKind == RowKind.INSERT || currentKind == RowKind.UPDATE_AFTER) {
if (preRow == null) {
if (filterCondition.apply(context, currentRow)) {
// the first row, send INSERT message
currentRow.setRowKind(RowKind.INSERT);
out.collect(currentRow);
} else {
// return, do not update the state
return;
}
} else {
if (!isStateTtlEnabled && areRowsWithSameContent(equaliser, preRow, currentRow)) {
// currentRow is the same as preRow and state cleaning is not enabled.
// We do not emit retraction and update message.
// If state cleaning is enabled, we have to emit messages to prevent too early
// state eviction of downstream operators.
return;
} else {
if (filterCondition.apply(context, currentRow)) {
if (generateUpdateBefore) {
preRow.setRowKind(RowKind.UPDATE_BEFORE);
out.collect(preRow);
}
currentRow.setRowKind(RowKind.UPDATE_AFTER);
out.collect(currentRow);
} else {
// generate retraction, because the row does not match any longer
preRow.setRowKind(RowKind.DELETE);
out.collect(preRow);
// clear the state, there is no row we will need to retract
state.clear();
return;
}
}
}
// normalize row kind
currentRow.setRowKind(RowKind.INSERT);
// save to state
state.update(currentRow);
} else {
// DELETE or UPDATER_BEFORE
if (preRow != null) {
// always set to DELETE because this row has been removed
// even the input is UPDATE_BEFORE, there may no UPDATE_AFTER after it.
preRow.setRowKind(RowKind.DELETE);
// output the preRow instead of currentRow,
// because preRow always contains the full content.
// currentRow may only contain key parts (e.g. Kafka tombstone records).
out.collect(preRow);
// clear state as the row has been removed
state.clear();
}
// nothing to do if removing a non-existed row
}
}
/**
* Processes element to deduplicate on keys with process time semantic, sends current element if
* it is first row.
*
* @param currentRow latest row received by deduplicate function
* @param state state of function
* @param out underlying collector
*/
public static void processFirstRowOnProcTime(
RowData currentRow, ValueState<Boolean> state, Collector<RowData> out)
throws Exception {
checkInsertOnly(currentRow);
// ignore record if it is not first row
if (state.value() != null) {
return;
}
state.update(true);
// emit the first row which is INSERT message
out.collect(currentRow);
}
/**
* Collect the updated result for duplicate row.
*
* @param generateUpdateBefore flag to generate UPDATE_BEFORE message or not
* @param generateInsert flag to generate INSERT message or not
* @param preRow previous row under the key
* @param currentRow current row under the key which is the duplicate row
* @param out underlying collector
*/
public static void updateDeduplicateResult(
boolean generateUpdateBefore,
boolean generateInsert,
RowData preRow,
RowData currentRow,
Collector<RowData> out) {
if (generateUpdateBefore || generateInsert) {
if (preRow == null) {
// the first row, send INSERT message
currentRow.setRowKind(RowKind.INSERT);
out.collect(currentRow);
} else {
if (generateUpdateBefore) {
final RowKind preRowKind = preRow.getRowKind();
preRow.setRowKind(RowKind.UPDATE_BEFORE);
out.collect(preRow);
preRow.setRowKind(preRowKind);
}
currentRow.setRowKind(RowKind.UPDATE_AFTER);
out.collect(currentRow);
}
} else {
currentRow.setRowKind(RowKind.UPDATE_AFTER);
out.collect(currentRow);
}
}
/** Returns true if currentRow should be kept. */
public static boolean shouldKeepCurrentRow(
RowData preRow, RowData currentRow, int rowtimeIndex, boolean keepLastRow) {
if (keepLastRow) {
return preRow == null
|| getRowtime(preRow, rowtimeIndex) <= getRowtime(currentRow, rowtimeIndex);
} else {
return preRow == null
|| getRowtime(currentRow, rowtimeIndex) < getRowtime(preRow, rowtimeIndex);
}
}
/**
* Important: the method assumes that {@code currentRow} comes either with {@code
* RowKind.UPDATE_AFTER} or with {@code RowKind.INSERT}. It is not designed to be used for other
* cases.
*/
private static boolean areRowsWithSameContent(
RecordEqualiser equaliser, RowData prevRow, RowData currentRow) {
final RowKind currentRowKind = currentRow.getRowKind();
if (currentRowKind == RowKind.UPDATE_AFTER) {
// setting row kind to prevRowKind to check whether the row content is the same
currentRow.setRowKind(RowKind.INSERT);
final boolean result = equaliser.equals(prevRow, currentRow);
currentRow.setRowKind(currentRowKind);
return result;
}
return equaliser.equals(prevRow, currentRow);
}
private static long getRowtime(RowData input, int rowtimeIndex) {
return input.getLong(rowtimeIndex);
}
/** check message should be insert only. */
public static void checkInsertOnly(RowData currentRow) {
Preconditions.checkArgument(currentRow.getRowKind() == RowKind.INSERT);
}
private DeduplicateFunctionHelper() {}
}
| DeduplicateFunctionHelper |
java | assertj__assertj-core | assertj-core/src/test/java/org/assertj/core/api/CharSequenceAssertBaseTest.java | {
"start": 767,
"end": 892
} | class ____ {@link CharSequenceAssert} tests.
*
* @author Olivier Michallat
* @author Mikhail Mazursky
*/
public abstract | for |
java | apache__maven | impl/maven-testing/src/main/java/org/apache/maven/api/di/testing/MavenDIExtension.java | {
"start": 6353,
"end": 8998
} | class ____ the component to look up
* @param qualifier The qualifier for the component
* @return The component instance
* @throws DIException if lookup fails
*/
protected <T> T lookup(Class<T> componentClass, Object qualifier) throws DIException {
return getInjector().getInstance(Key.ofType(componentClass, qualifier));
}
/**
* Releases a component back to the container.
* Currently a placeholder for future implementation.
*
* @param component The component to release
* @throws DIException if release fails
*/
protected void release(Object component) throws DIException {
// TODO: implement
// getInjector().release(component);
}
/**
* Creates a File object for a path relative to the base directory.
*
* @param path The relative path
* @return A File object representing the path
*/
public static File getTestFile(String path) {
return new File(getBasedir(), path);
}
/**
* Creates a File object for a path relative to a specified base directory.
*
* @param basedir The base directory path
* @param path The relative path
* @return A File object representing the path
*/
public static File getTestFile(String basedir, String path) {
File basedirFile = new File(basedir);
if (!basedirFile.isAbsolute()) {
basedirFile = getTestFile(basedir);
}
return new File(basedirFile, path);
}
/**
* Returns the absolute path for a path relative to the base directory.
*
* @param path The relative path
* @return The absolute path
*/
public static String getTestPath(String path) {
return getTestFile(path).getAbsolutePath();
}
/**
* Returns the absolute path for a path relative to a specified base directory.
*
* @param basedir The base directory path
* @param path The relative path
* @return The absolute path
*/
public static String getTestPath(String basedir, String path) {
return getTestFile(basedir, path).getAbsolutePath();
}
/**
* Returns the base directory for test execution.
* Uses the "basedir" system property if set, otherwise uses the current directory.
*
* @return The base directory path
*/
public static String getBasedir() {
if (basedir != null) {
return basedir;
}
basedir = System.getProperty("basedir");
if (basedir == null) {
basedir = new File("").getAbsolutePath();
}
return basedir;
}
}
| of |
java | apache__camel | dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/TwilioEndpointBuilderFactory.java | {
"start": 28006,
"end": 28812
} | interface ____
extends
EndpointProducerBuilder {
default AdvancedTwilioEndpointProducerBuilder advanced() {
return (AdvancedTwilioEndpointProducerBuilder) this;
}
/**
* Sets the name of a parameter to be passed in the exchange In Body.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: common
*
* @param inBody the value to set
* @return the dsl builder
*/
default TwilioEndpointProducerBuilder inBody(String inBody) {
doSetProperty("inBody", inBody);
return this;
}
}
/**
* Advanced builder for endpoint producers for the Twilio component.
*/
public | TwilioEndpointProducerBuilder |
java | elastic__elasticsearch | server/src/test/java/org/elasticsearch/search/lookup/LeafStoredFieldsLookupTests.java | {
"start": 768,
"end": 2243
} | class ____ extends ESTestCase {
private LeafStoredFieldsLookup buildFieldsLookup() {
MappedFieldType fieldType = mock(MappedFieldType.class);
when(fieldType.name()).thenReturn("field");
// Add 10 when valueForDisplay is called so it is easy to be sure it *was* called
when(fieldType.valueForDisplay(any())).then(invocation -> (Double) invocation.getArguments()[0] + 10);
return new LeafStoredFieldsLookup(
field -> field.equals("field") || field.equals("alias") ? fieldType : null,
(fieldLookup, doc) -> fieldLookup.setValues(List.of(2.718))
);
}
public void testBasicLookup() {
LeafStoredFieldsLookup fieldsLookup = buildFieldsLookup();
FieldLookup fieldLookup = fieldsLookup.get("field");
assertEquals("field", fieldLookup.fieldType().name());
List<Object> values = fieldLookup.getValues();
assertNotNull(values);
assertEquals(1, values.size());
assertEquals(12.718, values.get(0));
}
public void testLookupWithFieldAlias() {
LeafStoredFieldsLookup fieldsLookup = buildFieldsLookup();
FieldLookup fieldLookup = fieldsLookup.get("alias");
assertEquals("field", fieldLookup.fieldType().name());
List<Object> values = fieldLookup.getValues();
assertNotNull(values);
assertEquals(1, values.size());
assertEquals(12.718, values.get(0));
}
}
| LeafStoredFieldsLookupTests |
java | spring-projects__spring-framework | spring-webmvc/src/test/java/org/springframework/web/servlet/mvc/method/annotation/CrossOriginTests.java | {
"start": 21157,
"end": 21414
} | class ____ {
@CrossOrigin(allowCredentials = "bogus")
@RequestMapping("/bogus")
public void bogusAllowCredentialsValue() {
}
}
@Controller
@CrossOrigin(allowCredentials = "false")
private static | MethodLevelControllerWithBogusAllowCredentialsValue |
java | alibaba__nacos | naming/src/main/java/com/alibaba/nacos/naming/healthcheck/heartbeat/InstanceBeatChecker.java | {
"start": 932,
"end": 1239
} | interface ____ {
/**
* Do check for input instance.
*
* @param client client
* @param service service of instance
* @param instance instance publish info
*/
void doCheck(Client client, Service service, HealthCheckInstancePublishInfo instance);
}
| InstanceBeatChecker |
java | apache__flink | flink-examples/flink-examples-table/src/main/java/org/apache/flink/table/examples/java/basics/GettingStartedExample.java | {
"start": 8853,
"end": 9360
} | class ____ extends ScalarFunction {
// the 'eval()' method defines input and output types (reflectively extracted)
// and contains the runtime logic
public String eval(String street, String zipCode, String city) {
return normalize(street) + ", " + normalize(zipCode) + ", " + normalize(city);
}
private String normalize(String s) {
return s.toUpperCase().replaceAll("\\W", " ").replaceAll("\\s+", " ").trim();
}
}
}
| AddressNormalizer |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/cluster/block/ClusterBlock.java | {
"start": 1057,
"end": 6679
} | class ____ implements Writeable, ToXContentFragment {
private final int id;
@Nullable
private final String uuid;
private final String description;
private final EnumSet<ClusterBlockLevel> levels;
private final boolean retryable;
private final boolean disableStatePersistence;
private final boolean allowReleaseResources;
private final RestStatus status;
public ClusterBlock(StreamInput in) throws IOException {
id = in.readVInt();
uuid = in.readOptionalString();
description = in.readString();
this.levels = in.readEnumSet(ClusterBlockLevel.class);
retryable = in.readBoolean();
disableStatePersistence = in.readBoolean();
status = RestStatus.readFrom(in);
allowReleaseResources = in.readBoolean();
}
public ClusterBlock(
int id,
String description,
boolean retryable,
boolean disableStatePersistence,
boolean allowReleaseResources,
RestStatus status,
EnumSet<ClusterBlockLevel> levels
) {
this(id, null, description, retryable, disableStatePersistence, allowReleaseResources, status, levels);
}
public ClusterBlock(
int id,
String uuid,
String description,
boolean retryable,
boolean disableStatePersistence,
boolean allowReleaseResources,
RestStatus status,
EnumSet<ClusterBlockLevel> levels
) {
this.id = id;
this.uuid = uuid;
this.description = description;
this.retryable = retryable;
this.disableStatePersistence = disableStatePersistence;
this.status = status;
this.levels = levels;
this.allowReleaseResources = allowReleaseResources;
}
public int id() {
return this.id;
}
@Nullable
public String uuid() {
return uuid;
}
public String description() {
return this.description;
}
public RestStatus status() {
return this.status;
}
public EnumSet<ClusterBlockLevel> levels() {
return this.levels;
}
public boolean contains(ClusterBlockLevel level) {
return levels.contains(level);
}
/**
* Should operations get into retry state if this block is present.
*/
public boolean retryable() {
return this.retryable;
}
/**
* Should global state persistence be disabled when this block is present. Note,
* only relevant for global blocks.
*/
public boolean disableStatePersistence() {
return this.disableStatePersistence;
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject(Integer.toString(id));
if (uuid != null) {
builder.field("uuid", uuid);
}
builder.field("description", description);
builder.field("retryable", retryable);
if (disableStatePersistence) {
builder.field("disable_state_persistence", disableStatePersistence);
}
builder.startArray("levels");
for (ClusterBlockLevel level : levels) {
builder.value(level.name().toLowerCase(Locale.ROOT));
}
builder.endArray();
builder.endObject();
return builder;
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeVInt(id);
out.writeOptionalString(uuid);
out.writeString(description);
if (out.getTransportVersion().supports(TransportVersions.V_8_18_0)) {
out.writeEnumSet(levels);
} else {
// do not send ClusterBlockLevel.REFRESH to old nodes
out.writeEnumSet(filterLevels(levels, level -> ClusterBlockLevel.REFRESH.equals(level) == false));
}
out.writeBoolean(retryable);
out.writeBoolean(disableStatePersistence);
RestStatus.writeTo(out, status);
out.writeBoolean(allowReleaseResources);
}
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append(id).append(",");
if (uuid != null) {
sb.append(uuid).append(',');
}
sb.append(description).append(", blocks ");
String delimiter = "";
for (ClusterBlockLevel level : levels) {
sb.append(delimiter).append(level.name());
delimiter = ",";
}
return sb.toString();
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
final ClusterBlock that = (ClusterBlock) o;
return id == that.id && Objects.equals(uuid, that.uuid);
}
@Override
public int hashCode() {
return 31 * Integer.hashCode(id) + Objects.hashCode(uuid);
}
public boolean isAllowReleaseResources() {
return allowReleaseResources;
}
static EnumSet<ClusterBlockLevel> filterLevels(EnumSet<ClusterBlockLevel> levels, Predicate<ClusterBlockLevel> predicate) {
assert levels != null;
int size = levels.size();
if (size == 0 || (size == 1 && predicate.test(levels.iterator().next()))) {
return levels;
}
var filteredLevels = EnumSet.noneOf(ClusterBlockLevel.class);
for (ClusterBlockLevel level : levels) {
if (predicate.test(level)) {
filteredLevels.add(level);
}
}
return filteredLevels;
}
}
| ClusterBlock |
java | google__guava | android/guava/src/com/google/common/graph/MapIteratorCache.java | {
"start": 1730,
"end": 4719
} | class ____<K, V> {
private final Map<K, V> backingMap;
/*
* Per JDK: "the behavior of a map entry is undefined if the backing map has been modified after
* the entry was returned by the iterator, except through the setValue operation on the map entry"
* As such, this field must be cleared before every map mutation.
*
* Note about volatile: volatile doesn't make it safe to read from a mutable graph in one thread
* while writing to it in another. All it does is help with _reading_ from multiple threads
* concurrently. For more information, see AbstractNetworkTest.concurrentIteration.
*/
private transient volatile @Nullable Entry<K, V> cacheEntry;
MapIteratorCache(Map<K, V> backingMap) {
this.backingMap = checkNotNull(backingMap);
}
@CanIgnoreReturnValue
final @Nullable V put(K key, V value) {
checkNotNull(key);
checkNotNull(value);
clearCache();
return backingMap.put(key, value);
}
@CanIgnoreReturnValue
final @Nullable V remove(Object key) {
checkNotNull(key);
clearCache();
return backingMap.remove(key);
}
final void clear() {
clearCache();
backingMap.clear();
}
@Nullable V get(Object key) {
checkNotNull(key);
V value = getIfCached(key);
// TODO(b/192579700): Use a ternary once it no longer confuses our nullness checker.
if (value == null) {
return getWithoutCaching(key);
} else {
return value;
}
}
final @Nullable V getWithoutCaching(Object key) {
checkNotNull(key);
return backingMap.get(key);
}
final boolean containsKey(@Nullable Object key) {
return getIfCached(key) != null || backingMap.containsKey(key);
}
final Set<K> unmodifiableKeySet() {
return new AbstractSet<K>() {
@Override
public UnmodifiableIterator<K> iterator() {
Iterator<Entry<K, V>> entryIterator = backingMap.entrySet().iterator();
return new UnmodifiableIterator<K>() {
@Override
public boolean hasNext() {
return entryIterator.hasNext();
}
@Override
public K next() {
Entry<K, V> entry = entryIterator.next(); // store local reference for thread-safety
cacheEntry = entry;
return entry.getKey();
}
};
}
@Override
public int size() {
return backingMap.size();
}
@Override
public boolean contains(@Nullable Object key) {
return containsKey(key);
}
};
}
// Internal methods (package-visible, but treat as only subclass-visible)
@Nullable V getIfCached(@Nullable Object key) {
Entry<K, V> entry = cacheEntry; // store local reference for thread-safety
// Check cache. We use == on purpose because it's cheaper and a cache miss is ok.
if (entry != null && entry.getKey() == key) {
return entry.getValue();
}
return null;
}
void clearCache() {
cacheEntry = null;
}
}
| MapIteratorCache |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/snapshots/SnapshotsInfoService.java | {
"start": 535,
"end": 618
} | interface ____ {
SnapshotShardSizeInfo snapshotShardSizes();
}
| SnapshotsInfoService |
java | quarkusio__quarkus | extensions/mongodb-client/runtime/src/main/java/io/quarkus/mongodb/runtime/MongoClientCustomizer.java | {
"start": 410,
"end": 495
} | interface ____ the qualifier will be used for the default client only.
*/
public | without |
java | assertj__assertj-core | assertj-core/src/test/java/org/assertj/core/error/ShouldHaveSuperclass_create_Test.java | {
"start": 1003,
"end": 2401
} | class ____ {
@Test
void should_create_error_message_if_actual_has_superclass() {
// WHEN
String message = shouldHaveSuperclass(String.class, Integer.class).create(new TestDescription("TEST"),
STANDARD_REPRESENTATION);
// THEN
then(message).isEqualTo(format("[TEST] %n" +
"Expecting%n" +
" java.lang.String%n" +
"to have superclass:%n" +
" java.lang.Integer%n" +
"but had:%n" +
" java.lang.Object"));
}
@Test
void should_create_error_message_if_actual_has_no_superclass() {
// WHEN
String message = shouldHaveSuperclass(Object.class, Integer.class).create(new TestDescription("TEST"),
STANDARD_REPRESENTATION);
// THEN
then(message).isEqualTo(format("[TEST] %n" +
"Expecting%n" +
" java.lang.Object%n" +
"to have superclass:%n" +
" java.lang.Integer%n" +
"but had none."));
}
}
| ShouldHaveSuperclass_create_Test |
java | apache__camel | components/camel-metrics/src/main/java/org/apache/camel/component/metrics/routepolicy/MetricsRoutePolicy.java | {
"start": 2396,
"end": 7255
} | class ____ {
private final String routeId;
private final Timer responses;
private MetricsStatistics(Route route, Timer responses) {
this.routeId = route.getId();
this.responses = responses;
}
public void onExchangeBegin(Exchange exchange) {
Timer.Context context = responses.time();
exchange.setProperty("MetricsRoutePolicy-" + routeId, context);
}
public void onExchangeDone(Exchange exchange) {
Timer.Context context = (Timer.Context) exchange.removeProperty("MetricsRoutePolicy-" + routeId);
if (context != null) {
context.stop();
}
}
}
public MetricRegistry getMetricsRegistry() {
return metricsRegistry;
}
public void setMetricsRegistry(MetricRegistry metricsRegistry) {
this.metricsRegistry = metricsRegistry;
}
public boolean isUseJmx() {
return useJmx;
}
public void setUseJmx(boolean useJmx) {
this.useJmx = useJmx;
}
public String getJmxDomain() {
return jmxDomain;
}
public void setJmxDomain(String jmxDomain) {
this.jmxDomain = jmxDomain;
}
public boolean isPrettyPrint() {
return prettyPrint;
}
public void setPrettyPrint(boolean prettyPrint) {
this.prettyPrint = prettyPrint;
}
public TimeUnit getRateUnit() {
return rateUnit;
}
public void setRateUnit(TimeUnit rateUnit) {
this.rateUnit = rateUnit;
}
public TimeUnit getDurationUnit() {
return durationUnit;
}
public void setDurationUnit(TimeUnit durationUnit) {
this.durationUnit = durationUnit;
}
public String getNamePattern() {
return namePattern;
}
/**
* The name pattern to use.
* <p/>
* Uses dot as separators, but you can change that. The values <tt>##name##</tt>, <tt>##routeId##</tt>, and
* <tt>##type##</tt> will be replaced with actual value.
*/
public void setNamePattern(String namePattern) {
this.namePattern = namePattern;
}
@Override
public void onInit(Route route) {
super.onInit(route);
ManagementStrategy ms = route.getCamelContext().getManagementStrategy();
if (ms != null && ms.getManagementAgent() != null) {
registerKamelets = ms.getManagementAgent().getRegisterRoutesCreateByKamelet();
registerTemplates = ms.getManagementAgent().getRegisterRoutesCreateByTemplate();
}
this.route = route;
try {
registryService = route.getCamelContext().hasService(MetricsRegistryService.class);
if (registryService == null) {
registryService = new MetricsRegistryService();
registryService.setMetricsRegistry(getMetricsRegistry());
registryService.setUseJmx(isUseJmx());
registryService.setJmxDomain(getJmxDomain());
registryService.setPrettyPrint(isPrettyPrint());
registryService.setRateUnit(getRateUnit());
registryService.setDurationUnit(getDurationUnit());
route.getCamelContext().addService(registryService);
}
} catch (Exception e) {
throw RuntimeCamelException.wrapRuntimeCamelException(e);
}
// skip routes that should not be included
boolean skip = (route.isCreatedByKamelet() && !registerKamelets)
|| (route.isCreatedByRouteTemplate() && !registerTemplates);
if (!skip) {
// create statistics holder
// for know we record only all the timings of a complete exchange (responses)
// we have in-flight / total statistics already from camel-core
Timer responses = registryService.getMetricsRegistry().timer(createName("responses"));
statistics = new MetricsStatistics(route, responses);
}
}
private String createName(String type) {
CamelContext context = route.getCamelContext();
String name = context.getManagementName() != null ? context.getManagementName() : context.getName();
String answer = namePattern;
answer = answer.replaceFirst(NAME_TOKEN, name);
answer = answer.replaceFirst(ROUTE_ID_TOKEN, Matcher.quoteReplacement(route.getId()));
answer = answer.replaceFirst(TYPE_TOKEN, type);
return answer;
}
@Override
public void onExchangeBegin(Route route, Exchange exchange) {
if (statistics != null) {
statistics.onExchangeBegin(exchange);
}
}
@Override
public void onExchangeDone(Route route, Exchange exchange) {
if (statistics != null) {
statistics.onExchangeDone(exchange);
}
}
}
| MetricsStatistics |
java | assertj__assertj-core | assertj-core/src/main/java/org/assertj/core/internal/Uris.java | {
"start": 1981,
"end": 7676
} | class ____ {
private static final String UTF_8 = "UTF-8";
private static final String EQUAL = "=";
private static final String AND = "&";
private static final Uris INSTANCE = new Uris();
private final Failures failures = Failures.instance();
public static Uris instance() {
return INSTANCE;
}
Uris() {}
public void assertHasScheme(final AssertionInfo info, final URI actual, final String scheme) {
assertNotNull(info, actual);
if (!Objects.equals(actual.getScheme(), scheme)) throw failures.failure(info, shouldHaveScheme(actual, scheme));
}
public void assertHasPath(AssertionInfo info, URI actual, String path) {
assertNotNull(info, actual);
if (!Objects.equals(actual.getPath(), path)) throw failures.failure(info, shouldHavePath(actual, path));
}
public void assertHasPort(AssertionInfo info, URI actual, Integer expected) {
assertNotNull(info, actual);
if (actual.getPort() != expected) throw failures.failure(info, shouldHavePort(actual, expected));
}
public void assertHasHost(AssertionInfo info, URI actual, String expected) {
assertNotNull(info, actual);
requireNonNull(expected, "The expected host should not be null");
if (!Objects.equals(actual.getHost(), expected)) throw failures.failure(info, shouldHaveHost(actual, expected));
}
public void assertHasNoHost(AssertionInfo info, URI actual) {
assertNotNull(info, actual);
if (actual.getHost() != null) throw failures.failure(info, shouldHaveNoHost(actual));
}
public void assertHasAuthority(AssertionInfo info, URI actual, String expected) {
assertNotNull(info, actual);
if (!Objects.equals(actual.getAuthority(), expected))
throw failures.failure(info, shouldHaveAuthority(actual, expected));
}
public void assertHasFragment(AssertionInfo info, URI actual, String expected) {
assertNotNull(info, actual);
if (!Objects.equals(actual.getFragment(), expected)) throw failures.failure(info, shouldHaveFragment(actual, expected));
}
public void assertHasQuery(AssertionInfo info, URI actual, String expected) {
assertNotNull(info, actual);
if (!Objects.equals(actual.getQuery(), expected)) throw failures.failure(info, shouldHaveQuery(actual, expected));
}
public void assertHasUserInfo(AssertionInfo info, URI actual, String expected) {
assertNotNull(info, actual);
if (!Objects.equals(actual.getUserInfo(), expected)) throw failures.failure(info, shouldHaveUserInfo(actual, expected));
}
static Map<String, List<String>> getParameters(String query) {
Map<String, List<String>> parameters = new LinkedHashMap<>();
if (query != null && !query.isEmpty()) {
for (String pair : query.split(AND)) {
int equalIndex = pair.indexOf(EQUAL);
String key = equalIndex == -1 ? pair : pair.substring(0, equalIndex);
String value = equalIndex == -1 ? null : pair.substring(equalIndex + 1);
try {
key = URLDecoder.decode(key, UTF_8);
} catch (UnsupportedEncodingException ex) {
// UTF-8 is missing? Allow the key to remain encoded (no reasonable alternative).
}
if (value != null) {
try {
value = URLDecoder.decode(value, UTF_8);
} catch (UnsupportedEncodingException ex) {
// UTF-8 is missing? Allow the value to remain encoded (no reasonable alternative).
}
}
if (!parameters.containsKey(key)) {
parameters.put(key, new ArrayList<>());
}
parameters.get(key).add(value);
}
}
return parameters;
}
public void assertHasParameter(AssertionInfo info, URI actual, String name) {
assertNotNull(info, actual);
Map<String, List<String>> parameters = getParameters(actual.getRawQuery());
if (!parameters.containsKey(name)) throw failures.failure(info, shouldHaveParameter(actual, name));
}
public void assertHasParameter(AssertionInfo info, URI actual, String expectedParameterName,
String expectedParameterValue) {
assertNotNull(info, actual);
Map<String, List<String>> parameters = getParameters(actual.getRawQuery());
if (!parameters.containsKey(expectedParameterName))
throw failures.failure(info, shouldHaveParameter(actual, expectedParameterName, expectedParameterValue));
List<String> values = parameters.get(expectedParameterName);
if (!values.contains(expectedParameterValue))
throw failures.failure(info, shouldHaveParameter(actual, expectedParameterName, expectedParameterValue, values));
}
public void assertHasNoParameters(AssertionInfo info, URI actual) {
assertNotNull(info, actual);
Map<String, List<String>> parameters = getParameters(actual.getRawQuery());
if (!parameters.isEmpty()) throw failures.failure(info, shouldHaveNoParameters(actual, parameters.keySet()));
}
public void assertHasNoParameter(AssertionInfo info, URI actual, String name) {
assertNotNull(info, actual);
Map<String, List<String>> parameters = getParameters(actual.getRawQuery());
if (parameters.containsKey(name))
throw failures.failure(info, shouldHaveNoParameter(actual, name, parameters.get(name)));
}
public void assertHasNoParameter(AssertionInfo info, URI actual, String name, String unwantedValue) {
assertNotNull(info, actual);
Map<String, List<String>> parameters = getParameters(actual.getRawQuery());
if (parameters.containsKey(name)) {
List<String> values = parameters.get(name);
if (values.contains(unwantedValue))
throw failures.failure(info, shouldHaveNoParameter(actual, name, unwantedValue, values));
}
}
}
| Uris |
java | apache__logging-log4j2 | log4j-core-test/src/test/java/org/apache/logging/log4j/core/layout/Rfc5424LayoutTest.java | {
"start": 2988,
"end": 29754
} | class ____ {
LoggerContext ctx = LoggerContext.getContext();
Logger root = ctx.getRootLogger();
private static final String PROCESSID = ProcessIdUtil.getProcessId();
private static final String line1 =
String.format("ATM %s - [RequestContext@3692 loginId=\"JohnDoe\"] starting mdc pattern test", PROCESSID);
private static final String line2 =
String.format("ATM %s - [RequestContext@3692 loginId=\"JohnDoe\"] empty mdc", PROCESSID);
private static final String line3 =
String.format("ATM %s - [RequestContext@3692 loginId=\"JohnDoe\"] filled mdc", PROCESSID);
private static final String line4 = String.format(
"ATM %s Audit [Transfer@18060 Amount=\"200.00\" FromAccount=\"123457\" ToAccount=\"123456\"]"
+ "[RequestContext@3692 ipAddress=\"192.168.0.120\" loginId=\"JohnDoe\"] Transfer Complete",
PROCESSID);
private static final String lineEscaped3 = String.format(
"ATM %s - [RequestContext@3692 escaped=\"Testing escaping #012 \\\" \\] \\\"\" loginId=\"JohnDoe\"] filled mdc",
PROCESSID);
private static final String lineEscaped4 = String.format(
"ATM %s Audit [Transfer@18060 Amount=\"200.00\" FromAccount=\"123457\" ToAccount=\"123456\"]"
+ "[RequestContext@3692 escaped=\"Testing escaping #012 \\\" \\] \\\"\" ipAddress=\"192.168.0.120\" loginId=\"JohnDoe\"] Transfer Complete",
PROCESSID);
private static final String collectionLine1 =
"[Transfer@18060 Amount=\"200.00\" FromAccount=\"123457\" " + "ToAccount=\"123456\"]";
private static final String collectionLine2 = "[Extra@18060 Item1=\"Hello\" Item2=\"World\"]";
private static final String collectionLine3 =
"[RequestContext@3692 ipAddress=\"192.168.0.120\" loginId=\"JohnDoe\"]";
private static final String collectionEndOfLine = "Transfer Complete";
static ConfigurationFactory cf = new BasicConfigurationFactory();
@BeforeAll
static void setupClass() {
StatusLogger.getLogger().setLevel(Level.OFF);
ConfigurationFactory.setConfigurationFactory(cf);
final LoggerContext ctx = LoggerContext.getContext();
ctx.reconfigure();
}
@AfterAll
static void cleanupClass() {
ConfigurationFactory.removeConfigurationFactory(cf);
}
/**
* Test case for MDC conversion pattern.
*/
@Test
void testLayout() {
for (final Appender appender : root.getAppenders().values()) {
root.removeAppender(appender);
}
// set up appender
final AbstractStringLayout layout = Rfc5424Layout.createLayout(
Facility.LOCAL0,
"Event",
3692,
true,
"RequestContext",
null,
null,
true,
null,
"ATM",
null,
"key1, key2, locale",
null,
"loginId",
null,
true,
null,
null);
final ListAppender appender = new ListAppender("List", null, layout, true, false);
appender.start();
// set appender on root and set level to debug
root.addAppender(appender);
root.setLevel(Level.DEBUG);
ThreadContext.put("loginId", "JohnDoe");
// output starting message
root.debug("starting mdc pattern test");
root.debug("empty mdc");
ThreadContext.put("key1", "value1");
ThreadContext.put("key2", "value2");
root.debug("filled mdc");
ThreadContext.put("ipAddress", "192.168.0.120");
ThreadContext.put("locale", Locale.US.getDisplayName());
try {
final StructuredDataMessage msg = new StructuredDataMessage("Transfer@18060", "Transfer Complete", "Audit");
msg.put("ToAccount", "123456");
msg.put("FromAccount", "123457");
msg.put("Amount", "200.00");
root.info(MarkerManager.getMarker("EVENT"), msg);
List<String> list = appender.getMessages();
assertTrue(list.get(0).endsWith(line1), "Expected line 1 to end with: " + line1 + " Actual " + list.get(0));
assertTrue(list.get(1).endsWith(line2), "Expected line 2 to end with: " + line2 + " Actual " + list.get(1));
assertTrue(list.get(2).endsWith(line3), "Expected line 3 to end with: " + line3 + " Actual " + list.get(2));
assertTrue(list.get(3).endsWith(line4), "Expected line 4 to end with: " + line4 + " Actual " + list.get(3));
for (final String frame : list) {
int length = -1;
final int frameLength = frame.length();
final int firstSpacePosition = frame.indexOf(' ');
final String messageLength = frame.substring(0, firstSpacePosition);
try {
length = Integers.parseInt(messageLength);
// the ListAppender removes the ending newline, so we expect one less size
assertEquals(frameLength, messageLength.length() + length);
} catch (final NumberFormatException e) {
fail("Not a valid RFC 5425 frame");
}
}
appender.clear();
ThreadContext.remove("loginId");
root.debug("This is a test");
list = appender.getMessages();
assertTrue(list.isEmpty(), "No messages expected, found " + list.size());
} finally {
root.removeAppender(appender);
appender.stop();
}
}
/**
* Test case for MDC conversion pattern.
*/
@Test
void testCollection() {
for (final Appender appender : root.getAppenders().values()) {
root.removeAppender(appender);
}
// set up appender
final AbstractStringLayout layout = Rfc5424Layout.createLayout(
Facility.LOCAL0,
"Event",
3692,
true,
"RequestContext",
null,
null,
true,
null,
"ATM",
null,
"key1, key2, locale",
null,
"loginId",
null,
true,
null,
null);
final ListAppender appender = new ListAppender("List", null, layout, true, false);
appender.start();
// set appender on root and set level to debug
root.addAppender(appender);
root.setLevel(Level.DEBUG);
ThreadContext.put("loginId", "JohnDoe");
ThreadContext.put("ipAddress", "192.168.0.120");
ThreadContext.put("locale", Locale.US.getDisplayName());
try {
final StructuredDataMessage msg = new StructuredDataMessage("Transfer@18060", "Transfer Complete", "Audit");
msg.put("ToAccount", "123456");
msg.put("FromAccount", "123457");
msg.put("Amount", "200.00");
final StructuredDataMessage msg2 = new StructuredDataMessage("Extra@18060", null, "Audit");
msg2.put("Item1", "Hello");
msg2.put("Item2", "World");
final List<StructuredDataMessage> messages = new ArrayList<>();
messages.add(msg);
messages.add(msg2);
final StructuredDataCollectionMessage collectionMessage = new StructuredDataCollectionMessage(messages);
root.info(MarkerManager.getMarker("EVENT"), collectionMessage);
final List<String> list = appender.getMessages();
final String result = list.get(0);
assertTrue(
result.contains(collectionLine1),
"Expected line to contain " + collectionLine1 + ", Actual " + result);
assertTrue(
result.contains(collectionLine2),
"Expected line to contain " + collectionLine2 + ", Actual " + result);
assertTrue(
result.contains(collectionLine3),
"Expected line to contain " + collectionLine3 + ", Actual " + result);
assertTrue(
result.endsWith(collectionEndOfLine),
"Expected line to end with: " + collectionEndOfLine + " Actual " + result);
for (final String frame : list) {
int length = -1;
final int frameLength = frame.length();
final int firstSpacePosition = frame.indexOf(' ');
final String messageLength = frame.substring(0, firstSpacePosition);
try {
length = Integers.parseInt(messageLength);
// the ListAppender removes the ending newline, so we expect one less size
assertEquals(frameLength, messageLength.length() + length);
} catch (final NumberFormatException e) {
fail("Not a valid RFC 5425 frame");
}
}
appender.clear();
} finally {
root.removeAppender(appender);
appender.stop();
}
}
/**
* Test case for escaping newlines and other SD PARAM-NAME special characters.
*/
@Test
void testEscape() {
for (final Appender appender : root.getAppenders().values()) {
root.removeAppender(appender);
}
// set up layout/appender
final AbstractStringLayout layout = Rfc5424Layout.createLayout(
Facility.LOCAL0,
"Event",
3692,
true,
"RequestContext",
null,
null,
true,
"#012",
"ATM",
null,
"key1, key2, locale",
null,
"loginId",
null,
true,
null,
null);
final ListAppender appender = new ListAppender("List", null, layout, true, false);
appender.start();
// set appender on root and set level to debug
root.addAppender(appender);
root.setLevel(Level.DEBUG);
ThreadContext.put("loginId", "JohnDoe");
// output starting message
root.debug("starting mdc pattern test");
root.debug("empty mdc");
ThreadContext.put("escaped", "Testing escaping \n \" ] \"");
root.debug("filled mdc");
ThreadContext.put("ipAddress", "192.168.0.120");
ThreadContext.put("locale", Locale.US.getDisplayName());
try {
final StructuredDataMessage msg = new StructuredDataMessage("Transfer@18060", "Transfer Complete", "Audit");
msg.put("ToAccount", "123456");
msg.put("FromAccount", "123457");
msg.put("Amount", "200.00");
root.info(MarkerManager.getMarker("EVENT"), msg);
List<String> list = appender.getMessages();
assertTrue(list.get(0).endsWith(line1), "Expected line 1 to end with: " + line1 + " Actual " + list.get(0));
assertTrue(list.get(1).endsWith(line2), "Expected line 2 to end with: " + line2 + " Actual " + list.get(1));
assertTrue(
list.get(2).endsWith(lineEscaped3),
"Expected line 3 to end with: " + lineEscaped3 + " Actual " + list.get(2));
assertTrue(
list.get(3).endsWith(lineEscaped4),
"Expected line 4 to end with: " + lineEscaped4 + " Actual " + list.get(3));
appender.clear();
ThreadContext.remove("loginId");
root.debug("This is a test");
list = appender.getMessages();
assertTrue(list.isEmpty(), "No messages expected, found " + list.size());
} finally {
root.removeAppender(appender);
appender.stop();
}
}
/**
* Test case for MDC exception conversion pattern.
*/
@Test
void testException() {
for (final Appender appender : root.getAppenders().values()) {
root.removeAppender(appender);
}
// set up layout/appender
final AbstractStringLayout layout = Rfc5424Layout.createLayout(
Facility.LOCAL0,
"Event",
3692,
true,
"RequestContext",
null,
null,
true,
null,
"ATM",
null,
"key1, key2, locale",
null,
"loginId",
"%xEx",
true,
null,
null);
final ListAppender appender = new ListAppender("List", null, layout, true, false);
appender.start();
// set appender on root and set level to debug
root.addAppender(appender);
root.setLevel(Level.DEBUG);
ThreadContext.put("loginId", "JohnDoe");
// output starting message
root.debug("starting mdc pattern test", new IllegalArgumentException("Test"));
try {
final List<String> list = appender.getMessages();
assertTrue(list.size() > 1, "Not enough list entries");
final String string = list.get(1);
assertTrue(string.contains("IllegalArgumentException"), "No Exception in " + string);
appender.clear();
} finally {
root.removeAppender(appender);
appender.stop();
}
}
/**
* Test case for MDC logger field inclusion.
*/
@Test
void testMDCLoggerFields() {
for (final Appender appender : root.getAppenders().values()) {
root.removeAppender(appender);
}
final LoggerFields[] loggerFields = new LoggerFields[] {
LoggerFields.createLoggerFields(
new KeyValuePair[] {new KeyValuePair("source", "%C.%M")}, null, null, false),
LoggerFields.createLoggerFields(
new KeyValuePair[] {new KeyValuePair("source2", "%C.%M")}, null, null, false)
};
// set up layout/appender
final AbstractStringLayout layout = Rfc5424Layout.createLayout(
Facility.LOCAL0,
"Event",
3692,
true,
"RequestContext",
null,
null,
true,
null,
"ATM",
null,
"key1, key2, locale",
null,
null,
null,
true,
loggerFields,
null);
final ListAppender appender = new ListAppender("List", null, layout, true, false);
appender.start();
// set appender on root and set level to debug
root.addAppender(appender);
root.setLevel(Level.DEBUG);
// output starting message
root.info("starting logger fields test");
try {
final List<String> list = appender.getMessages();
assertTrue(!list.isEmpty(), "Not enough list entries");
assertTrue(list.get(0).contains("Rfc5424LayoutTest.testMDCLoggerFields"), "No class/method");
appender.clear();
} finally {
root.removeAppender(appender);
appender.stop();
}
}
@Test
void testLoggerFields() {
final String[] fields = new String[] {
"[BAZ@32473 baz=\"org.apache.logging.log4j.core.layout.Rfc5424LayoutTest.testLoggerFields\"]",
"[RequestContext@3692 bar=\"org.apache.logging.log4j.core.layout.Rfc5424LayoutTest.testLoggerFields\"]",
"[SD-ID@32473 source=\"org.apache.logging.log4j.core.layout.Rfc5424LayoutTest.testLoggerFields\"]"
};
final List<String> expectedToContain = Arrays.asList(fields);
for (final Appender appender : root.getAppenders().values()) {
root.removeAppender(appender);
}
final LoggerFields[] loggerFields = new LoggerFields[] {
LoggerFields.createLoggerFields(
new KeyValuePair[] {new KeyValuePair("source", "%C.%M")}, "SD-ID", "32473", false),
LoggerFields.createLoggerFields(
new KeyValuePair[] {new KeyValuePair("baz", "%C.%M"), new KeyValuePair("baz", "%C.%M")},
"BAZ",
"32473",
false),
LoggerFields.createLoggerFields(new KeyValuePair[] {new KeyValuePair("bar", "%C.%M")}, null, null, false)
};
final AbstractStringLayout layout = Rfc5424Layout.createLayout(
Facility.LOCAL0,
"Event",
3692,
true,
"RequestContext",
null,
null,
true,
null,
"ATM",
null,
"key1, key2, locale",
null,
null,
null,
false,
loggerFields,
null);
final ListAppender appender = new ListAppender("List", null, layout, true, false);
appender.start();
root.addAppender(appender);
root.setLevel(Level.DEBUG);
root.info("starting logger fields test");
try {
final List<String> list = appender.getMessages();
assertTrue(!list.isEmpty(), "Not enough list entries");
final String message = list.get(0);
assertTrue(message.contains("Rfc5424LayoutTest.testLoggerFields"), "No class/method");
for (final String value : expectedToContain) {
assertTrue(message.contains(value), "Message expected to contain " + value + " but did not");
}
appender.clear();
} finally {
root.removeAppender(appender);
appender.stop();
}
}
@Test
void testDiscardEmptyLoggerFields() {
final String mdcId = "RequestContext";
for (final Appender appender : root.getAppenders().values()) {
root.removeAppender(appender);
}
final LoggerFields[] loggerFields = new LoggerFields[] {
LoggerFields.createLoggerFields(
new KeyValuePair[] {
new KeyValuePair("dummy", Strings.EMPTY), new KeyValuePair("empty", Strings.EMPTY)
},
"SD-ID",
"32473",
true),
LoggerFields.createLoggerFields(
new KeyValuePair[] {new KeyValuePair("baz", "%C.%M"), new KeyValuePair("baz", "%C.%M")},
"BAZ",
"32473",
false),
LoggerFields.createLoggerFields(new KeyValuePair[] {new KeyValuePair("bar", "%C.%M")}, null, null, false)
};
final AbstractStringLayout layout = Rfc5424Layout.createLayout(
Facility.LOCAL0,
"Event",
3692,
true,
mdcId,
null,
null,
true,
null,
"ATM",
null,
"key1, key2, locale",
null,
null,
null,
false,
loggerFields,
null);
final ListAppender appender = new ListAppender("List", null, layout, true, false);
appender.start();
root.addAppender(appender);
root.setLevel(Level.DEBUG);
root.info("starting logger fields test");
try {
final List<String> list = appender.getMessages();
assertTrue(!list.isEmpty(), "Not enough list entries");
final String message = list.get(0);
assertFalse(message.contains("SD-ID"), "SD-ID should have been discarded");
assertTrue(message.contains("BAZ"), "BAZ should have been included");
assertTrue(message.contains(mdcId), mdcId + "should have been included");
appender.clear();
} finally {
root.removeAppender(appender);
appender.stop();
}
}
@Test
void testSubstituteStructuredData() {
final String mdcId = "RequestContext";
final String expectedToContain = String.format("ATM %s MSG-ID - Message", PROCESSID);
for (final Appender appender : root.getAppenders().values()) {
root.removeAppender(appender);
}
final AbstractStringLayout layout = Rfc5424Layout.createLayout(
Facility.LOCAL0,
"Event",
3692,
false,
mdcId,
null,
null,
true,
null,
"ATM",
"MSG-ID",
"key1, key2, locale",
null,
null,
null,
false,
null,
null);
final ListAppender appender = new ListAppender("List", null, layout, true, false);
appender.start();
root.addAppender(appender);
root.setLevel(Level.DEBUG);
root.info("Message");
try {
final List<String> list = appender.getMessages();
assertTrue(!list.isEmpty(), "Not enough list entries");
final String message = list.get(0);
assertTrue(message.contains(expectedToContain), "Not the expected message received");
appender.clear();
} finally {
root.removeAppender(appender);
appender.stop();
}
}
@Test
void testParameterizedMessage() {
for (final Appender appender : root.getAppenders().values()) {
root.removeAppender(appender);
}
// set up appender
final AbstractStringLayout layout = Rfc5424Layout.createLayout(
Facility.LOCAL0,
"Event",
3692,
true,
"RequestContext",
null,
null,
true,
null,
"ATM",
null,
"key1, key2, locale",
null,
null,
null,
true,
null,
null);
final ListAppender appender = new ListAppender("List", null, layout, true, false);
appender.start();
// set appender on root and set level to debug
root.addAppender(appender);
root.setLevel(Level.DEBUG);
root.info("Hello {}", "World");
try {
final List<String> list = appender.getMessages();
assertTrue(!list.isEmpty(), "Not enough list entries");
final String message = list.get(0);
assertTrue(
message.contains("Hello World"), "Incorrect message. Expected - Hello World, Actual - " + message);
} finally {
root.removeAppender(appender);
appender.stop();
}
}
@Test
void testLayoutBuilder() {
for (final Appender appender : root.getAppenders().values()) {
root.removeAppender(appender);
}
final AbstractStringLayout layout = new Rfc5424Layout.Rfc5424LayoutBuilder()
.setFacility(Facility.LOCAL0)
.setId("Event")
.setEin("1234.56.7")
.setIncludeMDC(true)
.setMdcId("RequestContext")
.setIncludeNL(true)
.setAppName("ATM")
.setExcludes("key1, key2, locale")
.setUseTLSMessageFormat(true)
.build();
final ListAppender appender = new ListAppender("List", null, layout, true, false);
appender.start();
root.addAppender(appender);
root.setLevel(Level.DEBUG);
root.info("Hello {}", "World");
try {
final List<String> list = appender.getMessages();
assertTrue(!list.isEmpty(), "Not enough list entries");
final String message = list.get(0);
assertTrue(
message.contains("Hello World"), "Incorrect message. Expected - Hello World, Actual - " + message);
} finally {
root.removeAppender(appender);
appender.stop();
}
}
@Test
void testLayoutBuilderDefaultValues() {
final Rfc5424Layout layout = new Rfc5424Layout.Rfc5424LayoutBuilder().build();
checkDefaultValues(layout);
final PluginManager manager = new PluginManager(Node.CATEGORY);
manager.collectPlugins();
final Object obj = new PluginBuilder(manager.getPluginType("Rfc5424Layout"))
.withConfigurationNode(new Node())
.withConfiguration(new DefaultConfiguration())
.build();
assertInstanceOf(Rfc5424Layout.class, obj);
checkDefaultValues((Rfc5424Layout) obj);
}
private void checkDefaultValues(final Rfc5424Layout layout) {
assertNotNull(layout);
assertEquals(Facility.LOCAL0, layout.getFacility());
assertEquals(String.valueOf(Rfc5424Layout.DEFAULT_ENTERPRISE_NUMBER), layout.getEnterpriseNumber());
assertTrue(layout.isIncludeMdc());
assertEquals(Rfc5424Layout.DEFAULT_MDCID, layout.getMdcId());
assertEquals(Rfc5424Layout.DEFAULT_ID, layout.getDefaultId());
}
@ParameterizedTest
@ValueSource(strings = {"123456789", "0", "2147483647", "123.45.6.78.9", "0.0.0.0.0.0.0.0.0.0.0.0.0.0"})
void testLayoutBuilderValidEids(final String eid) {
final AbstractStringLayout layout =
new Rfc5424Layout.Rfc5424LayoutBuilder().setEin(eid).build();
assertNotNull(layout);
}
@ParameterizedTest
@ValueSource(strings = {"abc", "someEid", "-1"})
void testLayoutBuilderInvalidEids(final String eid) {
final AbstractStringLayout layout =
new Rfc5424Layout.Rfc5424LayoutBuilder().setEin(eid).build();
assertNull(layout);
}
@Test
void testFQDN() throws UnknownHostException {
final String fqdn = InetAddress.getLocalHost().getCanonicalHostName();
final Rfc5424Layout layout = Rfc5424Layout.newBuilder().build();
assertThat(layout.getLocalHostName()).isEqualTo(fqdn);
}
}
| Rfc5424LayoutTest |
java | elastic__elasticsearch | x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/oidc/OpenIdConnectPrepareAuthenticationRequestTests.java | {
"start": 757,
"end": 3831
} | class ____ extends ESTestCase {
public void testSerialization() throws IOException {
final OpenIdConnectPrepareAuthenticationRequest request = new OpenIdConnectPrepareAuthenticationRequest();
request.setRealmName("oidc-realm1");
final BytesStreamOutput out = new BytesStreamOutput();
request.writeTo(out);
final OpenIdConnectPrepareAuthenticationRequest deserialized = new OpenIdConnectPrepareAuthenticationRequest(
out.bytes().streamInput()
);
assertThat(deserialized.getRealmName(), equalTo("oidc-realm1"));
final OpenIdConnectPrepareAuthenticationRequest request2 = new OpenIdConnectPrepareAuthenticationRequest();
request2.setIssuer("https://op.company.org/");
final BytesStreamOutput out2 = new BytesStreamOutput();
request2.writeTo(out2);
final OpenIdConnectPrepareAuthenticationRequest deserialized2 = new OpenIdConnectPrepareAuthenticationRequest(
out2.bytes().streamInput()
);
assertThat(deserialized2.getIssuer(), equalTo("https://op.company.org/"));
}
public void testSerializationWithStateAndNonce() throws IOException {
final OpenIdConnectPrepareAuthenticationRequest request = new OpenIdConnectPrepareAuthenticationRequest();
final String nonce = randomAlphaOfLengthBetween(8, 12);
final String state = randomAlphaOfLengthBetween(8, 12);
request.setRealmName("oidc-realm1");
request.setNonce(nonce);
request.setState(state);
final BytesStreamOutput out = new BytesStreamOutput();
request.writeTo(out);
final OpenIdConnectPrepareAuthenticationRequest deserialized = new OpenIdConnectPrepareAuthenticationRequest(
out.bytes().streamInput()
);
assertThat(deserialized.getRealmName(), equalTo("oidc-realm1"));
assertThat(deserialized.getState(), equalTo(state));
assertThat(deserialized.getNonce(), equalTo(nonce));
}
public void testValidation() {
final OpenIdConnectPrepareAuthenticationRequest request = new OpenIdConnectPrepareAuthenticationRequest();
final ActionRequestValidationException validation = request.validate();
assertNotNull(validation);
assertThat(validation.validationErrors(), hasSize(1));
assertThat(validation.validationErrors().get(0), containsString("one of [realm, issuer] must be provided"));
final OpenIdConnectPrepareAuthenticationRequest request2 = new OpenIdConnectPrepareAuthenticationRequest();
request2.setRealmName("oidc-realm1");
request2.setIssuer("https://op.company.org/");
final ActionRequestValidationException validation2 = request2.validate();
assertNotNull(validation2);
assertThat(validation2.validationErrors(), hasSize(1));
assertThat(
validation2.validationErrors().get(0),
containsString("only one of [realm, issuer] can be provided in the same request")
);
}
}
| OpenIdConnectPrepareAuthenticationRequestTests |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/annotations/derivedidentities/e5/c/Person.java | {
"start": 322,
"end": 373
} | class ____ {
@Id @GeneratedValue
Integer id;
}
| Person |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/plugins/ReloadablePlugin.java | {
"start": 805,
"end": 934
} | interface ____ order to reread the values of {@code SecureSetting}s and
* then rebuild any dependent internal members.
*/
public | in |
java | apache__flink | flink-core/src/main/java/org/apache/flink/api/common/operators/DualInputOperator.java | {
"start": 2442,
"end": 7067
} | class ____ the user function.
* @param name The given name for the operator, used in plans, logs and progress messages.
*/
protected DualInputOperator(
UserCodeWrapper<FT> stub,
BinaryOperatorInformation<IN1, IN2, OUT> operatorInfo,
String name) {
super(stub, operatorInfo, name);
this.keyFields1 = this.keyFields2 = new int[0];
}
/**
* Creates a new abstract dual-input operator with the given name wrapping the given user
* function. This constructor is specialized only for operator that require no keys for their
* processing.
*
* @param stub The object containing the user function.
* @param keyPositions1 The positions of the fields in the first input that act as keys.
* @param keyPositions2 The positions of the fields in the second input that act as keys.
* @param name The given name for the operator, used in plans, logs and progress messages.
*/
protected DualInputOperator(
UserCodeWrapper<FT> stub,
BinaryOperatorInformation<IN1, IN2, OUT> operatorInfo,
int[] keyPositions1,
int[] keyPositions2,
String name) {
super(stub, operatorInfo, name);
this.keyFields1 = keyPositions1;
this.keyFields2 = keyPositions2;
}
// --------------------------------------------------------------------------------------------
/** Gets the information about the operators input/output types. */
@Override
@SuppressWarnings("unchecked")
public BinaryOperatorInformation<IN1, IN2, OUT> getOperatorInfo() {
return (BinaryOperatorInformation<IN1, IN2, OUT>) this.operatorInfo;
}
/**
* Returns the first input, or null, if none is set.
*
* @return The contract's first input.
*/
public Operator<IN1> getFirstInput() {
return this.input1;
}
/**
* Returns the second input, or null, if none is set.
*
* @return The contract's second input.
*/
public Operator<IN2> getSecondInput() {
return this.input2;
}
/** Clears this operator's first input. */
public void clearFirstInput() {
this.input1 = null;
}
/** Clears this operator's second input. */
public void clearSecondInput() {
this.input2 = null;
}
/**
* Clears all previous connections and connects the first input to the task wrapped in this
* contract
*
* @param input The contract that is connected as the first input.
*/
public void setFirstInput(Operator<IN1> input) {
this.input1 = input;
}
/**
* Clears all previous connections and connects the second input to the task wrapped in this
* contract
*
* @param input The contract that is connected as the second input.
*/
public void setSecondInput(Operator<IN2> input) {
this.input2 = input;
}
// --------------------------------------------------------------------------------------------
public DualInputSemanticProperties getSemanticProperties() {
return this.semanticProperties;
}
public void setSemanticProperties(DualInputSemanticProperties semanticProperties) {
this.semanticProperties = semanticProperties;
}
// --------------------------------------------------------------------------------------------
@Override
public final int getNumberOfInputs() {
return 2;
}
@Override
public int[] getKeyColumns(int inputNum) {
if (inputNum == 0) {
return this.keyFields1;
} else if (inputNum == 1) {
return this.keyFields2;
} else {
throw new IndexOutOfBoundsException();
}
}
// --------------------------------------------------------------------------------------------
@Override
public void accept(Visitor<Operator<?>> visitor) {
boolean descend = visitor.preVisit(this);
if (descend) {
this.input1.accept(visitor);
this.input2.accept(visitor);
for (Operator<?> c : this.broadcastInputs.values()) {
c.accept(visitor);
}
visitor.postVisit(this);
}
}
// --------------------------------------------------------------------------------------------
protected abstract List<OUT> executeOnCollections(
List<IN1> inputData1,
List<IN2> inputData2,
RuntimeContext runtimeContext,
ExecutionConfig executionConfig)
throws Exception;
}
| containing |
java | alibaba__nacos | common/src/main/java/com/alibaba/nacos/common/packagescan/classreading/ClassReader.java | {
"start": 20017,
"end": 20964
} | class ____ or adapters.</i>
*
* @param constantPoolEntryIndex the index a constant pool entry in the class's constant pool
* table.
* @return the start offset in this {@link ClassReader} of the corresponding JVMS 'cp_info'
* structure, plus one.
*/
public int getItem(final int constantPoolEntryIndex) {
return cpInfoOffsets[constantPoolEntryIndex];
}
/**
* Returns a conservative estimate of the maximum length of the strings contained in the class's
* constant pool table.
*
* @return a conservative estimate of the maximum length of the strings contained in the class's
* constant pool table.
*/
public int getMaxStringLength() {
return maxStringLength;
}
/**
* Reads a byte value in this {@link ClassReader}. <i>This method is intended for
* Attribute sub classes, and is normally not needed by | generators |
java | ReactiveX__RxJava | src/test/java/io/reactivex/rxjava3/internal/operators/flowable/FlowableRefCountTest.java | {
"start": 25710,
"end": 28130
} | class ____ extends Exception {
private static final long serialVersionUID = -6763898015338136119L;
public final Object data;
ExceptionData(Object data) {
this.data = data;
}
}
static final int GC_SLEEP_TIME = 250;
@Test
public void publishNoLeak() throws Exception {
System.gc();
Thread.sleep(GC_SLEEP_TIME);
source = Flowable.fromCallable(new Callable<Object>() {
@Override
public Object call() throws Exception {
throw new ExceptionData(new byte[100 * 1000 * 1000]);
}
})
.publish()
.refCount();
long start = ManagementFactory.getMemoryMXBean().getHeapMemoryUsage().getUsed();
source.subscribe(Functions.emptyConsumer(), Functions.emptyConsumer());
long after = TestHelper.awaitGC(GC_SLEEP_TIME, 20, start + 20 * 1000 * 1000);
source = null;
assertTrue(String.format("%,3d -> %,3d%n", start, after), start + 20 * 1000 * 1000 > after);
}
@Test
public void publishNoLeak2() throws Exception {
System.gc();
Thread.sleep(GC_SLEEP_TIME);
long start = ManagementFactory.getMemoryMXBean().getHeapMemoryUsage().getUsed();
source = Flowable.fromCallable(new Callable<Object>() {
@Override
public Object call() throws Exception {
return new byte[100 * 1000 * 1000];
}
}).concatWith(Flowable.never())
.publish()
.refCount();
TestSubscriber<Object> d1 = source.test();
TestSubscriber<Object> d2 = source.test();
d1.cancel();
d2.cancel();
d1 = null;
d2 = null;
long after = TestHelper.awaitGC(GC_SLEEP_TIME, 20, start + 20 * 1000 * 1000);
source = null;
assertTrue(String.format("%,3d -> %,3d%n", start, after), start + 20 * 1000 * 1000 > after);
}
@Test
public void replayIsUnsubscribed() {
ConnectableFlowable<Integer> cf = Flowable.just(1)
.replay();
if (cf instanceof Disposable) {
assertTrue(((Disposable)cf).isDisposed());
Disposable connection = cf.connect();
assertFalse(((Disposable)cf).isDisposed());
connection.dispose();
assertTrue(((Disposable)cf).isDisposed());
}
}
static final | ExceptionData |
java | micronaut-projects__micronaut-core | inject/src/main/java/io/micronaut/context/exceptions/NoSuchBeanException.java | {
"start": 901,
"end": 3337
} | class ____ extends BeanContextException {
private static final String MESSAGE_PREFIX = "No bean of type [";
private static final String MESSAGE_SUFFIX = "] exists.";
private static final String MESSAGE_EXISTS = "] exists";
private static final String MESSAGE_FOR_THE_GIVEN_QUALIFIER = " for the given qualifier: ";
/**
* @param beanType The bean type
*/
public NoSuchBeanException(@NonNull Class<?> beanType) {
super(MESSAGE_PREFIX + beanType.getName() + MESSAGE_SUFFIX + additionalMessage());
}
/**
* @param beanType The bean type
*/
public NoSuchBeanException(@NonNull Argument<?> beanType) {
super(MESSAGE_PREFIX + beanType.getTypeName() + MESSAGE_SUFFIX + additionalMessage());
}
/**
* @param beanType The bean type
* @param qualifier The qualifier
* @param <T> The type
*/
public <T> NoSuchBeanException(@NonNull Class<T> beanType, @Nullable Qualifier<T> qualifier) {
super(MESSAGE_PREFIX + beanType.getName() + MESSAGE_EXISTS + (qualifier != null ? MESSAGE_FOR_THE_GIVEN_QUALIFIER + qualifier : "") + "." + additionalMessage());
}
/**
* @param beanType The bean type
* @param qualifier The qualifier
* @param <T> The type
*/
public <T> NoSuchBeanException(@NonNull Argument<T> beanType, @Nullable Qualifier<T> qualifier) {
super(MESSAGE_PREFIX + beanType.getTypeName() + MESSAGE_EXISTS + (qualifier != null ? MESSAGE_FOR_THE_GIVEN_QUALIFIER + qualifier : "") + "." + additionalMessage());
}
/**
* @param beanType The bean type
* @param qualifier The qualifier
* @param message The message
* @param <T> The type
* @since 4.0.0
*/
public <T> NoSuchBeanException(@NonNull Argument<T> beanType, @Nullable Qualifier<T> qualifier, String message) {
super(MESSAGE_PREFIX + beanType.getTypeName() + MESSAGE_EXISTS + (qualifier != null ? MESSAGE_FOR_THE_GIVEN_QUALIFIER + qualifier : "") + ". " + message);
}
/**
* @param message The message
*/
protected NoSuchBeanException(String message) {
super(message);
}
@NonNull
private static String additionalMessage() {
return " Make sure the bean is not disabled by bean requirements (enable trace logging for 'io.micronaut.context.condition' to check) and if the bean is enabled then ensure the | NoSuchBeanException |
java | spring-projects__spring-boot | module/spring-boot-devtools/src/main/java/org/springframework/boot/devtools/restart/ClassLoaderFilesResourcePatternResolver.java | {
"start": 6397,
"end": 6961
} | class ____ extends AbstractResource {
private final String name;
private DeletedClassLoaderFileResource(String name) {
this.name = name;
}
@Override
public boolean exists() {
return false;
}
@Override
public String getDescription() {
return "Deleted: " + this.name;
}
@Override
public InputStream getInputStream() throws IOException {
throw new IOException(this.name + " has been deleted");
}
}
/**
* Factory used to create the {@link ResourcePatternResolver} delegate.
*/
private static | DeletedClassLoaderFileResource |
java | mapstruct__mapstruct | processor/src/test/java/org/mapstruct/ap/test/bugs/_1650/CPrime.java | {
"start": 198,
"end": 369
} | class ____ {
private int value;
public int getValue() {
return value;
}
public void setValue(int value) {
this.value = value;
}
}
| CPrime |
java | ReactiveX__RxJava | src/main/java/io/reactivex/rxjava3/internal/functions/Functions.java | {
"start": 11065,
"end": 11471
} | class ____<T, U> implements Predicate<T> {
final Class<U> clazz;
ClassFilter(Class<U> clazz) {
this.clazz = clazz;
}
@Override
public boolean test(T t) {
return clazz.isInstance(t);
}
}
public static <T, U> Predicate<T> isInstanceOf(Class<U> clazz) {
return new ClassFilter<>(clazz);
}
static final | ClassFilter |
java | apache__flink | flink-libraries/flink-cep/src/test/java/org/apache/flink/cep/nfa/NFAIterativeConditionTimeContextTest.java | {
"start": 1568,
"end": 4139
} | class ____ extends TestLogger {
@Test
public void testEventTimestamp() throws Exception {
final Event event = event().withId(1).build();
final long timestamp = 3;
final Pattern<Event, ?> pattern =
Pattern.<Event>begin("start")
.where(
new IterativeCondition<Event>() {
@Override
public boolean filter(Event value, Context<Event> ctx)
throws Exception {
return ctx.timestamp() == timestamp;
}
});
final NFATestHarness testHarness = forPattern(pattern).build();
final List<List<Event>> resultingPattern =
testHarness.feedRecord(new StreamRecord<>(event, timestamp));
comparePatterns(
resultingPattern, Collections.singletonList(Collections.singletonList(event)));
}
@Test
public void testCurrentProcessingTime() throws Exception {
final Event event1 = event().withId(1).build();
final Event event2 = event().withId(2).build();
final Pattern<Event, ?> pattern =
Pattern.<Event>begin("start")
.where(
new IterativeCondition<Event>() {
@Override
public boolean filter(Event value, Context<Event> ctx)
throws Exception {
return ctx.currentProcessingTime() == 3;
}
});
final TestTimerService cepTimerService = new TestTimerService();
final NFATestHarness testHarness =
forPattern(pattern).withTimerService(cepTimerService).build();
cepTimerService.setCurrentProcessingTime(1);
final List<List<Event>> resultingPatterns1 =
testHarness.feedRecord(new StreamRecord<>(event1, 7));
cepTimerService.setCurrentProcessingTime(3);
final List<List<Event>> resultingPatterns2 =
testHarness.feedRecord(new StreamRecord<>(event2, 8));
comparePatterns(resultingPatterns1, Collections.emptyList());
comparePatterns(
resultingPatterns2, Collections.singletonList(Collections.singletonList(event2)));
}
}
| NFAIterativeConditionTimeContextTest |
java | assertj__assertj-core | assertj-core/src/test/java/org/assertj/core/api/date/AbstractDateAssertWithOneIntArg_Test.java | {
"start": 937,
"end": 1421
} | class ____ factorize DateAssert tests with an int arg.
* <p>
* For the most part, date assertion tests are (whatever the concrete date assertion method invoked is) :
* <ul>
* <li>successful assertion test with an int</li>
* <li>checking that DateAssert instance used for assertions is returned to allow fluent assertions chaining</li>
* </ul>
*
* Subclasses are expected to define what is the invoked assertion method.
*
* @author Joel Costigliola
*
*/
public abstract | that |
java | apache__hadoop | hadoop-tools/hadoop-gridmix/src/test/java/org/apache/hadoop/mapred/gridmix/TestResourceUsageEmulators.java | {
"start": 13836,
"end": 23337
} | class ____
extends DefaultCpuUsageEmulator {
private int numCalls = 0;
private int unitUsage = 1;
private int cpuUsage = 0;
@Override
protected void performUnitComputation() {
++numCalls;
cpuUsage += unitUsage;
}
int getNumCalls() {
return numCalls;
}
int getCpuUsage() {
return cpuUsage;
}
void reset() {
numCalls = 0;
cpuUsage = 0;
}
void setUnitUsage(int unitUsage) {
this.unitUsage = unitUsage;
}
}
// Creates a ResourceUsageMetrics object from the target usage
static ResourceUsageMetrics createMetrics(long target) {
ResourceUsageMetrics metrics = new ResourceUsageMetrics();
metrics.setCumulativeCpuUsage(target);
metrics.setVirtualMemoryUsage(target);
metrics.setPhysicalMemoryUsage(target);
metrics.setHeapUsage(target);
return metrics;
}
/**
* Test {@link CumulativeCpuUsageEmulatorPlugin}.
*/
@Test
public void testCumulativeCpuUsageEmulatorPlugin() throws Exception {
Configuration conf = new Configuration();
long targetCpuUsage = 1000L;
int unitCpuUsage = 50;
// fake progress indicator
FakeProgressive fakeProgress = new FakeProgressive();
// fake cpu usage generator
FakeCpuUsageEmulatorCore fakeCore = new FakeCpuUsageEmulatorCore();
fakeCore.setUnitUsage(unitCpuUsage);
// a cumulative cpu usage emulator with fake core
CumulativeCpuUsageEmulatorPlugin cpuPlugin =
new CumulativeCpuUsageEmulatorPlugin(fakeCore);
// test with invalid or missing resource usage value
ResourceUsageMetrics invalidUsage = createMetrics(0);
cpuPlugin.initialize(conf, invalidUsage, null, null);
// test if disabled cpu emulation plugin's emulate() call is a no-operation
// this will test if the emulation plugin is disabled or not
int numCallsPre = fakeCore.getNumCalls();
long cpuUsagePre = fakeCore.getCpuUsage();
cpuPlugin.emulate();
int numCallsPost = fakeCore.getNumCalls();
long cpuUsagePost = fakeCore.getCpuUsage();
// test if no calls are made cpu usage emulator core
assertEquals(numCallsPre, numCallsPost,
"Disabled cumulative CPU usage emulation plugin works!");
// test if no calls are made cpu usage emulator core
assertEquals(cpuUsagePre, cpuUsagePost,
"Disabled cumulative CPU usage emulation plugin works!");
// test with get progress
float progress = cpuPlugin.getProgress();
assertEquals(1.0f, progress, 0f,
"Invalid progress of disabled cumulative CPU usage emulation "
+ "plugin!");
// test with valid resource usage value
ResourceUsageMetrics metrics = createMetrics(targetCpuUsage);
// fake monitor
ResourceCalculatorPlugin monitor = new FakeResourceUsageMonitor(fakeCore);
// test with default emulation interval
testEmulationAccuracy(conf, fakeCore, monitor, metrics, cpuPlugin,
targetCpuUsage, targetCpuUsage / unitCpuUsage);
// test with custom value for emulation interval of 20%
conf.setFloat(CumulativeCpuUsageEmulatorPlugin.CPU_EMULATION_PROGRESS_INTERVAL,
0.2F);
testEmulationAccuracy(conf, fakeCore, monitor, metrics, cpuPlugin,
targetCpuUsage, targetCpuUsage / unitCpuUsage);
// test if emulation interval boundary is respected (unit usage = 1)
// test the case where the current progress is less than threshold
fakeProgress = new FakeProgressive(); // initialize
fakeCore.reset();
fakeCore.setUnitUsage(1);
conf.setFloat(CumulativeCpuUsageEmulatorPlugin.CPU_EMULATION_PROGRESS_INTERVAL,
0.25F);
cpuPlugin.initialize(conf, metrics, monitor, fakeProgress);
// take a snapshot after the initialization
long initCpuUsage = monitor.getCumulativeCpuTime();
long initNumCalls = fakeCore.getNumCalls();
// test with 0 progress
testEmulationBoundary(0F, fakeCore, fakeProgress, cpuPlugin, initCpuUsage,
initNumCalls, "[no-op, 0 progress]");
// test with 24% progress
testEmulationBoundary(0.24F, fakeCore, fakeProgress, cpuPlugin,
initCpuUsage, initNumCalls, "[no-op, 24% progress]");
// test with 25% progress
// target = 1000ms, target emulation at 25% = 250ms,
// weighed target = 1000 * 0.25^4 (we are using progress^4 as the weight)
// ~ 4
// but current usage = init-usage = 100, hence expected = 100
testEmulationBoundary(0.25F, fakeCore, fakeProgress, cpuPlugin,
initCpuUsage, initNumCalls, "[op, 25% progress]");
// test with 80% progress
// target = 1000ms, target emulation at 80% = 800ms,
// weighed target = 1000 * 0.25^4 (we are using progress^4 as the weight)
// ~ 410
// current-usage = init-usage = 100, hence expected-usage = 410
testEmulationBoundary(0.80F, fakeCore, fakeProgress, cpuPlugin, 410, 410,
"[op, 80% progress]");
// now test if the final call with 100% progress ramps up the CPU usage
testEmulationBoundary(1F, fakeCore, fakeProgress, cpuPlugin, targetCpuUsage,
targetCpuUsage, "[op, 100% progress]");
// test if emulation interval boundary is respected (unit usage = 50)
// test the case where the current progress is less than threshold
fakeProgress = new FakeProgressive(); // initialize
fakeCore.reset();
fakeCore.setUnitUsage(unitCpuUsage);
conf.setFloat(CumulativeCpuUsageEmulatorPlugin.CPU_EMULATION_PROGRESS_INTERVAL,
0.40F);
cpuPlugin.initialize(conf, metrics, monitor, fakeProgress);
// take a snapshot after the initialization
initCpuUsage = monitor.getCumulativeCpuTime();
initNumCalls = fakeCore.getNumCalls();
// test with 0 progress
testEmulationBoundary(0F, fakeCore, fakeProgress, cpuPlugin, initCpuUsage,
initNumCalls, "[no-op, 0 progress]");
// test with 39% progress
testEmulationBoundary(0.39F, fakeCore, fakeProgress, cpuPlugin,
initCpuUsage, initNumCalls, "[no-op, 39% progress]");
// test with 40% progress
// target = 1000ms, target emulation at 40% = 4000ms,
// weighed target = 1000 * 0.40^4 (we are using progress^4 as the weight)
// ~ 26
// current-usage = init-usage = 100, hence expected-usage = 100
testEmulationBoundary(0.40F, fakeCore, fakeProgress, cpuPlugin,
initCpuUsage, initNumCalls, "[op, 40% progress]");
// test with 90% progress
// target = 1000ms, target emulation at 90% = 900ms,
// weighed target = 1000 * 0.90^4 (we are using progress^4 as the weight)
// ~ 657
// current-usage = init-usage = 100, hence expected-usage = 657 but
// the fake-core increases in steps of 50, hence final target = 700
testEmulationBoundary(0.90F, fakeCore, fakeProgress, cpuPlugin, 700,
700 / unitCpuUsage, "[op, 90% progress]");
// now test if the final call with 100% progress ramps up the CPU usage
testEmulationBoundary(1F, fakeCore, fakeProgress, cpuPlugin, targetCpuUsage,
targetCpuUsage / unitCpuUsage, "[op, 100% progress]");
}
// test whether the CPU usage emulator achieves the desired target using
// desired calls to the underling core engine.
private static void testEmulationAccuracy(Configuration conf,
FakeCpuUsageEmulatorCore fakeCore,
ResourceCalculatorPlugin monitor,
ResourceUsageMetrics metrics,
CumulativeCpuUsageEmulatorPlugin cpuPlugin,
long expectedTotalCpuUsage, long expectedTotalNumCalls)
throws Exception {
FakeProgressive fakeProgress = new FakeProgressive();
fakeCore.reset();
cpuPlugin.initialize(conf, metrics, monitor, fakeProgress);
int numLoops = 0;
while (fakeProgress.getProgress() < 1) {
++numLoops;
float progress = (float)numLoops / 100;
fakeProgress.setProgress(progress);
cpuPlugin.emulate();
}
// test if the resource plugin shows the expected invocations
assertEquals(expectedTotalNumCalls, fakeCore.getNumCalls(), 0L,
"Cumulative cpu usage emulator plugin failed (num calls)!");
// test if the resource plugin shows the expected usage
assertEquals(expectedTotalCpuUsage, fakeCore.getCpuUsage(), 0L,
"Cumulative cpu usage emulator plugin failed (total usage)!");
}
// tests if the CPU usage emulation plugin emulates only at the expected
// progress gaps
private static void testEmulationBoundary(float progress,
FakeCpuUsageEmulatorCore fakeCore, FakeProgressive fakeProgress,
CumulativeCpuUsageEmulatorPlugin cpuPlugin, long expectedTotalCpuUsage,
long expectedTotalNumCalls, String info) throws Exception {
fakeProgress.setProgress(progress);
cpuPlugin.emulate();
assertEquals(expectedTotalCpuUsage, fakeCore.getCpuUsage(), 0L,
"Emulation interval test for cpu usage failed " + info + "!");
assertEquals(expectedTotalNumCalls, fakeCore.getNumCalls(), 0L,
"Emulation interval test for num calls failed " + info + "!");
}
}
| FakeCpuUsageEmulatorCore |
java | FasterXML__jackson-databind | src/test/java/tools/jackson/databind/jsontype/TestSubtypes.java | {
"start": 3815,
"end": 3992
} | class ____ implements Factory1311 { }
// [databind#2515]
@JsonTypeInfo(use=JsonTypeInfo.Id.NAME, include=As.PROPERTY, property="#type")
static abstract | Factory1311ImplB |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestRenameWhileOpen.java | {
"start": 1337,
"end": 11942
} | class ____ {
{
DFSTestUtil.setNameNodeLogLevel(Level.TRACE);
}
//TODO: un-comment checkFullFile once the lease recovery is done
private static void checkFullFile(FileSystem fs, Path p) throws IOException {
//TestFileCreation.checkFullFile(fs, p);
}
/**
* open /user/dir1/file1 /user/dir2/file2
* mkdir /user/dir3
* move /user/dir1 /user/dir3
*/
@Test
public void testWhileOpenRenameParent() throws IOException {
Configuration conf = new HdfsConfiguration();
final int MAX_IDLE_TIME = 2000; // 2s
conf.setInt("ipc.client.connection.maxidletime", MAX_IDLE_TIME);
conf.setInt(DFSConfigKeys.DFS_NAMENODE_HEARTBEAT_RECHECK_INTERVAL_KEY, 1000);
conf.setInt(DFSConfigKeys.DFS_HEARTBEAT_INTERVAL_KEY, 1);
conf.setInt(DFSConfigKeys.DFS_NAMENODE_SAFEMODE_THRESHOLD_PCT_KEY, 1);
conf.setInt(DFSConfigKeys.DFS_BLOCK_SIZE_KEY, TestFileCreation.blockSize);
// create cluster
System.out.println("Test 1*****************************");
MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf).build();
FileSystem fs = null;
try {
cluster.waitActive();
fs = cluster.getFileSystem();
// Normally, the in-progress edit log would be finalized by
// FSEditLog#endCurrentLogSegment. For testing purposes, we
// disable that here.
FSEditLog spyLog =
spy(cluster.getNameNode().getFSImage().getEditLog());
doNothing().when(spyLog).endCurrentLogSegment(Mockito.anyBoolean());
DFSTestUtil.setEditLogForTesting(cluster.getNamesystem(), spyLog);
// create file1.
Path dir1 = new Path("/user/a+b/dir1");
Path file1 = new Path(dir1, "file1");
FSDataOutputStream stm1 = TestFileCreation.createFile(fs, file1, 1);
System.out.println("testFileCreationDeleteParent: "
+ "Created file " + file1);
TestFileCreation.writeFile(stm1);
stm1.hflush();
// create file2.
Path dir2 = new Path("/user/dir2");
Path file2 = new Path(dir2, "file2");
FSDataOutputStream stm2 = TestFileCreation.createFile(fs, file2, 1);
System.out.println("testFileCreationDeleteParent: "
+ "Created file " + file2);
TestFileCreation.writeFile(stm2);
stm2.hflush();
// move dir1 while file1 is open
Path dir3 = new Path("/user/dir3");
fs.mkdirs(dir3);
fs.rename(dir1, dir3);
// create file3
Path file3 = new Path(dir3, "file3");
FSDataOutputStream stm3 = fs.create(file3);
fs.rename(file3, new Path(dir3, "bozo"));
// Get a new block for the file.
TestFileCreation.writeFile(stm3, TestFileCreation.blockSize + 1);
stm3.hflush();
// Stop the NameNode before closing the files.
// This will ensure that the write leases are still active and present
// in the edit log. Simiarly, there should be a pending ADD_BLOCK_OP
// for file3, since we just added a block to that file.
cluster.getNameNode().stop();
// Restart cluster.
cluster.shutdown();
try {Thread.sleep(2*MAX_IDLE_TIME);} catch (InterruptedException e) {}
cluster = new MiniDFSCluster.Builder(conf).format(false).build();
cluster.waitActive();
// restart cluster yet again. This triggers the code to read in
// persistent leases from the edit log.
cluster.shutdown();
try {Thread.sleep(5000);} catch (InterruptedException e) {}
cluster = new MiniDFSCluster.Builder(conf).format(false).build();
cluster.waitActive();
fs = cluster.getFileSystem();
Path newfile = new Path("/user/dir3/dir1", "file1");
assertTrue(!fs.exists(file1));
assertTrue(fs.exists(file2));
assertTrue(fs.exists(newfile));
checkFullFile(fs, newfile);
} finally {
fs.close();
cluster.shutdown();
}
}
/**
* open /user/dir1/file1 /user/dir2/file2
* move /user/dir1 /user/dir3
*/
@Test
public void testWhileOpenRenameParentToNonexistentDir() throws IOException {
Configuration conf = new HdfsConfiguration();
final int MAX_IDLE_TIME = 2000; // 2s
conf.setInt("ipc.client.connection.maxidletime", MAX_IDLE_TIME);
conf.setInt(DFSConfigKeys.DFS_NAMENODE_HEARTBEAT_RECHECK_INTERVAL_KEY, 1000);
conf.setInt(DFSConfigKeys.DFS_HEARTBEAT_INTERVAL_KEY, 1);
conf.setInt(DFSConfigKeys.DFS_NAMENODE_SAFEMODE_THRESHOLD_PCT_KEY, 1);
System.out.println("Test 2************************************");
// create cluster
MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf).build();
FileSystem fs = null;
try {
cluster.waitActive();
fs = cluster.getFileSystem();
// create file1.
Path dir1 = new Path("/user/dir1");
Path file1 = new Path(dir1, "file1");
FSDataOutputStream stm1 = TestFileCreation.createFile(fs, file1, 1);
System.out.println("testFileCreationDeleteParent: "
+ "Created file " + file1);
TestFileCreation.writeFile(stm1);
stm1.hflush();
// create file2.
Path dir2 = new Path("/user/dir2");
Path file2 = new Path(dir2, "file2");
FSDataOutputStream stm2 = TestFileCreation.createFile(fs, file2, 1);
System.out.println("testFileCreationDeleteParent: "
+ "Created file " + file2);
TestFileCreation.writeFile(stm2);
stm2.hflush();
// move dir1 while file1 is open
Path dir3 = new Path("/user/dir3");
fs.rename(dir1, dir3);
// restart cluster.
// This ensures that leases are persisted in fsimage.
cluster.shutdown();
try {Thread.sleep(2*MAX_IDLE_TIME);} catch (InterruptedException e) {}
cluster = new MiniDFSCluster.Builder(conf).format(false).build();
cluster.waitActive();
// restart cluster yet again. This triggers the code to read in
// persistent leases from fsimage.
cluster.shutdown();
try {Thread.sleep(5000);} catch (InterruptedException e) {}
cluster = new MiniDFSCluster.Builder(conf).format(false).build();
cluster.waitActive();
fs = cluster.getFileSystem();
Path newfile = new Path("/user/dir3", "file1");
assertTrue(!fs.exists(file1));
assertTrue(fs.exists(file2));
assertTrue(fs.exists(newfile));
checkFullFile(fs, newfile);
} finally {
fs.close();
cluster.shutdown();
}
}
/**
* open /user/dir1/file1
* mkdir /user/dir2
* move /user/dir1/file1 /user/dir2/
*/
@Test
public void testWhileOpenRenameToExistentDirectory() throws IOException {
Configuration conf = new HdfsConfiguration();
final int MAX_IDLE_TIME = 2000; // 2s
conf.setInt("ipc.client.connection.maxidletime", MAX_IDLE_TIME);
conf.setInt(DFSConfigKeys.DFS_NAMENODE_HEARTBEAT_RECHECK_INTERVAL_KEY, 1000);
conf.setInt(DFSConfigKeys.DFS_HEARTBEAT_INTERVAL_KEY, 1);
conf.setInt(DFSConfigKeys.DFS_NAMENODE_SAFEMODE_THRESHOLD_PCT_KEY, 1);
System.out.println("Test 3************************************");
// create cluster
MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf).build();
FileSystem fs = null;
try {
cluster.waitActive();
fs = cluster.getFileSystem();
// create file1.
Path dir1 = new Path("/user/dir1");
Path file1 = new Path(dir1, "file1");
FSDataOutputStream stm1 = TestFileCreation.createFile(fs, file1, 1);
System.out.println("testFileCreationDeleteParent: " +
"Created file " + file1);
TestFileCreation.writeFile(stm1);
stm1.hflush();
Path dir2 = new Path("/user/dir2");
fs.mkdirs(dir2);
fs.rename(file1, dir2);
// restart cluster.
// This ensures that leases are persisted in fsimage.
cluster.shutdown();
try {Thread.sleep(2*MAX_IDLE_TIME);} catch (InterruptedException e) {}
cluster = new MiniDFSCluster.Builder(conf).format(false).build();
cluster.waitActive();
// restart cluster yet again. This triggers the code to read in
// persistent leases from fsimage.
cluster.shutdown();
try {Thread.sleep(5000);} catch (InterruptedException e) {}
cluster = new MiniDFSCluster.Builder(conf).format(false).build();
cluster.waitActive();
fs = cluster.getFileSystem();
Path newfile = new Path("/user/dir2", "file1");
assertTrue(!fs.exists(file1));
assertTrue(fs.exists(newfile));
checkFullFile(fs, newfile);
} finally {
fs.close();
cluster.shutdown();
}
}
/**
* open /user/dir1/file1
* move /user/dir1/file1 /user/dir2/
*/
@Test
public void testWhileOpenRenameToNonExistentDirectory() throws IOException {
Configuration conf = new HdfsConfiguration();
final int MAX_IDLE_TIME = 2000; // 2s
conf.setInt("ipc.client.connection.maxidletime", MAX_IDLE_TIME);
conf.setInt(DFSConfigKeys.DFS_NAMENODE_HEARTBEAT_RECHECK_INTERVAL_KEY, 1000);
conf.setInt(DFSConfigKeys.DFS_HEARTBEAT_INTERVAL_KEY, 1);
conf.setInt(DFSConfigKeys.DFS_NAMENODE_SAFEMODE_THRESHOLD_PCT_KEY, 1);
System.out.println("Test 4************************************");
// create cluster
MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf).build();
FileSystem fs = null;
try {
cluster.waitActive();
fs = cluster.getFileSystem();
// create file1.
Path dir1 = new Path("/user/dir1");
Path file1 = new Path(dir1, "file1");
FSDataOutputStream stm1 = TestFileCreation.createFile(fs, file1, 1);
System.out.println("testFileCreationDeleteParent: "
+ "Created file " + file1);
TestFileCreation.writeFile(stm1);
stm1.hflush();
Path dir2 = new Path("/user/dir2");
fs.rename(file1, dir2);
// restart cluster.
// This ensures that leases are persisted in fsimage.
cluster.shutdown();
try {Thread.sleep(2*MAX_IDLE_TIME);} catch (InterruptedException e) {}
cluster = new MiniDFSCluster.Builder(conf).format(false).build();
cluster.waitActive();
// restart cluster yet again. This triggers the code to read in
// persistent leases from fsimage.
cluster.shutdown();
try {Thread.sleep(5000);} catch (InterruptedException e) {}
cluster = new MiniDFSCluster.Builder(conf).format(false).build();
cluster.waitActive();
fs = cluster.getFileSystem();
Path newfile = new Path("/user", "dir2");
assertTrue(!fs.exists(file1));
assertTrue(fs.exists(newfile));
checkFullFile(fs, newfile);
} finally {
fs.close();
cluster.shutdown();
}
}
}
| TestRenameWhileOpen |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/inlineme/InlinerTest.java | {
"start": 33198,
"end": 33488
} | class ____ {
public void doTest() {
Client client = new Client();
int x = client.multiply(5, 10);
}
}
""")
.addOutputLines(
"out/Caller.java",
"""
public final | Caller |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestMaintenanceWithStriped.java | {
"start": 2728,
"end": 10671
} | class ____ {
private static final Logger LOG =
LoggerFactory.getLogger(TestMaintenanceWithStriped.class);
// heartbeat interval in seconds
private static final int HEARTBEAT_INTERVAL = 1;
// block report in msec
private static final int BLOCKREPORT_INTERVAL_MSEC = 1000;
// replication interval
private static final int NAMENODE_REPLICATION_INTERVAL = 1;
private Configuration conf;
private MiniDFSCluster cluster;
private DistributedFileSystem dfs;
private final ErasureCodingPolicy ecPolicy =
StripedFileTestUtil.getDefaultECPolicy();
private int numDNs;
private final int cellSize = ecPolicy.getCellSize();
private final int dataBlocks = ecPolicy.getNumDataUnits();
private final int parityBlocks = ecPolicy.getNumParityUnits();
private final int blockSize = cellSize * 4;
private final int blockGroupSize = blockSize * dataBlocks;
private final Path ecDir = new Path("/" + this.getClass().getSimpleName());
private HostsFileWriter hostsFileWriter;
private boolean useCombinedHostFileManager = true;
private FSNamesystem fsn;
private BlockManager bm;
protected Configuration createConfiguration() {
return new HdfsConfiguration();
}
@BeforeEach
public void setup() throws IOException {
// Set up the hosts/exclude files.
hostsFileWriter = new HostsFileWriter();
conf = createConfiguration();
if (useCombinedHostFileManager) {
conf.setClass(DFSConfigKeys.DFS_NAMENODE_HOSTS_PROVIDER_CLASSNAME_KEY,
CombinedHostFileManager.class, HostConfigManager.class);
}
hostsFileWriter.initialize(conf, "temp/admin");
conf.setInt(DFSConfigKeys.DFS_NAMENODE_HEARTBEAT_RECHECK_INTERVAL_KEY,
2000);
conf.setInt(DFSConfigKeys.DFS_HEARTBEAT_INTERVAL_KEY, HEARTBEAT_INTERVAL);
conf.setInt(DFSConfigKeys.DFS_NAMENODE_REDUNDANCY_INTERVAL_SECONDS_KEY, 1);
conf.setInt(DFSConfigKeys.DFS_BLOCKREPORT_INTERVAL_MSEC_KEY,
BLOCKREPORT_INTERVAL_MSEC);
conf.setInt(DFSConfigKeys.DFS_NAMENODE_RECONSTRUCTION_PENDING_TIMEOUT_SEC_KEY,
4);
conf.setInt(DFSConfigKeys.DFS_NAMENODE_REDUNDANCY_INTERVAL_SECONDS_KEY,
NAMENODE_REPLICATION_INTERVAL);
conf.setLong(DFSConfigKeys.DFS_BLOCK_SIZE_KEY, blockSize);
conf.setInt(
DFSConfigKeys.DFS_DN_EC_RECONSTRUCTION_STRIPED_READ_BUFFER_SIZE_KEY,
cellSize - 1);
conf.setInt(DFSConfigKeys.DFS_NAMENODE_REDUNDANCY_INTERVAL_SECONDS_KEY, 1);
conf.setBoolean(DFSConfigKeys.DFS_NAMENODE_REDUNDANCY_CONSIDERLOAD_KEY,
false);
numDNs = dataBlocks + parityBlocks + 5;
cluster = new MiniDFSCluster.Builder(conf).numDataNodes(numDNs).build();
cluster.waitActive();
dfs = cluster.getFileSystem(0);
fsn = cluster.getNamesystem();
bm = fsn.getBlockManager();
dfs.enableErasureCodingPolicy(
StripedFileTestUtil.getDefaultECPolicy().getName());
dfs.mkdirs(ecDir);
dfs.setErasureCodingPolicy(ecDir,
StripedFileTestUtil.getDefaultECPolicy().getName());
}
@AfterEach
public void teardown() throws IOException {
hostsFileWriter.cleanup();
if (cluster != null) {
cluster.shutdown();
cluster = null;
}
}
/**
* test DN maintenance with striped blocks.
* @throws Exception
*/
@Test
@Timeout(value = 120)
public void testInMaintenance() throws Exception {
//1. create EC file
// d0 d1 d2 d3 d4 d5 d6 d7 d8
final Path ecFile = new Path(ecDir, "testInMaintenance");
int writeBytes = cellSize * dataBlocks;
writeStripedFile(dfs, ecFile, writeBytes);
assertEquals(0, bm.numOfUnderReplicatedBlocks());
FileChecksum fileChecksum1 = dfs.getFileChecksum(ecFile, writeBytes);
final INodeFile fileNode = cluster.getNamesystem().getFSDirectory()
.getINode4Write(ecFile.toString()).asFile();
BlockInfo firstBlock = fileNode.getBlocks()[0];
DatanodeStorageInfo[] dnStorageInfos = bm.getStorages(firstBlock);
//2. maintenance node
// d4 d5 d6 d7 d8
int maintenanceDNIndex = 4;
int numMaintenance= 5;
List<DatanodeInfo> maintenanceNodes = new ArrayList<>();
for (int i = maintenanceDNIndex; i < numMaintenance + maintenanceDNIndex; ++i) {
maintenanceNodes.add(dnStorageInfos[i].getDatanodeDescriptor());
}
maintenanceNode(0, maintenanceNodes, AdminStates.IN_MAINTENANCE, Long.MAX_VALUE);
//3. wait for maintenance block to replicate
GenericTestUtils.waitFor(
() -> maintenanceNodes.size() == fsn.getNumInMaintenanceLiveDataNodes(),
100, 60000);
//4. check DN status, it should be reconstructed again
LocatedBlocks lbs = cluster.getNameNodeRpc().getBlockLocations(
ecFile.toString(), 0, writeBytes);
LocatedStripedBlock bg = (LocatedStripedBlock) (lbs.get(0));
BlockInfoStriped blockInfo =
(BlockInfoStriped)bm.getStoredBlock(
new Block(bg.getBlock().getBlockId()));
// So far, there are 11 total internal blocks, 6 live (d0 d1 d2 d3 d4' d5')
// and 5 in maintenance (d4 d5 d6 d7 d8) internal blocks.
assertEquals(6, bm.countNodes(blockInfo).liveReplicas());
assertEquals(5, bm.countNodes(blockInfo).maintenanceNotForReadReplicas());
FileChecksum fileChecksum2 = dfs.getFileChecksum(ecFile, writeBytes);
assertEquals(fileChecksum1, fileChecksum2, "Checksum mismatches!");
}
/* Get DFSClient to the namenode */
private static DFSClient getDfsClient(NameNode nn, Configuration conf)
throws IOException {
return new DFSClient(nn.getNameNodeAddress(), conf);
}
private byte[] writeStripedFile(DistributedFileSystem fs, Path ecFile,
int writeBytes) throws Exception {
byte[] bytes = StripedFileTestUtil.generateBytes(writeBytes);
DFSTestUtil.writeFile(fs, ecFile, new String(bytes));
StripedFileTestUtil.waitBlockGroupsReported(fs, ecFile.toString());
StripedFileTestUtil.checkData(fs, ecFile, writeBytes,
new ArrayList<DatanodeInfo>(), null, blockGroupSize);
return bytes;
}
/*
* maintenance the DN at index dnIndex or one random node if dnIndex is set
* to -1 and wait for the node to reach the given {@code waitForState}.
*/
private void maintenanceNode(int nnIndex, List<DatanodeInfo> maintenancedNodes,
AdminStates waitForState, long maintenanceExpirationInMS)
throws IOException, TimeoutException, InterruptedException {
DFSClient client = getDfsClient(cluster.getNameNode(nnIndex), conf);
DatanodeInfo[] info = client.datanodeReport(DatanodeReportType.LIVE);
// write nodename into the exclude file.
Map<String, Long> maintenanceNodes = new HashMap<>();
for (DatanodeInfo dn : maintenancedNodes) {
boolean nodeExists = false;
for (DatanodeInfo dninfo : info) {
if (dninfo.getDatanodeUuid().equals(dn.getDatanodeUuid())) {
nodeExists = true;
break;
}
}
assertTrue(nodeExists, "Datanode: " + dn + " is not LIVE");
maintenanceNodes.put(dn.getName(), maintenanceExpirationInMS);
LOG.info("Maintenance node: " + dn.getName());
}
// write node names into the json host file.
hostsFileWriter.initOutOfServiceHosts(null, maintenanceNodes);
refreshNodes(cluster.getNamesystem(nnIndex), conf);
for (DatanodeInfo dn : maintenancedNodes) {
DatanodeInfo ret = NameNodeAdapter
.getDatanode(cluster.getNamesystem(nnIndex), dn);
LOG.info("Waiting for node " + ret + " to change state to " + waitForState
+ " current state: " + ret.getAdminState());
GenericTestUtils.waitFor(
() -> ret.getAdminState() == waitForState,
100, 60000);
LOG.info("node " + ret + " reached the state " + waitForState);
}
}
private static void refreshNodes(final FSNamesystem ns,
final Configuration conf) throws IOException {
ns.getBlockManager().getDatanodeManager().refreshNodes(conf);
}
}
| TestMaintenanceWithStriped |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-router/src/main/java/org/apache/hadoop/yarn/server/router/webapp/FederationBlock.java | {
"start": 1878,
"end": 9731
} | class ____ extends RouterBlock {
private final Router router;
@Inject
FederationBlock(ViewContext ctx, Router router) {
super(router, ctx);
this.router = router;
}
@Override
public void render(Block html) {
boolean isEnabled = isYarnFederationEnabled();
// init Html Page Federation
initHtmlPageFederation(html, isEnabled);
}
/**
* Parse the capability and obtain the metric information of the cluster.
*
* @param capability metric json obtained from RM.
* @return ClusterMetricsInfo Object
*/
protected ClusterMetricsInfo getClusterMetricsInfo(String capability) {
try {
if (capability != null && !capability.isEmpty()) {
JettisonJaxbContext jettisonJaxbContext = new JettisonJaxbContext(ClusterMetricsInfo.class);
JettisonUnmarshaller jsonMarshaller = jettisonJaxbContext.createJsonUnmarshaller();
ClusterMetricsInfo clusterMetricsInfo = jsonMarshaller.unmarshalFromJSON(
new StringReader(capability), ClusterMetricsInfo.class);
return clusterMetricsInfo;
}
} catch (Exception e) {
LOG.error("Cannot parse SubCluster info", e);
}
return null;
}
/**
* Initialize the subCluster details JavaScript of the Federation page.
*
* This part of the js script will control to display or hide the detailed information
* of the subCluster when the user clicks on the subClusterId.
*
* We will obtain the specific information of a SubCluster,
* including the information of Applications, Resources, and Nodes.
*
* @param html html object
* @param subClusterDetailMap subCluster Detail Map
*/
private void initFederationSubClusterDetailTableJs(Block html,
List<Map<String, String>> subClusterDetailMap) {
Gson gson = new Gson();
html.script().$type("text/javascript").
__(" var scTableData = " + gson.toJson(subClusterDetailMap) + "; ")
.__();
html.script(root_url("static/federation/federation.js"));
}
/**
* Initialize the Html page.
*
* @param html html object
*/
private void initHtmlPageFederation(Block html, boolean isEnabled) {
List<Map<String, String>> lists = new ArrayList<>();
// Table header
TBODY<TABLE<Hamlet>> tbody =
html.table("#rms").$class("cell-border").$style("width:100%").thead().tr()
.th(".id", "SubCluster")
.th(".state", "State")
.th(".lastStartTime", "LastStartTime")
.th(".lastHeartBeat", "LastHeartBeat")
.th(".resources", "Resources")
.th(".nodes", "Nodes")
.__().__().tbody();
try {
if (isEnabled) {
initSubClusterPage(tbody, lists);
} else {
initLocalClusterPage(tbody, lists);
}
} catch (Exception e) {
LOG.error("Cannot render Router Federation.", e);
}
// Init FederationBlockTableJs
initFederationSubClusterDetailTableJs(html, lists);
// Tips
tbody.__().__().div().p().$style("color:red")
.__("*The application counts are local per subcluster").__().__();
}
/**
* Initialize the Federation page of the local-cluster.
*
* @param tbody HTML tbody.
* @param lists subCluster page data list.
*/
private void initLocalClusterPage(TBODY<TABLE<Hamlet>> tbody, List<Map<String, String>> lists) {
Configuration config = this.router.getConfig();
SubClusterInfo localCluster = getSubClusterInfoByLocalCluster(config);
if (localCluster != null) {
try {
initSubClusterPageItem(tbody, localCluster, lists);
} catch (Exception e) {
LOG.error("init LocalCluster = {} page data error.", localCluster, e);
}
}
}
/**
* Initialize the Federation page of the sub-cluster.
*
* @param tbody HTML tbody.
* @param lists subCluster page data list.
*/
private void initSubClusterPage(TBODY<TABLE<Hamlet>> tbody, List<Map<String, String>> lists) {
// Sort the SubClusters
List<SubClusterInfo> subClusters = getSubClusterInfoList();
// Iterate through the sub-clusters and display data for each sub-cluster.
// If a sub-cluster cannot display data, skip it.
for (SubClusterInfo subCluster : subClusters) {
try {
initSubClusterPageItem(tbody, subCluster, lists);
} catch (Exception e) {
LOG.error("init subCluster = {} page data error.", subCluster, e);
}
}
}
/**
* We will initialize the specific SubCluster's data within this method.
*
* @param tbody HTML TBody.
* @param subClusterInfo Sub-cluster information.
* @param lists Used to record data that needs to be displayed in JS.
*/
private void initSubClusterPageItem(TBODY<TABLE<Hamlet>> tbody,
SubClusterInfo subClusterInfo, List<Map<String, String>> lists) {
Map<String, String> subClusterMap = new HashMap<>();
// Prepare subCluster
SubClusterId subClusterId = subClusterInfo.getSubClusterId();
String subClusterIdText = subClusterId.getId();
// Prepare WebAppAddress
String webAppAddress = subClusterInfo.getRMWebServiceAddress();
String herfWebAppAddress = "";
if (webAppAddress != null && !webAppAddress.isEmpty()) {
herfWebAppAddress =
WebAppUtils.getHttpSchemePrefix(this.router.getConfig()) + webAppAddress;
}
// Prepare Capability
String capability = subClusterInfo.getCapability();
ClusterMetricsInfo subClusterMetricsInfo = getClusterMetricsInfo(capability);
if (subClusterMetricsInfo == null) {
return;
}
// Prepare LastStartTime & LastHeartBeat
Date lastStartTime = new Date(subClusterInfo.getLastStartTime());
Date lastHeartBeat = new Date(subClusterInfo.getLastHeartBeat());
// Prepare Resource
long totalMB = subClusterMetricsInfo.getTotalMB();
String totalMBDesc = StringUtils.byteDesc(totalMB * BYTES_IN_MB);
long totalVirtualCores = subClusterMetricsInfo.getTotalVirtualCores();
String resources = String.format("<memory:%s, vCores:%s>", totalMBDesc, totalVirtualCores);
// Prepare Node
long totalNodes = subClusterMetricsInfo.getTotalNodes();
long activeNodes = subClusterMetricsInfo.getActiveNodes();
String nodes = String.format("<totalNodes:%s, activeNodes:%s>", totalNodes, activeNodes);
// Prepare HTML Table
String stateStyle = "color:#dc3545;font-weight:bolder";
SubClusterState state = subClusterInfo.getState();
if (SubClusterState.SC_RUNNING == state) {
stateStyle = "color:#28a745;font-weight:bolder";
}
tbody.tr().$id(subClusterIdText)
.td().$class("details-control").a(herfWebAppAddress, subClusterIdText).__()
.td().$style(stateStyle).__(state.name()).__()
.td().__(lastStartTime).__()
.td().__(lastHeartBeat).__()
.td(resources)
.td(nodes)
.__();
// Formatted memory information
long allocatedMB = subClusterMetricsInfo.getAllocatedMB();
String allocatedMBDesc = StringUtils.byteDesc(allocatedMB * BYTES_IN_MB);
long availableMB = subClusterMetricsInfo.getAvailableMB();
String availableMBDesc = StringUtils.byteDesc(availableMB * BYTES_IN_MB);
long pendingMB = subClusterMetricsInfo.getPendingMB();
String pendingMBDesc = StringUtils.byteDesc(pendingMB * BYTES_IN_MB);
long reservedMB = subClusterMetricsInfo.getReservedMB();
String reservedMBDesc = StringUtils.byteDesc(reservedMB * BYTES_IN_MB);
subClusterMap.put("totalmemory", totalMBDesc);
subClusterMap.put("allocatedmemory", allocatedMBDesc);
subClusterMap.put("availablememory", availableMBDesc);
subClusterMap.put("pendingmemory", pendingMBDesc);
subClusterMap.put("reservedmemory", reservedMBDesc);
subClusterMap.put("subcluster", subClusterId.getId());
subClusterMap.put("capability", capability);
lists.add(subClusterMap);
}
}
| FederationBlock |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/indices/IndicesRequestCache.java | {
"start": 8725,
"end": 10966
} | class ____ implements Accountable {
private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(Key.class);
public final CacheEntity entity; // use as identity equality
public final MappingLookup.CacheKey mappingCacheKey;
public final Object readerCacheKey;
public final BytesReference value;
Key(CacheEntity entity, MappingLookup.CacheKey mappingCacheKey, Object readerCacheKey, BytesReference value) {
this.entity = entity;
this.mappingCacheKey = Objects.requireNonNull(mappingCacheKey);
this.readerCacheKey = Objects.requireNonNull(readerCacheKey);
this.value = value;
}
@Override
public long ramBytesUsed() {
return BASE_RAM_BYTES_USED + entity.ramBytesUsed() + value.length();
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
Key key = (Key) o;
if (mappingCacheKey.equals(key.mappingCacheKey) == false) return false;
if (readerCacheKey.equals(key.readerCacheKey) == false) return false;
if (entity.getCacheIdentity().equals(key.entity.getCacheIdentity()) == false) return false;
if (value.equals(key.value) == false) return false;
return true;
}
@Override
public int hashCode() {
int result = entity.getCacheIdentity().hashCode();
result = 31 * result + mappingCacheKey.hashCode();
result = 31 * result + readerCacheKey.hashCode();
result = 31 * result + value.hashCode();
return result;
}
@Override
public String toString() {
return "Key(mappingKey=["
+ mappingCacheKey
+ "],readerKey=["
+ readerCacheKey
+ "],entityKey=["
+ entity.getCacheIdentity()
+ ",value=" // BytesRef's toString already has [] so we don't add it here
+ value.toBytesRef() // BytesRef has a readable toString
+ ")";
}
}
private | Key |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/lucene/spatial/BinaryShapeDocValuesField.java | {
"start": 840,
"end": 1781
} | class ____ extends CustomDocValuesField {
private final List<IndexableField> fields;
private final CoordinateEncoder coordinateEncoder;
private final CentroidCalculator centroidCalculator;
public BinaryShapeDocValuesField(String name, CoordinateEncoder coordinateEncoder) {
super(name);
this.fields = new ArrayList<>();
this.coordinateEncoder = coordinateEncoder;
this.centroidCalculator = new CentroidCalculator();
}
public void add(List<IndexableField> fields, Geometry geometry) {
this.fields.addAll(fields);
this.centroidCalculator.add(geometry);
}
@Override
public BytesRef binaryValue() {
try {
return GeometryDocValueWriter.write(fields, coordinateEncoder, centroidCalculator);
} catch (IOException e) {
throw new ElasticsearchException("failed to encode shape", e);
}
}
}
| BinaryShapeDocValuesField |
java | dropwizard__dropwizard | dropwizard-jersey/src/main/java/io/dropwizard/jersey/optional/EmptyOptionalException.java | {
"start": 155,
"end": 466
} | class ____ extends RuntimeException {
/**
* Auto-generated by Eclipse.
*/
private static final long serialVersionUID = -3398853218754085781L;
public static final EmptyOptionalException INSTANCE = new EmptyOptionalException();
private EmptyOptionalException() { }
}
| EmptyOptionalException |
java | quarkusio__quarkus | test-framework/junit5/src/main/java/io/quarkus/test/junit/GraalVMVersion.java | {
"start": 88,
"end": 802
} | enum ____ {
GRAALVM_23_1_0(GraalVM.Version.VERSION_23_1_0),
GRAALVM_24_0_0(GraalVM.Version.VERSION_24_0_0),
GRAALVM_24_0_999(GraalVM.Version.VERSION_24_0_999),
GRAALVM_24_1_0(GraalVM.Version.VERSION_24_1_0),
GRAALVM_24_1_999(GraalVM.Version.VERSION_24_1_999),
GRAALVM_24_2_0(GraalVM.Version.VERSION_24_2_0);
private final GraalVM.Version version;
GraalVMVersion(GraalVM.Version version) {
this.version = version;
}
public GraalVM.Version getVersion() {
return version;
}
@Override
public String toString() {
return "GraalVMVersion{" +
"version=" + version.getVersionAsString() +
'}';
}
}
| GraalVMVersion |
java | google__dagger | java/dagger/testing/compile/CompilerProcessors.java | {
"start": 2479,
"end": 3233
} | class ____ extends KspBasicAnnotationProcessor {
private final ImmutableCollection<XProcessingStep> processingSteps;
private KspProcessor(
SymbolProcessorEnvironment symbolProcessorEnvironment,
ImmutableCollection<XProcessingStep> processingSteps) {
super(symbolProcessorEnvironment, CompilerTests.PROCESSING_ENV_CONFIG);
this.processingSteps = processingSteps;
}
@Override
public void initialize(XProcessingEnv env) {}
@Override
public ImmutableCollection<XProcessingStep> processingSteps() {
return processingSteps;
}
@Override
public void postRound(XProcessingEnv env, XRoundEnv roundEnv) {}
/** Provides the {@link KspComponentProcessor}. */
static final | KspProcessor |
java | spring-projects__spring-framework | spring-webmvc/src/main/java/org/springframework/web/servlet/tags/form/OptionsTag.java | {
"start": 5622,
"end": 10042
} | class ____ extends AbstractHtmlElementTag {
/**
* The {@link java.util.Collection}, {@link java.util.Map} or array of
* objects used to generate the inner '{@code option}' tags.
*/
private @Nullable Object items;
/**
* The name of the property mapped to the '{@code value}' attribute
* of the '{@code option}' tag.
*/
private @Nullable String itemValue;
/**
* The name of the property mapped to the inner text of the
* '{@code option}' tag.
*/
private @Nullable String itemLabel;
private boolean disabled;
/**
* Set the {@link java.util.Collection}, {@link java.util.Map} or array
* of objects used to generate the inner '{@code option}' tags.
* <p>Required when wishing to render '{@code option}' tags from an
* array, {@link java.util.Collection} or {@link java.util.Map}.
* <p>Typically a runtime expression.
*/
public void setItems(Object items) {
this.items = items;
}
/**
* Get the {@link java.util.Collection}, {@link java.util.Map} or array
* of objects used to generate the inner '{@code option}' tags.
* <p>Typically a runtime expression.
*/
protected @Nullable Object getItems() {
return this.items;
}
/**
* Set the name of the property mapped to the '{@code value}'
* attribute of the '{@code option}' tag.
* <p>Required when wishing to render '{@code option}' tags from
* an array or {@link java.util.Collection}.
*/
public void setItemValue(String itemValue) {
Assert.hasText(itemValue, "'itemValue' must not be empty");
this.itemValue = itemValue;
}
/**
* Return the name of the property mapped to the '{@code value}'
* attribute of the '{@code option}' tag.
*/
protected @Nullable String getItemValue() {
return this.itemValue;
}
/**
* Set the name of the property mapped to the label (inner text) of the
* '{@code option}' tag.
*/
public void setItemLabel(String itemLabel) {
Assert.hasText(itemLabel, "'itemLabel' must not be empty");
this.itemLabel = itemLabel;
}
/**
* Get the name of the property mapped to the label (inner text) of the
* '{@code option}' tag.
*/
protected @Nullable String getItemLabel() {
return this.itemLabel;
}
/**
* Set the value of the '{@code disabled}' attribute.
*/
public void setDisabled(boolean disabled) {
this.disabled = disabled;
}
/**
* Get the value of the '{@code disabled}' attribute.
*/
protected boolean isDisabled() {
return this.disabled;
}
@Override
protected int writeTagContent(TagWriter tagWriter) throws JspException {
SelectTag selectTag = getSelectTag();
Object items = getItems();
Object itemsObject = null;
if (items != null) {
itemsObject = (items instanceof String ? evaluate("items", items) : items);
}
else {
Class<?> selectTagBoundType = selectTag.getBindStatus().getValueType();
if (selectTagBoundType != null && selectTagBoundType.isEnum()) {
itemsObject = selectTagBoundType.getEnumConstants();
}
}
if (itemsObject != null) {
String selectName = selectTag.getName();
String itemValue = getItemValue();
String itemLabel = getItemLabel();
String valueProperty =
(itemValue != null ? ObjectUtils.getDisplayString(evaluate("itemValue", itemValue)) : null);
String labelProperty =
(itemLabel != null ? ObjectUtils.getDisplayString(evaluate("itemLabel", itemLabel)) : null);
String encodingToUse =
(isResponseEncodedHtmlEscape() ? this.pageContext.getResponse().getCharacterEncoding() : null);
OptionsWriter optionWriter =
new OptionsWriter(selectName, itemsObject, valueProperty, labelProperty, encodingToUse);
optionWriter.writeOptions(tagWriter);
}
return SKIP_BODY;
}
/**
* Appends a counter to a specified id,
* since we're dealing with multiple HTML elements.
*/
@Override
protected @Nullable String resolveId() throws JspException {
Object id = evaluate("id", getId());
if (id != null) {
String idString = id.toString();
return (StringUtils.hasText(idString) ? TagIdGenerator.nextId(idString, this.pageContext) : null);
}
return null;
}
private SelectTag getSelectTag() {
TagUtils.assertHasAncestorOfType(this, SelectTag.class, "options", "select");
return (SelectTag) findAncestorWithClass(this, SelectTag.class);
}
@Override
protected BindStatus getBindStatus() {
return (BindStatus) this.pageContext.getAttribute(SelectTag.LIST_VALUE_PAGE_ATTRIBUTE);
}
/**
* Inner | OptionsTag |
java | spring-projects__spring-data-jpa | spring-data-jpa/src/test/java/org/springframework/data/jpa/repository/query/AbstractStringBasedJpaQueryIntegrationTests.java | {
"start": 2086,
"end": 3646
} | class ____ {
private static final JpaQueryConfiguration CONFIG = new JpaQueryConfiguration(QueryRewriterProvider.simple(),
QueryEnhancerSelector.DEFAULT_SELECTOR, ValueExpressionDelegate.create(), EscapeCharacter.DEFAULT);
@PersistenceContext EntityManager em;
@Autowired BeanFactory beanFactory;
@Test // DATAJPA-885
void createsNormalQueryForJpaManagedReturnTypes() throws Exception {
EntityManager mock = mock(EntityManager.class);
when(mock.getDelegate()).thenReturn(mock);
when(mock.getEntityManagerFactory()).thenReturn(em.getEntityManagerFactory());
when(mock.getMetamodel()).thenReturn(em.getMetamodel());
JpaQueryMethod method = getMethod("findRolesByEmailAddress", String.class);
AbstractStringBasedJpaQuery jpaQuery = new SimpleJpaQuery(method, mock, method.getRequiredDeclaredQuery(), null,
CONFIG);
jpaQuery.createJpaQuery(method.getRequiredDeclaredQuery(), Sort.unsorted(), null,
method.getResultProcessor().getReturnedType());
verify(mock, times(1)).createQuery(anyString());
verify(mock, times(0)).createQuery(anyString(), eq(Tuple.class));
}
private JpaQueryMethod getMethod(String name, Class<?>... parameterTypes) throws Exception {
Method method = SampleRepository.class.getMethod(name, parameterTypes);
PersistenceProvider persistenceProvider = PersistenceProvider.fromEntityManager(em);
return new JpaQueryMethod(method, new DefaultRepositoryMetadata(SampleRepository.class),
new SpelAwareProxyProjectionFactory(), persistenceProvider);
}
| AbstractStringBasedJpaQueryIntegrationTests |
java | elastic__elasticsearch | x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/repositories/ClearCcrRestoreSessionAction.java | {
"start": 3644,
"end": 4137
} | class ____ extends TransportDeleteCcrRestoreSessionAction {
@Inject
public InternalTransportAction(
ActionFilters actionFilters,
TransportService transportService,
CcrRestoreSourceService ccrRestoreService,
NamedWriteableRegistry namedWriteableRegistry
) {
super(INTERNAL_NAME, actionFilters, transportService, ccrRestoreService, namedWriteableRegistry);
}
}
public static | InternalTransportAction |
java | apache__camel | dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/ClientEndpointBuilderFactory.java | {
"start": 62578,
"end": 64369
} | class ____ {
/**
* The internal instance of the builder used to access to all the
* methods representing the name of headers.
*/
private static final ClientHeaderNameBuilder INSTANCE = new ClientHeaderNameBuilder();
/**
* The value.
*
* The option is a: {@code Object} type.
*
* Group: consumer
*
* @return the name of the header {@code Iec60870Value}.
*/
public String iec60870Value() {
return "CamelIec60870Value";
}
/**
* The timestamp of the value.
*
* The option is a: {@code long} type.
*
* Group: consumer
*
* @return the name of the header {@code Iec60870Timestamp}.
*/
public String iec60870Timestamp() {
return "CamelIec60870Timestamp";
}
/**
* The quality information of the value.
*
* The option is a: {@code
* org.eclipse.neoscada.protocol.iec60870.asdu.types.QualityInformation}
* type.
*
* Group: consumer
*
* @return the name of the header {@code Iec60870Quality}.
*/
public String iec60870Quality() {
return "CamelIec60870Quality";
}
/**
* Is overflow.
*
* The option is a: {@code boolean} type.
*
* Group: consumer
*
* @return the name of the header {@code Iec60870Overflow}.
*/
public String iec60870Overflow() {
return "CamelIec60870Overflow";
}
}
static ClientEndpointBuilder endpointBuilder(String componentName, String path) {
| ClientHeaderNameBuilder |
java | netty__netty | transport/src/main/java/io/netty/channel/ChannelInboundHandlerAdapter.java | {
"start": 734,
"end": 1387
} | class ____ {@link ChannelInboundHandler} implementations which provide
* implementations of all of their methods.
*
* <p>
* This implementation just forward the operation to the next {@link ChannelHandler} in the
* {@link ChannelPipeline}. Sub-classes may override a method implementation to change this.
* </p>
* <p>
* Be aware that messages are not released after the {@link #channelRead(ChannelHandlerContext, Object)}
* method returns automatically. If you are looking for a {@link ChannelInboundHandler} implementation that
* releases the received messages automatically, please see {@link SimpleChannelInboundHandler}.
* </p>
*/
public | for |
java | alibaba__druid | core/src/test/java/com/alibaba/druid/bvt/sql/oracle/OracleMergeTest2.java | {
"start": 971,
"end": 6179
} | class ____ extends OracleTest {
public void test_0() throws Exception {
String sql = "MERGE INTO \"ESCROW\".\"HT_TASK_TRADE_HISTORY_NEW\" SNA$ " +
" USING (" +
" SELECT CURRENT$.\"ID\",CURRENT$.\"GMT_MODIFIED\"" +
" ,CURRENT$.\"GMT_CREATE\",CURRENT$.\"TRADE_ID\",CURRENT$.\"STATUS\",CURRENT$.\"OWNER\"" +
" ,CURRENT$.\"GMT_FETCH_TASK\",CURRENT$.\"GMT_FINISH_TASK\",CURRENT$.\"VERSION\"" +
" ,CURRENT$.\"RECORD_TYPE\",CURRENT$.\"TASK_FLOW_LEVEL\",CURRENT$.\"DEAL_TYPE\"" +
" ,CURRENT$.\"END_REASON\",CURRENT$.\"TRANSIT_TIME\" " +
" FROM (" +
" SELECT \"HT_TASK_TRADE_HISTORY\".\"ID\" \"ID\"" +
" ,\"HT_TASK_TRADE_HISTORY\".\"GMT_MODIFIED\" \"GMT_MODIFIED\"" +
" ,\"HT_TASK_TRADE_HISTORY\".\"GMT_CREATE\" \"GMT_CREATE\"" +
" ,\"HT_TASK_TRADE_HISTORY\".\"TRADE_ID\" \"TRADE_ID\"" +
" ,\"HT_TASK_TRADE_HISTORY\".\"STATUS\" \"STATUS\"" +
" ,\"HT_TASK_TRADE_HISTORY\".\"OWNER\" \"OWNER\"" +
" ,\"HT_TASK_TRADE_HISTORY\".\"GMT_FETCH_TASK\" \"GMT_FETCH_TASK\"" +
" ,\"HT_TASK_TRADE_HISTORY\".\"GMT_FINISH_TASK\" \"GMT_FINISH_TASK\"" +
" ,\"HT_TASK_TRADE_HISTORY\".\"VERSION\" \"VERSION\"" +
" ,\"HT_TASK_TRADE_HISTORY\".\"RECORD_TYPE\" \"RECORD_TYPE\"" +
" ,\"HT_TASK_TRADE_HISTORY\".\"TASK_FLOW_LEVEL\" \"TASK_FLOW_LEVEL\"" +
" ,\"HT_TASK_TRADE_HISTORY\".\"DEAL_TYPE\" \"DEAL_TYPE\"" +
" ,\"HT_TASK_TRADE_HISTORY\".\"END_REASON\" \"END_REASON\"" +
" ,\"HT_TASK_TRADE_HISTORY\".\"TRANSIT_TIME\" \"TRANSIT_TIME\" " +
" FROM \"ESCROW\".\"HT_TASK_TRADE_HISTORY\" \"HT_TASK_TRADE_HISTORY\"" +
" ) CURRENT$, " +
" (SELECT DISTINCT MLOG$.\"ID\" " +
" FROM \"ESCROW\".\"MLOG$_HT_TASK_TRADE_HISTOR\" MLOG$ " +
" WHERE \"SNAPTIME$$\" > :1 AND (\"DMLTYPE$$\" != 'D')" +
" ) LOG$ " +
" WHERE CURRENT$.\"ID\" = LOG$.\"ID\") AS OF SNAPSHOT(:SCN) MAS$ ON (SNA$.\"ID\" = MAS$.\"ID\") WHEN MATCHED THEN UPDATE SET SNA$.\"ID\" = MAS$.\"ID\", SNA$.\"GMT_MODIFIED\" = MAS$.\"GMT_MODIFIED\", SNA$.\"GMT_CREATE\" = MAS$.\"GMT_CREATE\", SNA$.\"TRADE_ID\" = MAS$.\"TRADE_ID\", SNA$.\"STATUS\" = MAS$.\"STATUS\", SNA$.\"OWNER\" = MAS$.\"OWNER\", SNA$.\"GMT_FETCH_TASK\" = MAS$.\"GMT_FETCH_TASK\", SNA$.\"GMT_FINISH_TASK\" = MAS$.\"GMT_FINISH_TASK\", SNA$.\"VERSION\" = MAS$.\"VERSION\", SNA$.\"RECORD_TYPE\" = MAS$.\"RECORD_TYPE\", SNA$.\"TASK_FLOW_LEVEL\" = MAS$.\"TASK_FLOW_LEVEL\", SNA$.\"DEAL_TYPE\" = MAS$.\"DEAL_TYPE\", SNA$.\"END_REASON\" = MAS$.\"END_REASON\", SNA$.\"TRANSIT_TIME\" = MAS$.\"TRANSIT_TIME\" WHEN NOT MATCHED THEN INSERT (\"ID\",\"GMT_MODIFIED\",\"GMT_CREATE\",\"TRADE_ID\",\"STATUS\",\"OWNER\",\"GMT_FETCH_TASK\",\"GMT_FINISH_TASK\",\"VERSION\",\"RECORD_TYPE\",\"TASK_FLOW_LEVEL\",\"DEAL_TYPE\",\"END_REASON\",\"TRANSIT_TIME\") VALUES (MAS$.\"ID\",MAS$.\"GMT_MODIFIED\",MAS$.\"GMT_CREATE\",MAS$.\"TRADE_ID\",MAS$.\"STATUS\",MAS$.\"OWNER\",MAS$.\"GMT_FETCH_TASK\",MAS$.\"GMT_FINISH_TASK\",MAS$.\"VERSION\",MAS$.\"RECORD_TYPE\",MAS$.\"TASK_FLOW_LEVEL\",MAS$.\"DEAL_TYPE\",MAS$.\"END_REASON\",MAS$.\"TRANSIT_TIME\")";
OracleStatementParser parser = new OracleStatementParser(sql);
List<SQLStatement> statementList = parser.parseStatementList();
SQLStatement stmt = statementList.get(0);
print(statementList);
assertEquals(1, statementList.size());
OracleSchemaStatVisitor visitor = new OracleSchemaStatVisitor();
stmt.accept(visitor);
System.out.println("Tables : " + visitor.getTables());
System.out.println("fields : " + visitor.getColumns());
System.out.println("coditions : " + visitor.getConditions());
System.out.println("relationships : " + visitor.getRelationships());
assertEquals(3, visitor.getTables().size());
assertTrue(visitor.getTables().containsKey(new TableStat.Name("ESCROW.HT_TASK_TRADE_HISTORY")));
assertTrue(visitor.getTables().containsKey(new TableStat.Name("ESCROW.MLOG$_HT_TASK_TRADE_HISTOR")));
assertTrue(visitor.getTables().containsKey(new TableStat.Name("ESCROW.HT_TASK_TRADE_HISTORY_NEW")));
assertEquals(31, visitor.getColumns().size());
// assertTrue(visitor.getColumns().contains(new TableStat.Column("employees", "employee_id")));
// assertTrue(visitor.getColumns().contains(new TableStat.Column("employees", "salary")));
// assertTrue(visitor.getColumns().contains(new TableStat.Column("employees", "department_id")));
// assertTrue(visitor.getColumns().contains(new TableStat.Column("bonuses", "employee_id")));
// assertTrue(visitor.getColumns().contains(new TableStat.Column("bonuses", "bonus")));
}
}
| OracleMergeTest2 |
java | elastic__elasticsearch | plugins/discovery-ec2/src/javaRestTest/java/org/elasticsearch/discovery/ec2/DiscoveryEc2EcsCredentialsIT.java | {
"start": 988,
"end": 2904
} | class ____ extends DiscoveryEc2ClusterFormationTestCase {
private static final String PREFIX = getIdentifierPrefix("DiscoveryEc2EcsCredentialsIT");
private static final String REGION = PREFIX + "-region";
private static final String CREDENTIALS_ENDPOINT = "/ecs_credentials_endpoint_" + PREFIX;
private static final DynamicAwsCredentials dynamicCredentials = new DynamicAwsCredentials(REGION, "ec2");
private static final Ec2ImdsHttpFixture ec2ImdsHttpFixture = new Ec2ImdsHttpFixture(
new Ec2ImdsServiceBuilder(Ec2ImdsVersion.V1).newCredentialsConsumer(dynamicCredentials::addValidCredentials)
.alternativeCredentialsEndpoints(Set.of(CREDENTIALS_ENDPOINT))
);
private static final AwsEc2HttpFixture ec2ApiFixture = new AwsEc2HttpFixture(
dynamicCredentials::isAuthorized,
DiscoveryEc2EcsCredentialsIT::getAvailableTransportEndpoints
);
private static final ElasticsearchCluster cluster = ElasticsearchCluster.local()
.nodes(2)
.plugin("discovery-ec2")
.setting(DiscoveryModule.DISCOVERY_SEED_PROVIDERS_SETTING.getKey(), Ec2DiscoveryPlugin.EC2_SEED_HOSTS_PROVIDER_NAME)
.setting("logger." + AwsEc2SeedHostsProvider.class.getCanonicalName(), "DEBUG")
.setting(Ec2ClientSettings.ENDPOINT_SETTING.getKey(), ec2ApiFixture::getAddress)
.environment("AWS_CONTAINER_CREDENTIALS_FULL_URI", () -> ec2ImdsHttpFixture.getAddress() + CREDENTIALS_ENDPOINT)
.environment("AWS_REGION", REGION)
.build();
private static List<String> getAvailableTransportEndpoints() {
return cluster.getAvailableTransportEndpoints();
}
@ClassRule
public static TestRule ruleChain = RuleChain.outerRule(ec2ImdsHttpFixture).around(ec2ApiFixture).around(cluster);
@Override
protected ElasticsearchCluster getCluster() {
return cluster;
}
}
| DiscoveryEc2EcsCredentialsIT |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.