language stringclasses 1
value | repo stringclasses 60
values | path stringlengths 22 294 | class_span dict | source stringlengths 13 1.16M | target stringlengths 1 113 |
|---|---|---|---|---|---|
java | alibaba__druid | core/src/test/java/com/alibaba/druid/bvt/sql/hive/issues/Issue5853.java | {
"start": 467,
"end": 3619
} | class ____ {
@Test
public void test_parse_create_0() {
for (String sql : new String[]{
"CREATE TABLE page_view (\n"
+ "\tviewTime INT,\n"
+ "\tuserid BIGINT,\n"
+ "\tpage_url STRING,\n"
+ "\treferrer_url STRING,\n"
+ "\tip STRING COMMENT 'IP Address of the User'\n"
+ ")\n"
+ "COMMENT 'This is the page view table'\n"
+ "PARTITIONED BY (\n"
+ "\tdt STRING,\n"
+ "\tcountry STRING\n"
+ ")\n"
+ "CLUSTERED BY (userid)\n"
+ "SORTED BY (viewTime)\n"
+ "INTO 32 BUCKETS\n"
+ "ROW FORMAT DELIMITED\n"
+ "\tFIELDS TERMINATED BY '\\001'\n"
+ "\tCOLLECTION ITEMS TERMINATED BY '\\002'\n"
+ "\tMAP KEYS TERMINATED BY '\\003'\n"
+ "STORED AS SEQUENCEFILE;",
}) {
System.out.println("原始的sql===" + sql);
SQLStatementParser parser1 = SQLParserUtils.createSQLStatementParser(sql, DbType.hive);
List<SQLStatement> statementList1 = parser1.parseStatementList();
String sqleNew = statementList1.get(0).toString();
System.out.println("生成的sql===" + sqleNew);
assertEquals(sql, sqleNew);
SQLStatementParser parser2 = SQLParserUtils.createSQLStatementParser(sqleNew, DbType.hive);
List<SQLStatement> statementList2 = parser2.parseStatementList();
String sqleNew2 = statementList2.get(0).toString();
System.out.println("再次解析生成的sql===" + sqleNew2);
assertEquals(sqleNew, sqleNew2);
}
}
@Test
public void test_parse_create_1() {
for (String sql : new String[]{
"CREATE TABLE db.route(\n"
+ "od_id string COMMENT 'OD',\n"
+ "data_dt string COMMENT 'data date')\n"
+ "CLUSTERED BY (\n"
+ "od_id)\n"
+ "INTO 8 BUCKETS\n"
+ "ROW FORMAT SERDE\n"
+ "'org.apache.hadoop.hive.ql.io.orc.OrcSerde'\n"
+ "STORED AS INPUTFORMAT\n"
+ "'org.apache.hadoop.hive.ql.io.orc.OrcInputFormat'\n"
+ "OUTPUTFORMAT\n"
+ "'org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat';",
}) {
System.out.println("原始的sql===" + sql);
SQLStatementParser parser1 = SQLParserUtils.createSQLStatementParser(sql, DbType.hive);
List<SQLStatement> statementList1 = parser1.parseStatementList();
String sqleNew = statementList1.get(0).toString();
System.out.println("生成的sql===" + sqleNew);
SQLStatementParser parser2 = SQLParserUtils.createSQLStatementParser(sqleNew, DbType.hive);
List<SQLStatement> statementList2 = parser2.parseStatementList();
String sqleNew2 = statementList2.get(0).toString();
System.out.println("再次解析生成的sql===" + sqleNew2);
assertEquals(sqleNew, sqleNew2);
}
}
}
| Issue5853 |
java | apache__camel | components/camel-jms/src/test/java/org/apache/camel/component/jms/issues/JmsReplyToComponentEndlessLoopTest.java | {
"start": 1699,
"end": 3656
} | class ____ extends AbstractJMSTest {
@Order(2)
@RegisterExtension
public static CamelContextExtension camelContextExtension = new DefaultCamelContextExtension(new ContextLifeCycleManager() {
@Override
public void afterAll(CamelContext context) {
}
@Override
public void beforeAll(CamelContext context) {
}
@Override
public void afterEach(CamelContext context) {
}
@Override
public void beforeEach(CamelContext context) {
}
});
protected CamelContext context;
protected ProducerTemplate template;
protected ConsumerTemplate consumer;
@Override
protected String getComponentName() {
return "activemq";
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
from("activemq:queue:JmsReplyToComponentEndlessLoopTest?replyTo=JmsReplyToComponentEndlessLoopTest")
.to("mock:result");
}
};
}
@Test
public void testReplyToInvalid() {
Exception ex = assertThrows(FailedToStartRouteException.class, () -> context.start(),
"Should have thrown exception");
IllegalArgumentException iae = assertIsInstanceOf(IllegalArgumentException.class, ex.getCause());
assertTrue(iae.getMessage()
.contains("ReplyTo=JmsReplyToComponentEndlessLoopTest cannot be the same as the destination name"));
}
@Override
public CamelContextExtension getCamelContextExtension() {
return camelContextExtension;
}
@BeforeEach
void setUpRequirements() {
context = camelContextExtension.getContext();
template = camelContextExtension.getProducerTemplate();
consumer = camelContextExtension.getConsumerTemplate();
}
}
| JmsReplyToComponentEndlessLoopTest |
java | apache__dubbo | dubbo-common/src/main/java/org/apache/dubbo/common/utils/Pair.java | {
"start": 1068,
"end": 3942
} | class ____<L, R> implements Map.Entry<L, R>, Comparable<Pair<L, R>>, Serializable {
private static final long serialVersionUID = 1L;
@SuppressWarnings("rawtypes")
private static final Pair NULL = new Pair<>(null, null);
private final L left;
private final R right;
public static <L, R> Pair<L, R> of(L left, R right) {
return left == null && right == null ? nullPair() : new Pair<>(left, right);
}
@SuppressWarnings("unchecked")
public static <L, R> Pair<L, R> nullPair() {
return NULL;
}
@SafeVarargs
public static <L, R> Map<L, R> toMap(Pair<L, R>... pairs) {
if (pairs == null) {
return Collections.emptyMap();
}
return toMap(Arrays.asList(pairs));
}
public static <L, R> Map<L, R> toMap(Collection<Pair<L, R>> pairs) {
if (pairs == null) {
return Collections.emptyMap();
}
Map<L, R> map = CollectionUtils.newLinkedHashMap(pairs.size());
for (Pair<L, R> pair : pairs) {
map.put(pair.getLeft(), pair.getRight());
}
return map;
}
public static <L, R> List<Pair<L, R>> toPairs(Map<L, R> map) {
if (map == null) {
return Collections.emptyList();
}
List<Pair<L, R>> pairs = new ArrayList<>(map.size());
for (Map.Entry<L, R> entry : map.entrySet()) {
pairs.add(of(entry.getKey(), entry.getValue()));
}
return pairs;
}
public Pair(L left, R right) {
this.left = left;
this.right = right;
}
public L getLeft() {
return left;
}
public R getRight() {
return right;
}
public boolean isNull() {
return this == NULL || left == null && right == null;
}
@Override
public L getKey() {
return left;
}
@Override
public R getValue() {
return right;
}
@Override
public R setValue(R value) {
throw new UnsupportedOperationException();
}
@Override
@SuppressWarnings("unchecked")
public int compareTo(Pair<L, R> other) {
return left.equals(other.left)
? ((Comparable<R>) right).compareTo(other.right)
: ((Comparable<L>) left).compareTo(other.left);
}
@Override
public int hashCode() {
return Objects.hashCode(left) ^ Objects.hashCode(right);
}
@Override
public boolean equals(Object other) {
if (this == other) {
return true;
}
if (other instanceof Map.Entry) {
Map.Entry<?, ?> that = (Map.Entry<?, ?>) other;
return Objects.equals(left, that.getKey()) && Objects.equals(right, that.getValue());
}
return false;
}
@Override
public String toString() {
return "(" + left + ", " + right + ')';
}
}
| Pair |
java | spring-projects__spring-security | oauth2/oauth2-client/src/main/java/org/springframework/security/oauth2/client/AuthorizationCodeOAuth2AuthorizedClientProvider.java | {
"start": 1266,
"end": 2759
} | class ____ implements OAuth2AuthorizedClientProvider {
/**
* Attempt to authorize the {@link OAuth2AuthorizationContext#getClientRegistration()
* client} in the provided {@code context}. Returns {@code null} if authorization is
* not supported, e.g. the client's
* {@link ClientRegistration#getAuthorizationGrantType() authorization grant type} is
* not {@link AuthorizationGrantType#AUTHORIZATION_CODE authorization_code} OR the
* client is already authorized.
* @param context the context that holds authorization-specific state for the client
* @return {@code null} if authorization is not supported or the client is already
* authorized
* @throws ClientAuthorizationRequiredException in order to trigger authorization in
* which the {@link OAuth2AuthorizationRequestRedirectFilter} will catch and initiate
* the authorization request
*/
@Override
@Nullable
public OAuth2AuthorizedClient authorize(OAuth2AuthorizationContext context) {
Assert.notNull(context, "context cannot be null");
if (AuthorizationGrantType.AUTHORIZATION_CODE.equals(
context.getClientRegistration().getAuthorizationGrantType()) && context.getAuthorizedClient() == null) {
// ClientAuthorizationRequiredException is caught by
// OAuth2AuthorizationRequestRedirectFilter which initiates authorization
throw new ClientAuthorizationRequiredException(context.getClientRegistration().getRegistrationId());
}
return null;
}
}
| AuthorizationCodeOAuth2AuthorizedClientProvider |
java | ReactiveX__RxJava | src/main/java/io/reactivex/rxjava3/internal/operators/mixed/ConcatMapXMainObserver.java | {
"start": 1279,
"end": 4169
} | class ____<T> extends AtomicInteger
implements Observer<T>, Disposable {
private static final long serialVersionUID = -3214213361171757852L;
final AtomicThrowable errors;
final int prefetch;
final ErrorMode errorMode;
SimpleQueue<T> queue;
Disposable upstream;
volatile boolean done;
volatile boolean disposed;
public ConcatMapXMainObserver(int prefetch, ErrorMode errorMode) {
this.errorMode = errorMode;
this.errors = new AtomicThrowable();
this.prefetch = prefetch;
}
@Override
public final void onSubscribe(Disposable d) {
if (DisposableHelper.validate(upstream, d)) {
upstream = d;
if (d instanceof QueueDisposable) {
@SuppressWarnings("unchecked")
QueueDisposable<T> qd = (QueueDisposable<T>)d;
int mode = qd.requestFusion(QueueFuseable.ANY | QueueFuseable.BOUNDARY);
if (mode == QueueFuseable.SYNC) {
queue = qd;
done = true;
onSubscribeDownstream();
drain();
return;
}
else if (mode == QueueFuseable.ASYNC) {
queue = qd;
onSubscribeDownstream();
return;
}
}
queue = new SpscLinkedArrayQueue<>(prefetch);
onSubscribeDownstream();
}
}
@Override
public final void onNext(T t) {
// In async fusion mode, t is a drain indicator
if (t != null) {
queue.offer(t);
}
drain();
}
@Override
public final void onError(Throwable t) {
if (errors.tryAddThrowableOrReport(t)) {
if (errorMode == ErrorMode.IMMEDIATE) {
disposeInner();
}
done = true;
drain();
}
}
@Override
public final void onComplete() {
done = true;
drain();
}
@Override
public final void dispose() {
disposed = true;
upstream.dispose();
disposeInner();
errors.tryTerminateAndReport();
if (getAndIncrement() == 0) {
queue.clear();
clearValue();
}
}
@Override
public final boolean isDisposed() {
return disposed;
}
/**
* Override this to clear values when the downstream disposes.
*/
void clearValue() {
}
/**
* Typically, this should be {@code downstream.onSubscribe(this)}.
*/
abstract void onSubscribeDownstream();
/**
* Typically, this should be {@code inner.dispose()}.
*/
abstract void disposeInner();
/**
* Implement the serialized inner subscribing and value emission here.
*/
abstract void drain();
}
| ConcatMapXMainObserver |
java | quarkusio__quarkus | extensions/qute/deployment/src/main/java/io/quarkus/qute/deployment/QuteProcessor.java | {
"start": 109037,
"end": 111680
} | class ____ of the same priority
for (Entry<DotName, Map<String, List<TemplateExtensionMethodBuildItem>>> classEntry : classToNamespaceExtensions
.entrySet()) {
Map<String, List<TemplateExtensionMethodBuildItem>> namespaceToMethods = classEntry.getValue();
for (Entry<String, List<TemplateExtensionMethodBuildItem>> nsEntry : namespaceToMethods.entrySet()) {
Map<Integer, List<TemplateExtensionMethodBuildItem>> priorityToMethods = nsEntry.getValue().stream()
.collect(Collectors.groupingBy(TemplateExtensionMethodBuildItem::getPriority));
for (Entry<Integer, List<TemplateExtensionMethodBuildItem>> priorityEntry : priorityToMethods.entrySet()) {
List<ExtensionMethodGenerator.NamespaceExtensionMethodInfo> extensionMethods = new ArrayList<>(
priorityEntry.getValue().size());
for (TemplateExtensionMethodBuildItem method : priorityEntry.getValue()) {
extensionMethods
.add(new NamespaceExtensionMethodInfo(method.getMethod(), method.getMatchName(),
Set.copyOf(method.getMatchNames()),
method.getMatchRegex(), method.getParams()));
}
String generatedType = extensionMethodGenerator.generateNamespaceResolver(
priorityEntry.getValue().get(0).getMethod().declaringClass(), nsEntry.getKey(),
priorityEntry.getKey(), extensionMethods);
for (TemplateExtensionMethodBuildItem extensionMethod : priorityEntry.getValue()) {
existingValueResolvers.add(extensionMethod.getMethod(), generatedType, applicationClassPredicate);
}
}
}
}
generatedValueResolvers.addAll(extensionMethodGenerator.getGeneratedTypes());
LOGGER.debugf("Generated %s value resolvers: %s", generatedValueResolvers.size(), generatedValueResolvers);
for (String generatedType : generatedValueResolvers) {
generatedResolvers.produce(new GeneratedValueResolverBuildItem(generatedType));
reflectiveClass.produce(ReflectiveClassBuildItem.builder(generatedType).build());
}
if (!templateGlobals.isEmpty()) {
Set<String> generatedGlobals = new HashSet<>();
// The classes for non-application globals are only generated during the first run because they can't be reloaded
// by the | and |
java | apache__flink | flink-table/flink-table-runtime/src/main/java/org/apache/flink/table/runtime/operators/wmassigners/PunctuatedWatermarkAssignerWrapper.java | {
"start": 1576,
"end": 3059
} | class ____
implements WatermarkStrategyWithPunctuatedWatermarks<RowData> {
private static final long serialVersionUID = 1L;
private final PunctuatedWatermarkAssigner assigner;
private final int timeFieldIdx;
private final DataFormatConverters.DataFormatConverter<RowData, Row> converter;
/**
* @param timeFieldIdx the index of the rowtime attribute.
* @param assigner the watermark assigner.
* @param sourceType the type of source
*/
@SuppressWarnings("unchecked")
public PunctuatedWatermarkAssignerWrapper(
PunctuatedWatermarkAssigner assigner, int timeFieldIdx, DataType sourceType) {
this.assigner = assigner;
this.timeFieldIdx = timeFieldIdx;
DataType originDataType;
if (sourceType instanceof FieldsDataType) {
originDataType = sourceType;
} else {
originDataType = DataTypes.ROW(DataTypes.FIELD("f0", sourceType));
}
converter =
DataFormatConverters.getConverterForDataType(originDataType.bridgedTo(Row.class));
}
@Nullable
@Override
public Watermark checkAndGetNextWatermark(RowData row, long extractedTimestamp) {
long timestamp = row.getLong(timeFieldIdx);
return assigner.getWatermark(converter.toExternal(row), timestamp);
}
@Override
public long extractTimestamp(RowData element, long recordTimestamp) {
return 0;
}
}
| PunctuatedWatermarkAssignerWrapper |
java | spring-projects__spring-framework | spring-core/src/test/java/org/springframework/core/BridgeMethodResolverTests.java | {
"start": 18875,
"end": 19105
} | class ____ implements GenericParameter<String> {
@Override
public String getFor(Class<String> cls) {
return "foo";
}
public String getFor(Integer integer) {
return "foo";
}
}
private static | StringGenericParameter |
java | FasterXML__jackson-databind | src/test/java/tools/jackson/databind/introspect/BeanNamingTest.java | {
"start": 445,
"end": 550
} | class ____ {
public String getURL() {
return "http:";
}
}
static | URLBean |
java | spring-projects__spring-framework | spring-beans/src/test/java/org/springframework/beans/factory/aot/BeanDefinitionMethodGeneratorFactoryTests.java | {
"start": 1599,
"end": 11199
} | class ____ {
@Test
void createWhenBeanRegistrationExcludeFilterBeanIsNotAotProcessorThrowsException() {
BeanRegistrationExcludeFilter filter = registeredBean -> false;
DefaultListableBeanFactory beanFactory = new DefaultListableBeanFactory();
beanFactory.registerSingleton("filter", filter);
assertThatIllegalStateException()
.isThrownBy(() -> new BeanDefinitionMethodGeneratorFactory(beanFactory))
.withMessageContaining("also implement an AOT processor interface");
}
@Test
void createWhenBeanRegistrationExcludeFilterFactoryIsNotAotProcessorLoads() {
BeanRegistrationExcludeFilter filter = registeredBean -> false;
MockSpringFactoriesLoader loader = new MockSpringFactoriesLoader();
loader.addInstance(BeanRegistrationExcludeFilter.class, filter);
assertThatNoException().isThrownBy(() -> new BeanDefinitionMethodGeneratorFactory(
AotServices.factories(loader)));
}
@Test
void getBeanDefinitionMethodGeneratorWhenExcludedByBeanDefinitionAttributeReturnsNull() {
MockSpringFactoriesLoader springFactoriesLoader = new MockSpringFactoriesLoader();
DefaultListableBeanFactory beanFactory = new DefaultListableBeanFactory();
RegisteredBean registeredBean = registerTestBean(beanFactory);
registeredBean.getMergedBeanDefinition().setAttribute(
BeanRegistrationAotProcessor.IGNORE_REGISTRATION_ATTRIBUTE, true);
BeanDefinitionMethodGeneratorFactory methodGeneratorFactory = new BeanDefinitionMethodGeneratorFactory(
AotServices.factoriesAndBeans(springFactoriesLoader, beanFactory));
assertThat(methodGeneratorFactory.getBeanDefinitionMethodGenerator(registeredBean)).isNull();
}
@Test
void getBeanDefinitionMethodGeneratorWhenBeanDefinitionAttributeSetToFalseDoesNotFilterBean() {
MockSpringFactoriesLoader springFactoriesLoader = new MockSpringFactoriesLoader();
DefaultListableBeanFactory beanFactory = new DefaultListableBeanFactory();
RegisteredBean registeredBean = registerTestBean(beanFactory);
registeredBean.getMergedBeanDefinition().setAttribute(
BeanRegistrationAotProcessor.IGNORE_REGISTRATION_ATTRIBUTE, false);
BeanDefinitionMethodGeneratorFactory methodGeneratorFactory = new BeanDefinitionMethodGeneratorFactory(
AotServices.factoriesAndBeans(springFactoriesLoader, beanFactory));
assertThat(methodGeneratorFactory.getBeanDefinitionMethodGenerator(registeredBean)).isNotNull();
}
@Test
void getBeanDefinitionMethodGeneratorWhenBeanDefinitionAttributeIsNotSetDoesNotFilterBean() {
MockSpringFactoriesLoader springFactoriesLoader = new MockSpringFactoriesLoader();
DefaultListableBeanFactory beanFactory = new DefaultListableBeanFactory();
RegisteredBean registeredBean = registerTestBean(beanFactory);
BeanDefinitionMethodGeneratorFactory methodGeneratorFactory = new BeanDefinitionMethodGeneratorFactory(
AotServices.factoriesAndBeans(springFactoriesLoader, beanFactory));
assertThat(methodGeneratorFactory.getBeanDefinitionMethodGenerator(registeredBean)).isNotNull();
}
@Test
void getBeanDefinitionMethodGeneratorWhenExcludedByBeanRegistrationExcludeFilterReturnsNull() {
MockSpringFactoriesLoader springFactoriesLoader = new MockSpringFactoriesLoader();
DefaultListableBeanFactory beanFactory = new DefaultListableBeanFactory();
springFactoriesLoader.addInstance(BeanRegistrationExcludeFilter.class,
new MockBeanRegistrationExcludeFilter(true, 0));
RegisteredBean registeredBean = registerTestBean(beanFactory);
BeanDefinitionMethodGeneratorFactory methodGeneratorFactory = new BeanDefinitionMethodGeneratorFactory(
AotServices.factoriesAndBeans(springFactoriesLoader, beanFactory));
assertThat(methodGeneratorFactory.getBeanDefinitionMethodGenerator(registeredBean)).isNull();
}
@Test
void getBeanDefinitionMethodGeneratorWhenExcludedByBeanRegistrationExcludeFilterBeanReturnsNull() {
MockSpringFactoriesLoader springFactoriesLoader = new MockSpringFactoriesLoader();
DefaultListableBeanFactory beanFactory = new DefaultListableBeanFactory();
RegisteredBean registeredBean = registerTestBean(beanFactory);
beanFactory.registerSingleton("filter",
new MockBeanRegistrationExcludeFilter(true, 0));
BeanDefinitionMethodGeneratorFactory methodGeneratorFactory = new BeanDefinitionMethodGeneratorFactory(
AotServices.factoriesAndBeans(springFactoriesLoader, beanFactory));
assertThat(methodGeneratorFactory.getBeanDefinitionMethodGenerator(registeredBean)).isNull();
}
@Test
void getBeanDefinitionMethodGeneratorConsidersFactoryLoadedExcludeFiltersAndBeansInOrderedOrder() {
MockBeanRegistrationExcludeFilter filter1 = new MockBeanRegistrationExcludeFilter(false, 1);
MockBeanRegistrationExcludeFilter filter2 = new MockBeanRegistrationExcludeFilter(false, 2);
MockBeanRegistrationExcludeFilter filter3 = new MockBeanRegistrationExcludeFilter(false, 3);
MockBeanRegistrationExcludeFilter filter4 = new MockBeanRegistrationExcludeFilter(true, 4);
MockBeanRegistrationExcludeFilter filter5 = new MockBeanRegistrationExcludeFilter(true, 5);
MockBeanRegistrationExcludeFilter filter6 = new MockBeanRegistrationExcludeFilter(true, 6);
MockSpringFactoriesLoader springFactoriesLoader = new MockSpringFactoriesLoader();
springFactoriesLoader.addInstance(BeanRegistrationExcludeFilter.class, filter3, filter1, filter5);
DefaultListableBeanFactory beanFactory = new DefaultListableBeanFactory();
beanFactory.registerSingleton("filter4", filter4);
beanFactory.registerSingleton("filter2", filter2);
beanFactory.registerSingleton("filter6", filter6);
RegisteredBean registeredBean = registerTestBean(beanFactory);
BeanDefinitionMethodGeneratorFactory methodGeneratorFactory = new BeanDefinitionMethodGeneratorFactory(
AotServices.factoriesAndBeans(springFactoriesLoader, beanFactory));
assertThat(methodGeneratorFactory.getBeanDefinitionMethodGenerator(registeredBean)).isNull();
assertThat(filter1.wasCalled()).isTrue();
assertThat(filter2.wasCalled()).isTrue();
assertThat(filter3.wasCalled()).isTrue();
assertThat(filter4.wasCalled()).isTrue();
assertThat(filter5.wasCalled()).isFalse();
assertThat(filter6.wasCalled()).isFalse();
}
@Test
void getBeanDefinitionMethodGeneratorAddsContributionsFromProcessors() {
DefaultListableBeanFactory beanFactory = new DefaultListableBeanFactory();
BeanRegistrationAotContribution beanContribution = mock();
BeanRegistrationAotProcessor processorBean = registeredBean -> beanContribution;
beanFactory.registerSingleton("processorBean", processorBean);
MockSpringFactoriesLoader springFactoriesLoader = new MockSpringFactoriesLoader();
BeanRegistrationAotContribution loaderContribution = mock();
BeanRegistrationAotProcessor loaderProcessor = registeredBean -> loaderContribution;
springFactoriesLoader.addInstance(BeanRegistrationAotProcessor.class,
loaderProcessor);
RegisteredBean registeredBean = registerTestBean(beanFactory);
BeanDefinitionMethodGeneratorFactory methodGeneratorFactory = new BeanDefinitionMethodGeneratorFactory(
AotServices.factoriesAndBeans(springFactoriesLoader, beanFactory));
BeanDefinitionMethodGenerator methodGenerator = methodGeneratorFactory
.getBeanDefinitionMethodGenerator(registeredBean);
assertThat(methodGenerator).extracting("aotContributions").asInstanceOf(LIST)
.containsExactly(beanContribution, loaderContribution);
}
@Test
void getBeanDefinitionMethodGeneratorWhenRegisteredBeanIsAotProcessorFiltersBean() {
MockSpringFactoriesLoader springFactoriesLoader = new MockSpringFactoriesLoader();
DefaultListableBeanFactory beanFactory = new DefaultListableBeanFactory();
beanFactory.registerBeanDefinition("test1", BeanDefinitionBuilder
.rootBeanDefinition(TestBeanFactoryInitializationAotProcessorBean.class).getBeanDefinition());
RegisteredBean registeredBean1 = RegisteredBean.of(beanFactory, "test1");
beanFactory.registerBeanDefinition("test2", BeanDefinitionBuilder
.rootBeanDefinition(TestBeanRegistrationAotProcessorBean.class).getBeanDefinition());
RegisteredBean registeredBean2 = RegisteredBean.of(beanFactory, "test2");
BeanDefinitionMethodGeneratorFactory methodGeneratorFactory = new BeanDefinitionMethodGeneratorFactory(
AotServices.factoriesAndBeans(springFactoriesLoader, beanFactory));
assertThat(methodGeneratorFactory.getBeanDefinitionMethodGenerator(registeredBean1)).isNull();
assertThat(methodGeneratorFactory.getBeanDefinitionMethodGenerator(registeredBean2)).isNull();
}
@Test
void getBeanDefinitionMethodGeneratorWhenRegisteredBeanIsAotProcessorAndIsNotExcludedAndBeanRegistrationExcludeFilterDoesNotFilterBean() {
MockSpringFactoriesLoader springFactoriesLoader = new MockSpringFactoriesLoader();
DefaultListableBeanFactory beanFactory = new DefaultListableBeanFactory();
beanFactory.registerBeanDefinition("test", BeanDefinitionBuilder
.rootBeanDefinition(TestBeanRegistrationAotProcessorAndNotExcluded.class).getBeanDefinition());
RegisteredBean registeredBean1 = RegisteredBean.of(beanFactory, "test");
BeanDefinitionMethodGeneratorFactory methodGeneratorFactory = new BeanDefinitionMethodGeneratorFactory(
AotServices.factoriesAndBeans(springFactoriesLoader, beanFactory));
assertThat(methodGeneratorFactory.getBeanDefinitionMethodGenerator(registeredBean1)).isNotNull();
}
private RegisteredBean registerTestBean(DefaultListableBeanFactory beanFactory) {
beanFactory.registerBeanDefinition("test", BeanDefinitionBuilder
.rootBeanDefinition(TestBean.class).getBeanDefinition());
return RegisteredBean.of(beanFactory, "test");
}
static | BeanDefinitionMethodGeneratorFactoryTests |
java | apache__camel | core/camel-core-processor/src/main/java/org/apache/camel/processor/PausableProcessor.java | {
"start": 1306,
"end": 2699
} | class ____ extends BaseProcessorSupport
implements Navigate<Processor>, CamelContextAware, IdAware, RouteIdAware {
private final AsyncProcessor processor;
private CamelContext camelContext;
private String id;
private String routeId;
public PausableProcessor(Processor processor) {
this.processor = AsyncProcessorConverterHelper.convert(processor);
}
@Override
public boolean process(Exchange exchange, AsyncCallback callback) {
return processor.process(exchange, callback);
}
@Override
public CamelContext getCamelContext() {
return camelContext;
}
@Override
public void setCamelContext(CamelContext camelContext) {
this.camelContext = camelContext;
}
@Override
public List<Processor> next() {
if (!hasNext()) {
return null;
}
List<Processor> answer = new ArrayList<>(1);
answer.add(processor);
return answer;
}
@Override
public boolean hasNext() {
return false;
}
@Override
public String getId() {
return id;
}
@Override
public void setId(String id) {
this.id = id;
}
@Override
public String getRouteId() {
return routeId;
}
@Override
public void setRouteId(String routeId) {
this.routeId = routeId;
}
}
| PausableProcessor |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/index/engine/SearchBasedChangesSnapshot.java | {
"start": 2002,
"end": 12066
} | class ____ implements Translog.Snapshot, Closeable {
public static final int DEFAULT_BATCH_SIZE = 1024;
private final IndexSettings indexSettings;
private final IndexSearcher indexSearcher;
private final ValueFetcher sourceMetadataFetcher;
private final Closeable onClose;
protected final long fromSeqNo, toSeqNo;
protected final boolean requiredFullRange;
protected final int searchBatchSize;
private final boolean accessStats;
private final int totalHits;
private FieldDoc afterDoc;
private long lastSeenSeqNo;
/**
* Constructs a new snapshot for fetching changes within a sequence number range.
*
* @param engineSearcher Engine searcher instance.
* @param searchBatchSize Number of documents to retrieve per batch.
* @param fromSeqNo Starting sequence number.
* @param toSeqNo Ending sequence number.
* @param requiredFullRange Whether the full range is required.
* @param accessStats If true, enable access statistics for counting total operations.
* @param indexVersionCreated Version of the index when it was created.
*/
protected SearchBasedChangesSnapshot(
MapperService mapperService,
Engine.Searcher engineSearcher,
int searchBatchSize,
long fromSeqNo,
long toSeqNo,
boolean requiredFullRange,
boolean accessStats,
IndexVersion indexVersionCreated
) throws IOException {
if (fromSeqNo < 0 || toSeqNo < 0 || fromSeqNo > toSeqNo) {
throw new IllegalArgumentException("Invalid range; from_seqno [" + fromSeqNo + "], to_seqno [" + toSeqNo + "]");
}
if (searchBatchSize <= 0) {
throw new IllegalArgumentException("Search_batch_size must be positive [" + searchBatchSize + "]");
}
final AtomicBoolean closed = new AtomicBoolean();
this.onClose = () -> {
if (closed.compareAndSet(false, true)) {
IOUtils.close(engineSearcher);
}
};
this.indexSettings = mapperService.getIndexSettings();
this.fromSeqNo = fromSeqNo;
this.toSeqNo = toSeqNo;
this.lastSeenSeqNo = fromSeqNo - 1;
this.requiredFullRange = requiredFullRange;
this.indexSearcher = newIndexSearcher(engineSearcher);
this.indexSearcher.setQueryCache(null);
long requestingSize = (toSeqNo - fromSeqNo == Long.MAX_VALUE) ? Long.MAX_VALUE : (toSeqNo - fromSeqNo + 1L);
this.searchBatchSize = (int) Math.min(requestingSize, searchBatchSize);
this.accessStats = accessStats;
this.totalHits = accessStats ? indexSearcher.count(rangeQuery(indexSettings, fromSeqNo, toSeqNo)) : -1;
this.sourceMetadataFetcher = createSourceMetadataValueFetcher(mapperService, indexSearcher);
}
private ValueFetcher createSourceMetadataValueFetcher(MapperService mapperService, IndexSearcher searcher) {
if (mapperService.mappingLookup().inferenceFields().isEmpty()) {
return null;
}
var mapper = (InferenceMetadataFieldsMapper) mapperService.mappingLookup()
.getMapping()
.getMetadataMapperByName(InferenceMetadataFieldsMapper.NAME);
return mapper != null
? mapper.fieldType().valueFetcher(mapperService.mappingLookup(), mapperService.getBitSetProducer(), searcher)
: null;
}
/**
* Abstract method for retrieving the next operation. Should be implemented by subclasses.
*
* @return The next Translog.Operation in the snapshot.
* @throws IOException If an I/O error occurs.
*/
protected abstract Translog.Operation nextOperation() throws IOException;
/**
* Returns the list of index leaf reader contexts.
*
* @return List of LeafReaderContext.
*/
public List<LeafReaderContext> leaves() {
return indexSearcher.getIndexReader().leaves();
}
@Override
public int totalOperations() {
if (accessStats == false) {
throw new IllegalStateException("Access stats of a snapshot created with [access_stats] is false");
}
return totalHits;
}
@Override
public final Translog.Operation next() throws IOException {
Translog.Operation op = nextOperation();
if (requiredFullRange) {
verifyRange(op);
}
if (op != null) {
assert fromSeqNo <= op.seqNo() && op.seqNo() <= toSeqNo && lastSeenSeqNo < op.seqNo()
: "Unexpected operation; last_seen_seqno ["
+ lastSeenSeqNo
+ "], from_seqno ["
+ fromSeqNo
+ "], to_seqno ["
+ toSeqNo
+ "], op ["
+ op
+ "]";
lastSeenSeqNo = op.seqNo();
}
return op;
}
@Override
public void close() throws IOException {
onClose.close();
}
/**
* Retrieves the next batch of top documents based on the sequence range.
*
* @return TopDocs instance containing the documents in the current batch.
*/
protected TopDocs nextTopDocs() throws IOException {
Query rangeQuery = rangeQuery(indexSettings, Math.max(fromSeqNo, lastSeenSeqNo), toSeqNo);
SortField sortBySeqNo = new SortField(SeqNoFieldMapper.NAME, SortField.Type.LONG);
TopFieldCollectorManager collectorManager = new TopFieldCollectorManager(new Sort(sortBySeqNo), searchBatchSize, afterDoc, 0);
TopDocs results = indexSearcher.search(rangeQuery, collectorManager);
if (results.scoreDocs.length > 0) {
afterDoc = (FieldDoc) results.scoreDocs[results.scoreDocs.length - 1];
}
for (int i = 0; i < results.scoreDocs.length; i++) {
results.scoreDocs[i].shardIndex = i;
}
return results;
}
/**
* Sets the reader context to enable reading synthetic fields that were removed from the {@code _source}.
* This method sets up the {@code sourceMetadataFetcher} with the provided {@link LeafReaderContext},
* ensuring it is ready to fetch metadata for subsequent operations.
*
* <p>Note: This method should be called before {@link #addSyntheticFields(Source, int)} at the start of every leaf
* to ensure the metadata fetcher is properly initialized.</p>
*/
protected void setNextSyntheticFieldsReader(LeafReaderContext context) throws IOException {
if (sourceMetadataFetcher != null) {
sourceMetadataFetcher.setNextReader(context);
}
}
/**
* Creates a new {@link Source} object by combining the provided {@code originalSource}
* with additional synthetic fields. If the {@code sourceMetadataFetcher} is null or no metadata
* fields are fetched, the original source is returned unchanged.
*
* @param originalSource the original source
* @param segmentDocID the document ID used to fetch metadata fields
* @return a new {@link Source} instance containing the original data and additional metadata,
* or the original source if no metadata is added
* @throws IOException if an error occurs while fetching synthetic values
*/
protected Source addSyntheticFields(Source originalSource, int segmentDocID) throws IOException {
if (sourceMetadataFetcher == null) {
return originalSource;
}
List<Object> values = sourceMetadataFetcher.fetchValues(originalSource, segmentDocID, List.of());
if (values.isEmpty()) {
return originalSource;
}
originalSource.source().put(InferenceMetadataFieldsMapper.NAME, values.get(0));
return Source.fromMap(originalSource.source(), originalSource.sourceContentType());
}
static IndexSearcher newIndexSearcher(Engine.Searcher engineSearcher) throws IOException {
return new IndexSearcher(Lucene.wrapAllDocsLive(engineSearcher.getDirectoryReader()));
}
static Query rangeQuery(IndexSettings indexSettings, long fromSeqNo, long toSeqNo) {
Query seqNoQuery = SeqNoFieldMapper.rangeQueryForSeqNo(indexSettings.seqNoIndexOptions(), fromSeqNo, toSeqNo);
return new BooleanQuery.Builder().add(seqNoQuery, BooleanClause.Occur.MUST)
.add(Queries.newNonNestedFilter(indexSettings.getIndexVersionCreated()), BooleanClause.Occur.MUST)
.build();
}
private void verifyRange(Translog.Operation op) {
if (op == null && lastSeenSeqNo < toSeqNo) {
throw new MissingHistoryOperationsException(
"Not all operations between from_seqno ["
+ fromSeqNo
+ "] "
+ "and to_seqno ["
+ toSeqNo
+ "] found; prematurely terminated last_seen_seqno ["
+ lastSeenSeqNo
+ "]"
);
} else if (op != null && op.seqNo() != lastSeenSeqNo + 1) {
throw new MissingHistoryOperationsException(
"Not all operations between from_seqno ["
+ fromSeqNo
+ "] "
+ "and to_seqno ["
+ toSeqNo
+ "] found; expected seqno ["
+ lastSeenSeqNo
+ 1
+ "]; found ["
+ op
+ "]"
);
}
}
protected static boolean assertDocSoftDeleted(LeafReader leafReader, int segmentDocId) throws IOException {
NumericDocValues docValues = leafReader.getNumericDocValues(Lucene.SOFT_DELETES_FIELD);
if (docValues == null || docValues.advanceExact(segmentDocId) == false) {
throw new IllegalStateException("DocValues for field [" + Lucene.SOFT_DELETES_FIELD + "] is not found");
}
return docValues.longValue() == 1;
}
}
| SearchBasedChangesSnapshot |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/mapping/onetoone/OneToOneInverseInEmbeddableTest.java | {
"start": 2140,
"end": 2737
} | class ____ {
private Integer id;
private String name;
private Parent parent;
Child() {
}
Child(Integer id, Parent parent) {
this.id = id;
this.parent = parent;
this.parent.getEmbeddable().setChild( this );
}
@Id
public Integer getId() {
return id;
}
public void setId(Integer id) {
this.id = id;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
@OneToOne
public Parent getParent() {
return parent;
}
public void setParent(Parent parent) {
this.parent = parent;
}
}
}
| Child |
java | spring-projects__spring-data-jpa | spring-data-jpa/src/test/java/org/springframework/data/jpa/repository/custom/CustomGenericJpaRepository.java | {
"start": 943,
"end": 1067
} | class ____ common custom functionality for all derived repository instances.
*
* @author Oliver Gierke
*/
public | implementing |
java | apache__camel | core/camel-core/src/test/java/org/apache/camel/component/file/FileConsumerResumeStrategyTest.java | {
"start": 1597,
"end": 1683
} | class ____ extends ContextTestSupport {
private static | FileConsumerResumeStrategyTest |
java | spring-projects__spring-boot | smoke-test/spring-boot-smoke-test-webservices/src/main/java/smoketest/webservices/endpoint/HolidayEndpoint.java | {
"start": 1148,
"end": 2580
} | class ____ {
private static final String NAMESPACE_URI = "https://company.example.com/hr/schemas";
private final XPathExpression<Element> startDateExpression;
private final XPathExpression<Element> endDateExpression;
private final XPathExpression<String> nameExpression;
private final HumanResourceService humanResourceService;
public HolidayEndpoint(HumanResourceService humanResourceService) {
this.humanResourceService = humanResourceService;
Namespace namespace = Namespace.getNamespace("hr", NAMESPACE_URI);
XPathFactory xPathFactory = XPathFactory.instance();
this.startDateExpression = xPathFactory.compile("//hr:StartDate", Filters.element(), null, namespace);
this.endDateExpression = xPathFactory.compile("//hr:EndDate", Filters.element(), null, namespace);
this.nameExpression = xPathFactory.compile("concat(//hr:FirstName,' ',//hr:LastName)", Filters.fstring(), null,
namespace);
}
@PayloadRoot(namespace = NAMESPACE_URI, localPart = "HolidayRequest")
public void handleHolidayRequest(@RequestPayload Element holidayRequest) {
LocalDate startDate = LocalDate.parse(this.startDateExpression.evaluateFirst(holidayRequest).getText());
LocalDate endDate = LocalDate.parse(this.endDateExpression.evaluateFirst(holidayRequest).getText());
String name = this.nameExpression.evaluateFirst(holidayRequest);
this.humanResourceService.bookHoliday(startDate, endDate, name);
}
}
| HolidayEndpoint |
java | quarkusio__quarkus | core/deployment/src/main/java/io/quarkus/deployment/ide/EffectiveIdeBuildItem.java | {
"start": 147,
"end": 178
} | class ____ made
*/
public final | is |
java | processing__processing4 | core/src/processing/core/PVector.java | {
"start": 24060,
"end": 28305
} | class ____. (See the
* middle example above.) The non-static versions, <b>lerp(v, amt)</b> and
* <b>lerp(x, y, z, amt)</b>, do not create a new PVector, but transform the
* values of the <b>PVector</b> on which they are called. These non-static versions
* perform the same operation, but the former takes another vector as input,
* while the latter takes three float values. (See the top and bottom examples
* above, respectively.)
*
*
* @webref pvector:method
* @usage web_application
* @webBrief Linear interpolate the vector to another vector
* @param v the vector to lerp to
* @param amt The amount of interpolation; some value between 0.0 (old vector)
* and 1.0 (new vector). 0.1 is very near the old vector; 0.5 is
* halfway in between.
* @see PApplet#lerp(float, float, float)
*/
public PVector lerp(PVector v, float amt) {
x = PApplet.lerp(x, v.x, amt);
y = PApplet.lerp(y, v.y, amt);
z = PApplet.lerp(z, v.z, amt);
return this;
}
/**
* Linear interpolate between two vectors (returns a new PVector object)
* @param v1 the vector to start from
* @param v2 the vector to lerp to
*/
public static PVector lerp(PVector v1, PVector v2, float amt) {
PVector v = v1.copy();
v.lerp(v2, amt);
return v;
}
/**
* Linear interpolate the vector to x,y,z values
* @param x the x component to lerp to
* @param y the y component to lerp to
* @param z the z component to lerp to
*/
public PVector lerp(float x, float y, float z, float amt) {
this.x = PApplet.lerp(this.x, x, amt);
this.y = PApplet.lerp(this.y, y, amt);
this.z = PApplet.lerp(this.z, z, amt);
return this;
}
/**
*
* Calculates and returns the angle (in radians) between two vectors.
*
*
* @webref pvector:method
* @usage web_application
* @param v1 the x, y, and z components of a PVector
* @param v2 the x, y, and z components of a PVector
* @webBrief Calculate and return the angle between two vectors
*/
static public float angleBetween(PVector v1, PVector v2) {
// We get NaN if we pass in a zero vector which can cause problems
// Zero seems like a reasonable angle between a (0,0,0) vector and something else
if (v1.x == 0 && v1.y == 0 && v1.z == 0 ) return 0.0f;
if (v2.x == 0 && v2.y == 0 && v2.z == 0 ) return 0.0f;
double dot = v1.x * v2.x + v1.y * v2.y + v1.z * v2.z;
double v1mag = Math.sqrt(v1.x * v1.x + v1.y * v1.y + v1.z * v1.z);
double v2mag = Math.sqrt(v2.x * v2.x + v2.y * v2.y + v2.z * v2.z);
// This should be a number between -1 and 1, since it's "normalized"
double amt = dot / (v1mag * v2mag);
// But if it's not due to rounding error, then we need to fix it
// https://github.com/processing/processing/issues/379
// Otherwise if outside the range, acos() will return NaN
// http://www.cppreference.com/wiki/c/math/acos
if (amt <= -1) {
return PConstants.PI;
} else if (amt >= 1) {
// https://github.com/processing/processing/issues/474
return 0;
}
return (float) Math.acos(amt);
}
@Override
public String toString() {
return "[ " + x + ", " + y + ", " + z + " ]";
}
/**
*
* Return a representation of this vector as a float array. This is only for
* temporary use. If used in any other fashion, the contents should be copied by
* using the <b>copy()</b> method to copy into your own array.
*
*
* @webref pvector:method
* @usage: web_application
* @webBrief Return a representation of the vector as a float array
*/
public float[] array() {
if (array == null) {
array = new float[3];
}
array[0] = x;
array[1] = y;
array[2] = z;
return array;
}
@Override
public boolean equals(Object obj) {
if (!(obj instanceof PVector)) {
return false;
}
final PVector p = (PVector) obj;
return x == p.x && y == p.y && z == p.z;
}
@Override
public int hashCode() {
int result = 1;
result = 31 * result + Float.floatToIntBits(x);
result = 31 * result + Float.floatToIntBits(y);
result = 31 * result + Float.floatToIntBits(z);
return result;
}
}
| directly |
java | apache__kafka | streams/src/main/java/org/apache/kafka/streams/processor/internals/assignment/LegacyStickyTaskAssignor.java | {
"start": 14254,
"end": 15324
} | class ____ {
private final Set<Pair> pairs;
private final int maxPairs;
TaskPairs(final int maxPairs) {
this.maxPairs = maxPairs;
this.pairs = new HashSet<>(maxPairs);
}
boolean hasNewPair(final TaskId task1,
final Set<TaskId> taskIds) {
if (pairs.size() == maxPairs) {
return false;
}
for (final TaskId taskId : taskIds) {
if (!pairs.contains(pair(task1, taskId))) {
return true;
}
}
return false;
}
void addPairs(final TaskId taskId, final Set<TaskId> assigned) {
for (final TaskId id : assigned) {
pairs.add(pair(id, taskId));
}
}
Pair pair(final TaskId task1, final TaskId task2) {
if (task1.compareTo(task2) < 0) {
return new Pair(task1, task2);
}
return new Pair(task2, task1);
}
private static | TaskPairs |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/AnnotationMirrorToStringTest.java | {
"start": 1613,
"end": 1890
} | class ____ {
String f(AnnotationMirror av) {
return AnnotationMirrors.toString(av);
}
}
""")
.allowBreakingChanges() // TODO(cushon): remove after the next auto-common release
.doTest();
}
}
| Test |
java | apache__camel | dsl/camel-componentdsl/src/generated/java/org/apache/camel/builder/component/dsl/JsonataComponentBuilderFactory.java | {
"start": 1855,
"end": 5894
} | interface ____ extends ComponentBuilder<JsonataComponent> {
/**
* Whether to allow to use resource template from header or not (default
* false). Enabling this allows to specify dynamic templates via message
* header. However this can be seen as a potential security
* vulnerability if the header is coming from a malicious user, so use
* this with care.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: producer
*
* @param allowTemplateFromHeader the value to set
* @return the dsl builder
*/
default JsonataComponentBuilder allowTemplateFromHeader(boolean allowTemplateFromHeader) {
doSetProperty("allowTemplateFromHeader", allowTemplateFromHeader);
return this;
}
/**
* Sets whether to use resource content cache or not.
*
* The option is a: <code>boolean</code> type.
*
* Default: true
* Group: producer
*
* @param contentCache the value to set
* @return the dsl builder
*/
default JsonataComponentBuilder contentCache(boolean contentCache) {
doSetProperty("contentCache", contentCache);
return this;
}
/**
* Whether the producer should be started lazy (on the first message).
* By starting lazy you can use this to allow CamelContext and routes to
* startup in situations where a producer may otherwise fail during
* starting and cause the route to fail being started. By deferring this
* startup to be lazy then the startup failure can be handled during
* routing messages via Camel's routing error handlers. Beware that when
* the first message is processed then creating and starting the
* producer may take a little time and prolong the total processing time
* of the processing.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: producer
*
* @param lazyStartProducer the value to set
* @return the dsl builder
*/
default JsonataComponentBuilder lazyStartProducer(boolean lazyStartProducer) {
doSetProperty("lazyStartProducer", lazyStartProducer);
return this;
}
/**
* Whether autowiring is enabled. This is used for automatic autowiring
* options (the option must be marked as autowired) by looking up in the
* registry to find if there is a single instance of matching type,
* which then gets configured on the component. This can be used for
* automatic configuring JDBC data sources, JMS connection factories,
* AWS Clients, etc.
*
* The option is a: <code>boolean</code> type.
*
* Default: true
* Group: advanced
*
* @param autowiredEnabled the value to set
* @return the dsl builder
*/
default JsonataComponentBuilder autowiredEnabled(boolean autowiredEnabled) {
doSetProperty("autowiredEnabled", autowiredEnabled);
return this;
}
/**
* To configure custom frame bindings and inject user functions.
*
* The option is a:
* <code>org.apache.camel.component.jsonata.JsonataFrameBinding</code> type.
*
* Group: advanced
*
* @param frameBinding the value to set
* @return the dsl builder
*/
default JsonataComponentBuilder frameBinding(org.apache.camel.component.jsonata.JsonataFrameBinding frameBinding) {
doSetProperty("frameBinding", frameBinding);
return this;
}
}
| JsonataComponentBuilder |
java | netty__netty | codec-mqtt/src/main/java/io/netty/handler/codec/mqtt/MqttReasonCodes.java | {
"start": 6251,
"end": 6757
} | enum ____ only correct values
VALUES[unsignedByte] = code; // [java/index-out-of-bounds]
}
}
private final byte byteValue;
PubAck(byte byteValue) {
this.byteValue = byteValue;
}
/**
* @return the value number corresponding to the constant.
* */
public byte byteValue() {
return byteValue;
}
/**
* @param b the number to decode.
* @return the | contains |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/annotations/generics/GenericMappedSuperclassPropertyUpdateTest.java | {
"start": 3906,
"end": 4593
} | class ____<E extends CommonEntity<?>> {
@Id
@GeneratedValue
private Long id;
Long getId() {
return id;
}
@ManyToOne
@JoinColumn
private E relative;
void setRelative(E relative) {
this.relative = relative;
}
E getRelative() {
return relative;
}
@Override
public boolean equals(Object o) {
if ( o == null || getClass() != o.getClass() ) {
return false;
}
CommonEntity<?> that = (CommonEntity<?>) o;
return Objects.equals( id, that.id ) && Objects.equals( relative, that.relative );
}
@Override
public int hashCode() {
return Objects.hash( id, relative );
}
}
@Entity(name = "SpecificEntity")
public static | CommonEntity |
java | elastic__elasticsearch | x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/action/TransportGetCertificateInfoAction.java | {
"start": 897,
"end": 2000
} | class ____ extends HandledTransportAction<
GetCertificateInfoAction.Request,
GetCertificateInfoAction.Response> {
private final SSLService sslService;
@Inject
public TransportGetCertificateInfoAction(TransportService transportService, ActionFilters actionFilters, SSLService sslService) {
super(
GetCertificateInfoAction.NAME,
transportService,
actionFilters,
GetCertificateInfoAction.Request::new,
EsExecutors.DIRECT_EXECUTOR_SERVICE
);
this.sslService = sslService;
}
@Override
protected void doExecute(
Task task,
GetCertificateInfoAction.Request request,
ActionListener<GetCertificateInfoAction.Response> listener
) {
try {
Collection<CertificateInfo> certificates = sslService.getLoadedCertificates();
listener.onResponse(new GetCertificateInfoAction.Response(certificates));
} catch (GeneralSecurityException | IOException e) {
listener.onFailure(e);
}
}
}
| TransportGetCertificateInfoAction |
java | playframework__playframework | core/play/src/main/java/play/mvc/BodyParser.java | {
"start": 18972,
"end": 20807
} | class ____ extends MaxLengthBodyParser<play.libs.Files.TemporaryFile> {
private final play.libs.Files.TemporaryFileCreator temporaryFileCreator;
private final Materializer materializer;
public TemporaryFile(
long maxLength,
play.libs.Files.TemporaryFileCreator temporaryFileCreator,
HttpErrorHandler errorHandler,
Materializer materializer) {
super(maxLength, errorHandler);
this.temporaryFileCreator = temporaryFileCreator;
this.materializer = materializer;
}
@Inject
public TemporaryFile(
HttpConfiguration httpConfiguration,
play.libs.Files.TemporaryFileCreator temporaryFileCreator,
HttpErrorHandler errorHandler,
Materializer materializer) {
this(
httpConfiguration.parser().maxDiskBuffer(),
temporaryFileCreator,
errorHandler,
materializer);
}
@Override
protected Accumulator<ByteString, F.Either<Result, play.libs.Files.TemporaryFile>> apply1(
Http.RequestHeader request) {
if (BodyParserUtils.contentLengthHeaderExceedsMaxLength(request.asScala(), super.maxLength)) {
// We check early here already to not even create a temporary file
return Accumulator.done(requestEntityTooLarge(request));
} else {
play.libs.Files.TemporaryFile tempFile =
temporaryFileCreator.create("requestBody", "asTemporaryFile");
return Accumulator.fromSink(
StreamConverters.fromOutputStream(
() -> java.nio.file.Files.newOutputStream(tempFile.path())))
.map(ioResult -> F.Either.Right(tempFile), materializer.executionContext());
}
}
}
/**
* Parse the body as form url encoded if the Content-Type is application/x-www-form-urlencoded.
*/
| TemporaryFile |
java | junit-team__junit5 | junit-platform-commons/src/main/java/org/junit/platform/commons/function/Try.java | {
"start": 8510,
"end": 8649
} | interface ____ similar to {@link Function},
* except that a {@code Transformer} may throw an exception.
*/
@FunctionalInterface
public | is |
java | elastic__elasticsearch | x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/rollup/job/GroupConfigSerializingTests.java | {
"start": 598,
"end": 1264
} | class ____ extends AbstractXContentSerializingTestCase<GroupConfig> {
@Override
protected GroupConfig doParseInstance(final XContentParser parser) throws IOException {
return GroupConfig.fromXContent(parser);
}
@Override
protected Writeable.Reader<GroupConfig> instanceReader() {
return GroupConfig::new;
}
@Override
protected GroupConfig createTestInstance() {
return randomGroupConfig(random());
}
@Override
protected GroupConfig mutateInstance(GroupConfig instance) {
return null;// TODO implement https://github.com/elastic/elasticsearch/issues/25929
}
}
| GroupConfigSerializingTests |
java | quarkusio__quarkus | integration-tests/rest-client-reactive/src/main/java/io/quarkus/it/rest/client/main/ClientCallingResource.java | {
"start": 1465,
"end": 15003
} | class ____ {
private static final ObjectMapper mapper = new JsonMapper();
private static final String[] RESPONSES = { "cortland", "lobo", "golden delicious" };
private final AtomicInteger count = new AtomicInteger(0);
@RestClient
ClientWithClientLogger clientWithClientLogger;
@RestClient
ClientWithExceptionMapper clientWithExceptionMapper;
@RestClient
FaultToleranceClient faultToleranceClient;
@RestClient
FaultToleranceOnInterfaceClient faultToleranceOnInterfaceClient;
@RestClient
ExternalSelfSignedClient externalSelfSignedClient;
@RestClient
WrongHostClient wrongHostClient;
@RestClient
WrongHostRejectedClient wrongHostRejectedClient;
@Inject
InMemorySpanExporter inMemorySpanExporter;
void init(@Observes Router router) {
router.post().handler(BodyHandler.create());
router.get("/unprocessable").handler(rc -> rc.response().setStatusCode(422).end("the entity was unprocessable"));
router.get("/client-logger").handler(rc -> {
rc.response().end("Hello World!");
});
router.get("/correlation").handler(rc -> {
rc.response().end(rc.request().getHeader(CorrelationIdClient.CORRELATION_ID_HEADER_NAME));
});
router.post("/call-client-with-global-client-logger").blockingHandler(rc -> {
String url = rc.body().asString();
ClientWithClientLogger client = QuarkusRestClientBuilder.newBuilder().baseUri(URI.create(url))
.build(ClientWithClientLogger.class);
Arc.container().instance(MyClientLogger.class).get().reset();
client.call();
if (Arc.container().instance(MyClientLogger.class).get().wasUsed()) {
success(rc, "global client logger was used");
} else {
fail(rc, "global client logger was not used");
}
});
router.post("/call-client-with-explicit-client-logger").blockingHandler(rc -> {
String url = rc.body().asString();
MyClientLogger explicitClientLogger = new MyClientLogger();
ClientWithClientLogger client = QuarkusRestClientBuilder.newBuilder().baseUri(URI.create(url))
.clientLogger(explicitClientLogger)
.build(ClientWithClientLogger.class);
client.call();
if (explicitClientLogger.wasUsed()) {
success(rc, "explicit client logger was used");
} else {
fail(rc, "explicit client logger was not used");
}
});
router.post("/call-cdi-client-with-global-client-logger").blockingHandler(rc -> {
Arc.container().instance(MyClientLogger.class).get().reset();
clientWithClientLogger.call();
if (Arc.container().instance(MyClientLogger.class).get().wasUsed()) {
success(rc, "global client logger was used");
} else {
fail(rc, "global client logger was not used");
}
});
router.post("/call-client-with-exception-mapper").blockingHandler(rc -> {
String url = rc.body().asString();
ClientWithExceptionMapper client = QuarkusRestClientBuilder.newBuilder().baseUri(URI.create(url))
.register(MyResponseExceptionMapper.class)
.build(ClientWithExceptionMapper.class);
callGet(rc, client);
});
router.post("/call-cdi-client-with-exception-mapper").blockingHandler(rc -> callGet(rc, clientWithExceptionMapper));
router.post("/apples").handler(rc -> {
int count = this.count.getAndIncrement();
rc.response().putHeader("content-type", "application/json")
.end(String.format("{\"cultivar\": \"%s\"}", RESPONSES[count % RESPONSES.length]));
});
router.route("/call-client").blockingHandler(rc -> {
String url = rc.body().asString();
AppleClient client = QuarkusRestClientBuilder.newBuilder().baseUri(URI.create(url))
.build(AppleClient.class);
Uni<Apple> apple1 = Uni.createFrom().item(client.swapApple(new Apple("lobo")));
Uni<Apple> apple2 = Uni.createFrom().completionStage(client.completionSwapApple(new Apple("lobo2")));
Uni<Apple> apple3 = client.uniSwapApple(new Apple("lobo3"));
Uni<Apple> apple4 = Uni.createFrom().item(client.someApple());
Uni<Apple> apple5 = Uni.createFrom().completionStage(client.completionSomeApple());
Uni<Apple> apple6 = client.uniSomeApple();
Uni<Apple> apple7 = Uni.createFrom().item(client.stringApple()).onItem().transform(this::toApple);
Uni<Apple> apple8 = Uni.createFrom().completionStage(client.completionStringApple()).onItem()
.transform(this::toApple);
Uni<Apple> apple9 = client.uniStringApple().onItem().transform(this::toApple);
Uni<Apple> apple10 = Uni.createFrom().item(client.restResponseApple().getEntity());
Uni<Apple> apple11 = client.uniRestResponseApple().onItem().transform(RestResponse::getEntity);
Uni.combine().all().unis(apple1, apple2, apple3, apple4, apple5, apple6, apple7, apple8, apple9, apple10, apple11)
.combinedWith(Function.identity())
.subscribe()
.with(list -> {
try {
rc.response().putHeader("content-type", "application/json")
.end(mapper.writeValueAsString(list));
} catch (JsonProcessingException e) {
fail(rc, e.getMessage());
}
}, t -> fail(rc, t.getMessage()));
});
router.route("/call-client-retry").blockingHandler(rc -> {
String url = rc.body().asString();
AppleClient client = QuarkusRestClientBuilder.newBuilder().baseUri(URI.create(url + "/does-not-exist"))
.build(AppleClient.class);
AtomicInteger count = new AtomicInteger(0);
client.uniSwapApple(new Apple("lobo")).onFailure().retry().until(t -> count.incrementAndGet() <= 3)
.subscribe()
.with(m -> success(rc, count.toString()), t -> success(rc, count.toString()));
});
router.post("/hello").handler(rc -> rc.response().putHeader("content-type", MediaType.TEXT_PLAIN)
.end("Hello, " + (rc.body().asString()).repeat(getCount(rc))));
router.post("/hello/fromMessage").handler(rc -> rc.response().putHeader("content-type", MediaType.TEXT_PLAIN)
.end(rc.body().asJsonObject().getString("message")));
router.route("/call-hello-client").blockingHandler(rc -> {
String url = rc.body().asString();
HelloClient client = QuarkusRestClientBuilder.newBuilder().baseUri(URI.create(url))
.build(HelloClient.class);
String greeting = client.greeting("John", 2);
rc.response().end(greeting);
});
router.route("/call-hello-client-trace").blockingHandler(rc -> {
String url = rc.body().asString();
HelloClient client = QuarkusRestClientBuilder.newBuilder().baseUri(URI.create(url))
.build(HelloClient.class);
String greeting = client.greeting("Mary", 3);
rc.response().end(greeting);
});
router.route("/call-helloFromMessage-client").blockingHandler(rc -> {
String url = rc.body().asString();
HelloClient client = QuarkusRestClientBuilder.newBuilder().baseUri(URI.create(url))
.build(HelloClient.class);
String greeting = client.fromMessage(new HelloClient.Message("Hello world"));
rc.response().end(greeting);
});
router.post("/params/param").handler(rc -> rc.response().putHeader("content-type", MediaType.TEXT_PLAIN)
.end(getParam(rc)));
router.route("/call-params-client-with-param-first").blockingHandler(rc -> {
String url = rc.body().asString();
ParamClient client = QuarkusRestClientBuilder.newBuilder().baseUri(URI.create(url))
.build(ParamClient.class);
String result = client.getParam(Param.FIRST);
rc.response().end(result);
});
router.route("/rest-response").blockingHandler(rc -> {
String url = rc.body().asString();
RestResponseClient client = QuarkusRestClientBuilder.newBuilder().baseUri(URI.create(url))
.property("microprofile.rest.client.disable.default.mapper", true)
.build(RestResponseClient.class);
RestResponse<String> restResponse = client.response();
rc.response().end("" + restResponse.getStatus());
});
router.route("/export-clear").blockingHandler(rc -> {
inMemorySpanExporter.reset();
rc.response().end();
});
router.route("/export").blockingHandler(rc -> {
rc.response().putHeader("content-type", "application/json")
.end(Json.encodePrettily(inMemorySpanExporter.getFinishedSpanItems()
.stream().filter(sd -> !sd.getName().contains("export"))
.collect(Collectors.toList())));
});
router.route("/call-with-fault-tolerance").blockingHandler(rc -> {
rc.end(faultToleranceClient.helloWithFallback());
});
router.route("/call-with-fault-tolerance-on-interface").blockingHandler(rc -> {
String result = "";
try {
result = faultToleranceOnInterfaceClient.hello();
} catch (Exception e) {
result = e.getClass().getSimpleName();
}
rc.end(result);
});
router.get("/with%20space").handler(rc -> rc.response().setStatusCode(200).end());
router.get("/self-signed").blockingHandler(
rc -> rc.response().setStatusCode(200).end(String.valueOf(externalSelfSignedClient.invoke().getStatus())));
router.get("/wrong-host").blockingHandler(
rc -> rc.response().setStatusCode(200).end(String.valueOf(wrongHostClient.invoke().getStatus())));
router.get("/wrong-host-rejected").blockingHandler(rc -> {
try {
int result = wrongHostRejectedClient.invoke().getStatus();
rc.response().setStatusCode(200).end(String.valueOf(result));
} catch (Exception e) {
rc.response().setStatusCode(500).end(e.getCause().getClass().getSimpleName());
}
});
router.post("/preserve-response-entity").blockingHandler(rc -> {
String url = rc.body().asString();
JAXRSResponseClient client = QuarkusRestClientBuilder.newBuilder().baseUri(URI.create(url))
.build(JAXRSResponseClient.class);
Response response = client.call();
Response newResponse = Response.fromResponse(response).build();
rc.response().end(String.valueOf(newResponse.getEntity() instanceof InputStream));
});
router.post("/preserve-response-entity-async").blockingHandler(rc -> {
String url = rc.body().asString();
final JAXRSResponseClient client = QuarkusRestClientBuilder.newBuilder().baseUri(URI.create(url))
.build(JAXRSResponseClient.class);
Response response = client.asyncCall().await().atMost(Duration.of(5, ChronoUnit.SECONDS));
rc.response().end(String.valueOf(response.getEntity() instanceof InputStream));
});
}
private Future<Void> success(RoutingContext rc, String body) {
return rc.response().putHeader("content-type", "text-plain").end(body);
}
private int getCount(io.vertx.ext.web.RoutingContext rc) {
List<String> countQueryParam = rc.queryParam("count");
if (countQueryParam.isEmpty()) {
return 1;
}
return Integer.parseInt(countQueryParam.get(0));
}
private String getParam(io.vertx.ext.web.RoutingContext rc) {
return rc.queryParam("param").get(0);
}
private void callGet(RoutingContext rc, ClientWithExceptionMapper client) {
try {
String response = client.get();
if ("MockAnswer".equals(response)) {
rc.response().setStatusCode(503).end(response);
return;
}
} catch (MyException expected) {
rc.response().setStatusCode(200).end();
return;
} catch (Exception unexpected) {
rc.response().setStatusCode(500).end("Expected MyException to be thrown, got " + unexpected.getClass());
return;
}
rc.response().setStatusCode(500).end("Expected MyException to be thrown but no exception has been thrown");
}
private void fail(RoutingContext rc, String message) {
rc.response().putHeader("content-type", "text/plain").setStatusCode(500).end(message);
}
private Apple toApple(String s) {
try {
return mapper.readValue(s, Apple.class);
} catch (JsonProcessingException e) {
throw new RuntimeException(e);
}
}
}
| ClientCallingResource |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestSecurityUtil.java | {
"start": 2639,
"end": 24536
} | class ____ {
private static final String ZK_AUTH_VALUE = "a_scheme:a_password";
@BeforeAll
public static void unsetKerberosRealm() {
// prevent failures if kinit-ed or on os x with no realm
System.setProperty("java.security.krb5.kdc", "");
System.setProperty("java.security.krb5.realm", "NONE");
}
@Test
public void isOriginalTGTReturnsCorrectValues() {
assertTrue(SecurityUtil.isTGSPrincipal
(new KerberosPrincipal("krbtgt/foo@foo")));
assertTrue(SecurityUtil.isTGSPrincipal
(new KerberosPrincipal("krbtgt/foo.bar.bat@foo.bar.bat")));
assertFalse(SecurityUtil.isTGSPrincipal
(null));
assertFalse(SecurityUtil.isTGSPrincipal
(new KerberosPrincipal("blah")));
assertFalse(SecurityUtil.isTGSPrincipal
(new KerberosPrincipal("krbtgt/hello")));
assertFalse(SecurityUtil.isTGSPrincipal
(new KerberosPrincipal("krbtgt/foo@FOO")));
}
private void verify(String original, String hostname, String expected)
throws IOException {
assertEquals(expected,
SecurityUtil.getServerPrincipal(original, hostname));
InetAddress addr = mockAddr(hostname);
assertEquals(expected,
SecurityUtil.getServerPrincipal(original, addr));
}
private InetAddress mockAddr(String reverseTo) {
InetAddress mock = Mockito.mock(InetAddress.class);
Mockito.doReturn(reverseTo).when(mock).getCanonicalHostName();
return mock;
}
@Test
public void testGetServerPrincipal() throws IOException {
String service = "hdfs/";
String realm = "@REALM";
String hostname = "foohost";
String userPrincipal = "foo@FOOREALM";
String shouldReplace = service + SecurityUtil.HOSTNAME_PATTERN + realm;
String replaced = service + hostname + realm;
verify(shouldReplace, hostname, replaced);
String shouldNotReplace = service + SecurityUtil.HOSTNAME_PATTERN + "NAME"
+ realm;
verify(shouldNotReplace, hostname, shouldNotReplace);
verify(userPrincipal, hostname, userPrincipal);
// testing reverse DNS lookup doesn't happen
InetAddress notUsed = Mockito.mock(InetAddress.class);
assertEquals(shouldNotReplace,
SecurityUtil.getServerPrincipal(shouldNotReplace, notUsed));
Mockito.verify(notUsed, Mockito.never()).getCanonicalHostName();
}
@Test
public void testPrincipalsWithLowerCaseHosts() throws IOException {
String service = "xyz/";
String realm = "@REALM";
String principalInConf = service + SecurityUtil.HOSTNAME_PATTERN + realm;
String hostname = "FooHost";
String principal =
service + StringUtils.toLowerCase(hostname) + realm;
verify(principalInConf, hostname, principal);
}
// A null hostname or the wildcard address "0.0.0.0" must both fall back to
// the local host's (lower-cased) name when substituting _HOST.
@Test
public void testLocalHostNameForNullOrWild() throws Exception {
String local = StringUtils.toLowerCase(SecurityUtil.getLocalHostName(null));
assertEquals("hdfs/" + local + "@REALM",
SecurityUtil.getServerPrincipal("hdfs/_HOST@REALM", (String)null));
assertEquals("hdfs/" + local + "@REALM",
SecurityUtil.getServerPrincipal("hdfs/_HOST@REALM", "0.0.0.0"));
}
// login() with Kerberos enabled but an empty keytab file name must fail
// with an IOException rather than proceeding silently.
@Test
public void testStartsWithIncorrectSettings() throws IOException {
  Configuration conf = new Configuration();
  SecurityUtil.setAuthenticationMethod(KERBEROS, conf);
  String keyTabKey = "key";
  conf.set(keyTabKey, "");
  UserGroupInformation.setConfiguration(conf);
  // assertThrows replaces the old boolean-flag try/catch pattern and also
  // reports unexpected exception types instead of swallowing them.
  assertThrows(IOException.class,
      () -> SecurityUtil.login(conf, keyTabKey, "", ""),
      "Exception for empty keytabfile name was expected");
}
// getHostFromPrincipal extracts the host component ("service/host@realm");
// a principal without a host part yields null.
@Test
public void testGetHostFromPrincipal() {
  assertEquals("host",
      SecurityUtil.getHostFromPrincipal("service/host@realm"));
  // assertNull is the idiomatic form of assertEquals(null, ...)
  assertNull(SecurityUtil.getHostFromPrincipal("service@realm"));
}
/**
 * With {@code hadoop.security.token.service.use_ip} enabled, every
 * combination of hostname/literal-IP and URI-port/default-port must
 * serialize to the resolved "ip:port" form. When the URI carries a port,
 * the default-port argument must be ignored.
 */
@Test
public void testBuildDTServiceName() {
  Configuration conf = new Configuration(false);
  conf.setBoolean(
      CommonConfigurationKeys.HADOOP_SECURITY_TOKEN_SERVICE_USE_IP, true);
  SecurityUtil.setConfiguration(conf);
  String[] uris = {
      "test://LocalHost", "test://LocalHost:123",
      "test://127.0.0.1", "test://127.0.0.1:123"};
  // 456 is deliberately wrong: it must lose to the explicit :123 in the URI
  int[] defaultPorts = {123, 456, 123, 456};
  for (int i = 0; i < uris.length; i++) {
    assertEquals("127.0.0.1:123",
        SecurityUtil.buildDTServiceName(URI.create(uris[i]), defaultPorts[i]));
  }
}
/**
 * With use_ip enabled, buildTokenService must render a socket address as
 * "ip:port" whether it was created from a hostname, a literal IP, or via
 * {@link NetUtils#createSocketAddr}.
 */
@Test
public void testBuildTokenServiceSockAddr() {
  Configuration conf = new Configuration(false);
  conf.setBoolean(
      CommonConfigurationKeys.HADOOP_SECURITY_TOKEN_SERVICE_USE_IP, true);
  SecurityUtil.setConfiguration(conf);
  InetSocketAddress[] inputs = {
      new InetSocketAddress("LocalHost", 123),
      new InetSocketAddress("127.0.0.1", 123),
      // what goes in, comes out
      NetUtils.createSocketAddr("127.0.0.1", 123)};
  for (InetSocketAddress sockAddr : inputs) {
    assertEquals("127.0.0.1:123",
        SecurityUtil.buildTokenService(sockAddr).toString());
  }
}
// Valid host/port spellings: bare host with default port, "host:" with a
// default port, and "host:123" where the explicit port wins over 456.
@Test
public void testGoodHostsAndPorts() {
InetSocketAddress compare = NetUtils.createSocketAddrForHost("localhost", 123);
runGoodCases(compare, "localhost", 123);
runGoodCases(compare, "localhost:", 123);
runGoodCases(compare, "localhost:123", 456);
}
/**
 * Asserts that a bare authority, an "hdfs://" URI form, and an
 * "hdfs://host/path" URI form all parse to the same socket address.
 */
void runGoodCases(InetSocketAddress addr, String host, int port) {
  String[] spellings = {host, "hdfs://" + host, "hdfs://" + host + "/path"};
  for (String spelling : spellings) {
    assertEquals(addr, NetUtils.createSocketAddr(spelling, port));
  }
}
// Malformed prefixes; the boolean says whether the spelling becomes valid
// once a positive port is supplied (e.g. "" and "hdfs://" only lack a port).
@Test
public void testBadHostsAndPorts() {
runBadCases("", true);
runBadCases(":", false);
runBadCases("hdfs/", false);
runBadCases("hdfs:/", false);
runBadCases("hdfs://", true);
}
// Fans one bad prefix out into the full matrix of bad host/port suffixes.
// validIfPosPort marks suffixes that are rescued by a positive default port.
void runBadCases(String prefix, boolean validIfPosPort) {
runBadPortPermutes(prefix, false);
runBadPortPermutes(prefix+"*", false);
runBadPortPermutes(prefix+"localhost", validIfPosPort);
runBadPortPermutes(prefix+"localhost:-1", false);
runBadPortPermutes(prefix+"localhost:-123", false);
runBadPortPermutes(prefix+"localhost:xyz", false);
runBadPortPermutes(prefix+"localhost/xyz", validIfPosPort);
runBadPortPermutes(prefix+"localhost/:123", validIfPosPort);
runBadPortPermutes(prefix+":123", false);
runBadPortPermutes(prefix+":xyz", false);
}
/**
 * Asserts that {@code arg} is rejected by createSocketAddr without a default
 * port, and with each default port in {-123, -1, 123} — except that args
 * flagged {@code validIfPosPort} are allowed to succeed with a positive port.
 */
void runBadPortPermutes(String arg, boolean validIfPosPort) {
  // Java-style array declaration (was C-style "int ports[]").
  int[] ports = { -123, -1, 123 };
  // assertThrows replaces the flag-in-finally pattern and additionally
  // surfaces unexpected exception types instead of masking them.
  assertThrows(IllegalArgumentException.class,
      () -> NetUtils.createSocketAddr(arg),
      "should be bad: '" + arg + "'");
  for (int port : ports) {
    if (validIfPosPort && port > 0) {
      continue;
    }
    assertThrows(IllegalArgumentException.class,
        () -> NetUtils.createSocketAddr(arg, port),
        "should be bad: '" + arg + "' (default port:" + port + ")");
  }
}
// check that the socket addr has:
// 1) the InetSocketAddress has the correct hostname, ie. exact host/ip given
// 2) the address is resolved, ie. has an ip
// 3,4) the socket's InetAddress has the same hostname, and the correct ip
// 5) the port is correct
private void
verifyValues(InetSocketAddress addr, String host, String ip, int port) {
  // assertFalse is the idiomatic form of assertTrue(!...)
  assertFalse(addr.isUnresolved());
  // don't know what the standard resolver will return for hostname.
  // should be host for host; host or ip for ip is ambiguous
  if (!SecurityUtil.useIpForTokenService) {
    assertEquals(host, addr.getHostName());
    assertEquals(host, addr.getAddress().getHostName());
  }
  assertEquals(ip, addr.getAddress().getHostAddress());
  assertEquals(port, addr.getPort());
}
// check:
// 1) buildTokenService honors use_ip setting
// 2) setTokenService & getService works
// 3) getTokenServiceAddr decodes to the identical socket addr
private void
verifyTokenService(InetSocketAddress addr, String host, String ip, int port, boolean useIp) {
//LOG.info("address:"+addr+" host:"+host+" ip:"+ip+" port:"+port);
// install the use_ip setting before exercising the round trip
Configuration conf = new Configuration(false);
conf.setBoolean(
CommonConfigurationKeys.HADOOP_SECURITY_TOKEN_SERVICE_USE_IP, useIp);
SecurityUtil.setConfiguration(conf);
// expected service string: "ip:port" or "lowercased-host:port"
String serviceHost = useIp ? ip : StringUtils.toLowerCase(host);
Token<?> token = new Token<TokenIdentifier>();
Text service = new Text(serviceHost+":"+port);
assertEquals(service, SecurityUtil.buildTokenService(addr));
SecurityUtil.setTokenService(token, addr);
assertEquals(service, token.getService());
// decode the service back into a socket address and re-verify all fields
InetSocketAddress serviceAddr = SecurityUtil.getTokenServiceAddr(token);
assertNotNull(serviceAddr);
verifyValues(serviceAddr, serviceHost, ip, port);
}
// check:
// 1) socket addr is created with fields set as expected
// 2) token service with ips
// 3) token service with the given host or ip
private void
verifyAddress(InetSocketAddress addr, String host, String ip, int port) {
verifyValues(addr, host, ip, port);
//LOG.info("test that token service uses ip");
verifyTokenService(addr, host, ip, port, true);
//LOG.info("test that token service uses host");
verifyTokenService(addr, host, ip, port, false);
}
// check:
// 1-4) combinations of host and port
// this will construct a socket addr, verify all the fields, build the
// service to verify the use_ip setting is honored, set the token service
// based on addr and verify the token service is set correctly, decode
// the token service and ensure all the fields of the decoded addr match
private void verifyServiceAddr(String host, String ip) {
InetSocketAddress addr;
int port = 123;
// test host, port tuple
//LOG.info("test tuple ("+host+","+port+")");
addr = NetUtils.createSocketAddrForHost(host, port);
verifyAddress(addr, host, ip, port);
// test authority with no default port
//LOG.info("test authority '"+host+":"+port+"'");
addr = NetUtils.createSocketAddr(host+":"+port);
verifyAddress(addr, host, ip, port);
// test authority with a default port, make sure default isn't used
//LOG.info("test authority '"+host+":"+port+"' with ignored default port");
addr = NetUtils.createSocketAddr(host+":"+port, port+1);
verifyAddress(addr, host, ip, port);
// test host-only authority, using port as default port
//LOG.info("test host:"+host+" port:"+port);
addr = NetUtils.createSocketAddr(host, port);
verifyAddress(addr, host, ip, port);
}
// NOTE(review): "my" is mapped to localhost but the verified host is
// "LocalHost" — the static mapping appears unused here; confirm intent.
@Test
public void testSocketAddrWithName() {
String staticHost = "my";
NetUtils.addStaticResolution(staticHost, "localhost");
verifyServiceAddr("LocalHost", "127.0.0.1");
}
// A literal IP with a static resolution to "localhost" still round-trips
// through the token service as the IP itself.
@Test
public void testSocketAddrWithIP() {
String staticHost = "127.0.0.1";
NetUtils.addStaticResolution(staticHost, "localhost");
verifyServiceAddr(staticHost, "127.0.0.1");
}
// A hostname statically resolved to another name ("host1" -> "localhost")
// must yield localhost's IP in the token service.
@Test
public void testSocketAddrWithNameToStaticName() {
String staticHost = "host1";
NetUtils.addStaticResolution(staticHost, "localhost");
verifyServiceAddr(staticHost, "127.0.0.1");
}
// A hostname statically resolved directly to an IP literal.
@Test
public void testSocketAddrWithNameToStaticIP() {
String staticHost = "host3";
NetUtils.addStaticResolution(staticHost, "255.255.255.255");
verifyServiceAddr(staticHost, "255.255.255.255");
}
// Re-registering a static resolution must take effect immediately:
// the same hostname resolves to the new IP on the second verification.
@Test
public void testSocketAddrWithChangeIP() {
String staticHost = "host4";
NetUtils.addStaticResolution(staticHost, "255.255.255.255");
verifyServiceAddr(staticHost, "255.255.255.255");
NetUtils.addStaticResolution(staticHost, "255.255.255.254");
verifyServiceAddr(staticHost, "255.255.255.254");
}
// this is a bizarre case, but it's if a test tries to remap an ip address
@Test
public void testSocketAddrWithIPToStaticIP() {
String staticHost = "1.2.3.4";
NetUtils.addStaticResolution(staticHost, "255.255.255.255");
verifyServiceAddr(staticHost, "255.255.255.255");
}
// getAuthenticationMethod: unset -> SIMPLE (the default), "simple" and
// "kerberos" map to their enum values, anything else is rejected.
@Test
public void testGetAuthenticationMethod() {
  Configuration conf = new Configuration();
  // default is simple
  conf.unset(HADOOP_SECURITY_AUTHENTICATION);
  assertEquals(SIMPLE, SecurityUtil.getAuthenticationMethod(conf));
  // simple
  conf.set(HADOOP_SECURITY_AUTHENTICATION, "simple");
  assertEquals(SIMPLE, SecurityUtil.getAuthenticationMethod(conf));
  // kerberos
  conf.set(HADOOP_SECURITY_AUTHENTICATION, "kaboom".equals("kerberos") ? "x" : "kerberos");
  assertEquals(KERBEROS, SecurityUtil.getAuthenticationMethod(conf));
  // bad value must raise a descriptive IllegalArgumentException;
  // assertThrows replaces the old catch-and-stringify pattern
  conf.set(HADOOP_SECURITY_AUTHENTICATION, "kaboom");
  Exception error = assertThrows(IllegalArgumentException.class,
      () -> SecurityUtil.getAuthenticationMethod(conf));
  assertEquals("java.lang.IllegalArgumentException: " +
      "Invalid attribute value for " +
      HADOOP_SECURITY_AUTHENTICATION + " of kaboom", error.toString());
}
// setAuthenticationMethod writes the config key; null means "use default",
// which is persisted as "simple".
@Test
public void testSetAuthenticationMethod() {
Configuration conf = new Configuration();
// default
SecurityUtil.setAuthenticationMethod(null, conf);
assertEquals("simple", conf.get(HADOOP_SECURITY_AUTHENTICATION));
// simple
SecurityUtil.setAuthenticationMethod(SIMPLE, conf);
assertEquals("simple", conf.get(HADOOP_SECURITY_AUTHENTICATION));
// kerberos
SecurityUtil.setAuthenticationMethod(KERBEROS, conf);
assertEquals("kerberos", conf.get(HADOOP_SECURITY_AUTHENTICATION));
}
// A ZK auth value given directly in the configuration must parse into a
// single ZKAuthInfo with its scheme and password intact.
// (ZK_AUTH_VALUE is presumably "a_scheme:a_password" — declared above this
// chunk; TODO confirm.)
@Test
public void testAuthPlainPasswordProperty() throws Exception {
Configuration conf = new Configuration();
conf.set(CommonConfigurationKeys.ZK_AUTH, ZK_AUTH_VALUE);
List<ZKAuthInfo> zkAuths = SecurityUtil.getZKAuthInfos(conf,
CommonConfigurationKeys.ZK_AUTH);
assertEquals(1, zkAuths.size());
ZKAuthInfo zkAuthInfo = zkAuths.get(0);
assertEquals("a_scheme", zkAuthInfo.getScheme());
assertArrayEquals("a_password".getBytes(), zkAuthInfo.getAuth());
}
// The "@/path/to/file" notation must load the ZK auth value from the
// referenced text file instead of taking it literally.
@Test
public void testAuthPlainTextFile() throws Exception {
Configuration conf = new Configuration();
File passwordTxtFile = File.createTempFile(
getClass().getSimpleName() + ".testAuthAtPathNotation-", ".txt");
Files.asCharSink(passwordTxtFile, StandardCharsets.UTF_8)
.write(ZK_AUTH_VALUE);
try {
// "@" prefix tells getZKAuthInfos to read the value from the file
conf.set(CommonConfigurationKeys.ZK_AUTH,
"@" + passwordTxtFile.getAbsolutePath());
List<ZKAuthInfo> zkAuths = SecurityUtil.getZKAuthInfos(conf,
CommonConfigurationKeys.ZK_AUTH);
assertEquals(1, zkAuths.size());
ZKAuthInfo zkAuthInfo = zkAuths.get(0);
assertEquals("a_scheme", zkAuthInfo.getScheme());
assertArrayEquals("a_password".getBytes(), zkAuthInfo.getAuth());
} finally {
// always remove the temp file, even on assertion failure
boolean deleted = passwordTxtFile.delete();
assertTrue(deleted);
}
}
// The ZK auth value can also come from a credential provider: a local
// jceks keystore referenced via the credential-provider-path config.
@Test
public void testAuthLocalJceks() throws Exception {
File localJceksFile = File.createTempFile(
getClass().getSimpleName() +".testAuthLocalJceks-", ".localjceks");
// seed the keystore with ZK_AUTH_VALUE under the ZK_AUTH alias
populateLocalJceksTestFile(localJceksFile.getAbsolutePath());
try {
String localJceksUri = "localjceks://file/" +
localJceksFile.getAbsolutePath();
Configuration conf = new Configuration();
conf.set(CredentialProviderFactory.CREDENTIAL_PROVIDER_PATH,
localJceksUri);
List<ZKAuthInfo> zkAuths = SecurityUtil.getZKAuthInfos(conf,
CommonConfigurationKeys.ZK_AUTH);
assertEquals(1, zkAuths.size());
ZKAuthInfo zkAuthInfo = zkAuths.get(0);
assertEquals("a_scheme", zkAuthInfo.getScheme());
assertArrayEquals("a_password".getBytes(), zkAuthInfo.getAuth());
} finally {
// always remove the temp keystore, even on assertion failure
boolean deleted = localJceksFile.delete();
assertTrue(deleted);
}
}
// Writes ZK_AUTH_VALUE into a LocalJavaKeyStoreProvider-backed jceks file
// at the given path, under the ZK_AUTH alias, so tests can read it back.
private void populateLocalJceksTestFile(String path) throws IOException {
Configuration conf = new Configuration();
conf.set(CredentialProviderFactory.CREDENTIAL_PROVIDER_PATH,
"localjceks://file/" + path);
CredentialProvider provider =
CredentialProviderFactory.getProviders(conf).get(0);
// sanity check: the URI scheme must have selected the local provider
assertEquals(LocalJavaKeyStoreProvider.class.getName(),
provider.getClass().getName());
provider.createCredentialEntry(CommonConfigurationKeys.ZK_AUTH,
ZK_AUTH_VALUE.toCharArray());
// flush persists the entry to disk
provider.flush();
}
// setConfiguration must install the resolver implied by the use_ip flag
// (QualifiedHostResolver when false, StandardHostResolver when true) and
// enable its cache only when the expiry interval is positive.
@Test
public void testInitiateHostResolver() throws Exception {
  Configuration conf = new Configuration();
  // 1. useIP is false and cache interval is 0
  checkResolver(conf, false, 0, SecurityUtil.QualifiedHostResolver.class, false);
  // 2. useIP is false and cache interval is 10
  checkResolver(conf, false, 10, SecurityUtil.QualifiedHostResolver.class, true);
  // 3. useIP is true and cache interval is 0
  checkResolver(conf, true, 0, SecurityUtil.StandardHostResolver.class, false);
  // 4. useIP is true and cache interval is 10
  checkResolver(conf, true, 10, SecurityUtil.StandardHostResolver.class, true);
}

// Applies the given use_ip / cache-interval settings and asserts the
// installed resolver's concrete type and whether its cache is enabled.
private static void checkResolver(Configuration conf, boolean useIp,
    long cacheIntervalSeconds, Class<?> expectedType, boolean cacheExpected) {
  conf.setBoolean(
      CommonConfigurationKeys.HADOOP_SECURITY_TOKEN_SERVICE_USE_IP, useIp);
  conf.setTimeDuration(
      CommonConfigurationKeys.HADOOP_SECURITY_HOSTNAME_CACHE_EXPIRE_INTERVAL_SECONDS,
      cacheIntervalSeconds, TimeUnit.SECONDS);
  SecurityUtil.setConfiguration(conf);
  SecurityUtil.HostResolver hostResolver = SecurityUtil.hostResolver;
  assertTrue(expectedType.isInstance(hostResolver),
      "Resolver should be a " + expectedType.getSimpleName());
  SecurityUtil.CacheableHostResolver cacheableHostResolver =
      (SecurityUtil.CacheableHostResolver) hostResolver;
  if (cacheExpected) {
    assertNotNull(cacheableHostResolver.getCache(),
        "Cache should be set when caching interval is enabled");
  } else {
    assertNull(cacheableHostResolver.getCache(),
        "Cache should be null when caching interval is less than or equal 0");
  }
}
/**
 * Test caching behavior in QualifiedHostResolver when caching is enabled.
 */
@Test
public void testQualifiedHostResolverCachingEnabled() throws Exception {
// Create a QualifiedHostResolver with expiry interval > 0 (1 second)
SecurityUtil.QualifiedHostResolver
resolver = new SecurityUtil.QualifiedHostResolver(1);
testCacheableResolve(resolver);
}
/**
 * Test caching behavior in StandardHostResolver when caching is enabled.
 */
@Test
public void testStandardHostResolverCachingEnabled() throws Exception {
// Create a StandardHostResolver with expiry interval > 0 (1 second)
SecurityUtil.StandardHostResolver
resolver = new SecurityUtil.StandardHostResolver(1);
testCacheableResolve(resolver);
}
// Shared check for any CacheableHostResolver built with a 1-second expiry:
// repeated lookups hit the cache (same instance), and a lookup after the
// expiry window returns a fresh instance.
private void testCacheableResolve(SecurityUtil.CacheableHostResolver resolver)
throws Exception {
// Call getByName twice with the same host
InetAddress addr1 = resolver.getByName("127.0.0.1");
InetAddress addr2 = resolver.getByName("127.0.0.1");
assertNotNull(addr1);
assertNotNull(addr2);
// Both addresses should be the same instance (cached value)
assertSame(addr1, addr2);
// wait for timeout of cache item (1.5s > the 1s expiry configured above;
// real sleep makes this test inherently slow/timing-sensitive)
Thread.sleep(1500);
InetAddress addr3 = resolver.getByName("127.0.0.1");
assertNotNull(addr3);
assertNotSame(addr1, addr3);
}
/**
 * Test resolving non-existent hostname, show throw UnknownHostException.
 */
@Test
public void testInvalidHostThrowsException() {
// both resolver implementations must propagate UnknownHostException,
// caching (interval 10s) must not swallow the failure
SecurityUtil.StandardHostResolver
standardHostResolver = new SecurityUtil.StandardHostResolver(10);
String invalidHost = "invalid_host_name_which_does_not_exist";
assertThrows(UnknownHostException.class, () -> {
standardHostResolver.getByName(invalidHost);
}, "Resolving an invalid host should throw UnknownHostException");
SecurityUtil.QualifiedHostResolver
qualifiedHostResolver = new SecurityUtil.QualifiedHostResolver(10);
assertThrows(UnknownHostException.class, () -> {
qualifiedHostResolver.getByName(invalidHost);
}, "Resolving an invalid host should throw UnknownHostException");
}
}
| TestSecurityUtil |
java | apache__flink | flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/plan/utils/VectorSearchUtil.java | {
"start": 2166,
"end": 6322
} | class ____ extends FunctionCallUtil {
public static boolean isAsyncVectorSearch(
TableSourceTable searchTable,
Map<String, String> runtimeConfig,
Collection<Integer> searchColumns) {
Configuration queryConf = Configuration.fromMap(runtimeConfig);
boolean syncFound = false;
boolean asyncFound = false;
VectorSearchTableSource.VectorSearchRuntimeProvider provider =
createVectorSearchRuntimeProvider(searchTable, searchColumns, queryConf);
if (provider instanceof AsyncVectorSearchFunctionProvider) {
asyncFound = true;
}
if (provider instanceof VectorSearchFunctionProvider) {
syncFound = true;
}
if (!asyncFound && !syncFound) {
throw new TableException(
String.format(
"Can not find valid implementation for search function for table %s.",
searchTable.contextResolvedTable().getIdentifier().asSummaryString()));
}
Optional<Boolean> requiredMode =
queryConf.getOptional(VectorSearchRuntimeConfigOptions.ASYNC);
if (!requiredMode.isPresent()) {
return asyncFound;
} else if (requiredMode.get()) {
if (!asyncFound) {
throw new TableException(
String.format(
"Require async mode, but vector search provider %s doesn't support async mode.",
provider.getClass().getName()));
}
return true;
} else {
if (!syncFound) {
throw new TableException(
String.format(
"Require sync mode, but vector search provider %s doesn't support sync mode.",
provider.getClass().getName()));
}
return false;
}
}
public static VectorSearchTableSource.VectorSearchRuntimeProvider
createVectorSearchRuntimeProvider(
TableSourceTable searchTable,
Collection<Integer> searchColumns,
ReadableConfig runtimeConfig) {
int[][] indices = searchColumns.stream().map(i -> new int[] {i}).toArray(int[][]::new);
VectorSearchTableSource tableSource = (VectorSearchTableSource) searchTable.tableSource();
VectorSearchRuntimeProviderContext providerContext =
new VectorSearchRuntimeProviderContext(indices, runtimeConfig);
return tableSource.getSearchRuntimeProvider(providerContext);
}
public static AsyncOptions getMergedVectorSearchAsyncOptions(
Map<String, String> runtimeConfig,
TableConfig config,
ChangelogMode inputChangelogMode) {
Configuration queryConf = Configuration.fromMap(runtimeConfig);
int asyncBufferCapacity =
coalesce(
queryConf.get(ASYNC_MAX_CONCURRENT_OPERATIONS),
config.get(
ExecutionConfigOptions
.TABLE_EXEC_ASYNC_VECTOR_SEARCH_MAX_CONCURRENT_OPERATIONS));
long asyncTimeout =
coalesce(
queryConf.get(ASYNC_TIMEOUT),
config.get(
ExecutionConfigOptions
.TABLE_EXEC_ASYNC_VECTOR_SEARCH_TIMEOUT))
.toMillis();
AsyncDataStream.OutputMode asyncOutputMode =
convert(
inputChangelogMode,
coalesce(
queryConf.get(ASYNC_OUTPUT_MODE),
config.get(
ExecutionConfigOptions
.TABLE_EXEC_ASYNC_VECTOR_SEARCH_OUTPUT_MODE)));
return new AsyncOptions(asyncBufferCapacity, asyncTimeout, false, asyncOutputMode);
}
}
| VectorSearchUtil |
java | apache__flink | flink-runtime/src/test/java/org/apache/flink/runtime/metrics/groups/TaskMetricGroupTest.java | {
"start": 1927,
"end": 8946
} | class ____ {
private MetricRegistryImpl registry;
@BeforeEach
void setup() {
registry =
new MetricRegistryImpl(
MetricRegistryTestUtils.defaultMetricRegistryConfiguration());
}
@AfterEach
void teardown() throws Exception {
if (registry != null) {
registry.closeAsync().get();
}
}
// ------------------------------------------------------------------------
// scope tests
// -----------------------------------------------------------------------
@Test
void testGenerateScopeDefault() {
TaskManagerMetricGroup tmGroup =
TaskManagerMetricGroup.createTaskManagerMetricGroup(
registry, "theHostName", new ResourceID("test-tm-id"));
TaskMetricGroup taskGroup =
tmGroup.addJob(new JobID(), "myJobName")
.addTask(createExecutionAttemptId(new JobVertexID(), 13, 2), "aTaskName");
assertThat(taskGroup.getScopeComponents())
.containsExactly(
"theHostName", "taskmanager", "test-tm-id", "myJobName", "aTaskName", "13");
assertThat(taskGroup.getMetricIdentifier("name"))
.isEqualTo("theHostName.taskmanager.test-tm-id.myJobName.aTaskName.13.name");
}
@Test
void testGenerateScopeCustom() throws Exception {
Configuration cfg = new Configuration();
cfg.set(MetricOptions.SCOPE_NAMING_TM, "abc");
cfg.set(MetricOptions.SCOPE_NAMING_TM_JOB, "def");
cfg.set(MetricOptions.SCOPE_NAMING_TASK, "<tm_id>.<job_id>.<task_id>.<task_attempt_id>");
MetricRegistryImpl registry =
new MetricRegistryImpl(MetricRegistryTestUtils.fromConfiguration(cfg));
JobID jid = new JobID();
JobVertexID vertexId = new JobVertexID();
ExecutionAttemptID executionId = createExecutionAttemptId(vertexId, 13, 2);
TaskManagerMetricGroup tmGroup =
TaskManagerMetricGroup.createTaskManagerMetricGroup(
registry, "theHostName", new ResourceID("test-tm-id"));
TaskMetricGroup taskGroup =
tmGroup.addJob(jid, "myJobName").addTask(executionId, "aTaskName");
assertThat(taskGroup.getScopeComponents())
.containsExactly(
"test-tm-id", jid.toString(), vertexId.toString(), executionId.toString());
assertThat(taskGroup.getMetricIdentifier("name"))
.isEqualTo(String.format("test-tm-id.%s.%s.%s.name", jid, vertexId, executionId));
registry.closeAsync().get();
}
@Test
void testGenerateScopeWilcard() throws Exception {
Configuration cfg = new Configuration();
cfg.set(MetricOptions.SCOPE_NAMING_TASK, "*.<task_attempt_id>.<subtask_index>");
MetricRegistryImpl registry =
new MetricRegistryImpl(MetricRegistryTestUtils.fromConfiguration(cfg));
ExecutionAttemptID executionId = createExecutionAttemptId(new JobVertexID(), 13, 1);
TaskManagerMetricGroup tmGroup =
TaskManagerMetricGroup.createTaskManagerMetricGroup(
registry, "theHostName", new ResourceID("test-tm-id"));
TaskMetricGroup taskGroup =
tmGroup.addJob(new JobID(), "myJobName").addTask(executionId, "aTaskName");
assertThat(taskGroup.getScopeComponents())
.containsExactly(
"theHostName",
"taskmanager",
"test-tm-id",
"myJobName",
executionId.toString(),
"13");
assertThat(taskGroup.getMetricIdentifier("name"))
.isEqualTo(
"theHostName.taskmanager.test-tm-id.myJobName." + executionId + ".13.name");
registry.closeAsync().get();
}
@Test
void testCreateQueryServiceMetricInfo() {
JobID jid = new JobID();
JobVertexID vid = new JobVertexID();
ExecutionAttemptID eid = createExecutionAttemptId(vid, 4, 5);
TaskManagerMetricGroup tm =
TaskManagerMetricGroup.createTaskManagerMetricGroup(
registry, "host", new ResourceID("id"));
TaskMetricGroup task = tm.addJob(jid, "jobname").addTask(eid, "taskName");
QueryScopeInfo.TaskQueryScopeInfo info =
task.createQueryServiceMetricInfo(new DummyCharacterFilter());
assertThat(info.scope).isEmpty();
assertThat(info.jobID).isEqualTo(jid.toString());
assertThat(info.vertexID).isEqualTo(vid.toString());
assertThat(info.subtaskIndex).isEqualTo(4);
}
@Test
void testTaskMetricGroupCleanup() throws Exception {
CountingMetricRegistry registry = new CountingMetricRegistry(new Configuration());
TaskManagerMetricGroup taskManagerMetricGroup =
TaskManagerMetricGroup.createTaskManagerMetricGroup(
registry, "localhost", new ResourceID("0"));
int initialMetricsCount = registry.getNumberRegisteredMetrics();
TaskMetricGroup taskMetricGroup =
taskManagerMetricGroup
.addJob(new JobID(), "job")
.addTask(createExecutionAttemptId(), "task");
// the io metric should have registered predefined metrics
assertThat(registry.getNumberRegisteredMetrics()).isGreaterThan(initialMetricsCount);
taskMetricGroup.close();
// now all registered metrics should have been unregistered
assertThat(registry.getNumberRegisteredMetrics()).isEqualTo(initialMetricsCount);
registry.closeAsync().get();
}
@Test
void testOperatorNameTruncation() throws Exception {
Configuration cfg = new Configuration();
cfg.set(MetricOptions.SCOPE_NAMING_OPERATOR, ScopeFormat.SCOPE_OPERATOR_NAME);
MetricRegistryImpl registry =
new MetricRegistryImpl(MetricRegistryTestUtils.fromConfiguration(cfg));
TaskManagerMetricGroup tm =
TaskManagerMetricGroup.createTaskManagerMetricGroup(
registry, "host", new ResourceID("id"));
TaskMetricGroup taskMetricGroup =
tm.addJob(new JobID(), "jobname").addTask(createExecutionAttemptId(), "task");
String originalName = new String(new char[100]).replace("\0", "-");
InternalOperatorMetricGroup operatorMetricGroup =
taskMetricGroup.getOrAddOperator(originalName);
String storedName = operatorMetricGroup.getScopeComponents()[0];
assertThat(storedName.length()).isEqualTo(ConfigConstants.METRICS_OPERATOR_NAME_MAX_LENGTH);
assertThat(originalName.substring(0, ConfigConstants.METRICS_OPERATOR_NAME_MAX_LENGTH))
.isEqualTo(storedName);
registry.closeAsync().get();
}
private static | TaskMetricGroupTest |
java | spring-projects__spring-framework | spring-websocket/src/main/java/org/springframework/web/socket/server/HandshakeHandler.java | {
"start": 1238,
"end": 2323
} | interface ____ {
/**
* Initiate the handshake.
* @param request the current request
* @param response the current response
* @param wsHandler the handler to process WebSocket messages; see
* {@link PerConnectionWebSocketHandler} for providing a handler with
* per-connection lifecycle.
* @param attributes the attributes from the HTTP handshake to associate with the WebSocket
* session; the provided attributes are copied, the original map is not used.
* @return whether the handshake negotiation was successful or not. In either case the
* response status, headers, and body will have been updated to reflect the
* result of the negotiation
* @throws HandshakeFailureException thrown when handshake processing failed to
* complete due to an internal, unrecoverable error, i.e. a server error as
* opposed to a failure to successfully negotiate the handshake.
*/
boolean doHandshake(ServerHttpRequest request, ServerHttpResponse response, WebSocketHandler wsHandler,
Map<String, Object> attributes) throws HandshakeFailureException;
}
| HandshakeHandler |
java | quarkusio__quarkus | independent-projects/resteasy-reactive/common/runtime/src/main/java/org/jboss/resteasy/reactive/common/providers/serialisers/MessageReaderUtil.java | {
"start": 211,
"end": 981
} | class ____ {
public static final String UTF8_CHARSET = StandardCharsets.UTF_8.name();
public static String charsetFromMediaType(MediaType mediaType) {
if (mediaType == null) {
return UTF8_CHARSET;
}
String charset = mediaType.getParameters().get(MediaType.CHARSET_PARAMETER);
if (charset != null) {
return charset;
}
return UTF8_CHARSET;
}
public static byte[] readBytes(InputStream entityStream) throws IOException {
return entityStream.readAllBytes();
}
public static String readString(InputStream entityStream, MediaType mediaType) throws IOException {
return new String(readBytes(entityStream), charsetFromMediaType(mediaType));
}
}
| MessageReaderUtil |
java | spring-projects__spring-framework | spring-beans/src/main/java/org/springframework/beans/factory/config/ListFactoryBean.java | {
"start": 1155,
"end": 1535
} | class ____ extends AbstractFactoryBean<List<Object>> {
private @Nullable List<?> sourceList;
@SuppressWarnings("rawtypes")
private @Nullable Class<? extends List> targetListClass;
/**
* Set the source List, typically populated via XML "list" elements.
*/
public void setSourceList(List<?> sourceList) {
this.sourceList = sourceList;
}
/**
* Set the | ListFactoryBean |
java | apache__camel | dsl/camel-yaml-dsl/camel-yaml-dsl-deserializers/src/generated/java/org/apache/camel/dsl/yaml/deserializers/ModelDeserializers.java | {
"start": 1141278,
"end": 1142057
} | class ____ the exception to create using the message.", displayName = "Exception Type"),
@YamlProperty(name = "id", type = "string", description = "Sets the id of this node", displayName = "Id"),
@YamlProperty(name = "message", type = "string", description = "To create a new exception instance and use the given message as caused message (supports simple language)", displayName = "Message"),
@YamlProperty(name = "note", type = "string", description = "Sets the note of this node", displayName = "Note"),
@YamlProperty(name = "ref", type = "string", description = "Reference to the exception instance to lookup from the registry to throw", displayName = "Ref")
}
)
public static | of |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/mapping/converted/converter/ExplicitJavaTypeDescriptorTest.java | {
"start": 4016,
"end": 4762
} | class ____ {
@Id
private Integer id;
@Convert( converter = MutableConverterImpl.class )
private MutableState mutableState;
@Convert( converter = ImmutableConverterImpl.class )
private ImmutableState immutableState;
@Convert( converter = PseudoMutableConverterImpl.class )
private PseudoMutableState immutableMutableState;
public TheEntity() {
}
public TheEntity(Integer id) {
this.id = id;
this.mutableState = new MutableState( id.toString() );
this.immutableState = new ImmutableState( id.toString() );
this.immutableMutableState = new PseudoMutableState( id.toString() );
}
}
// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
// Purely mutable state
public static | TheEntity |
java | apache__flink | flink-libraries/flink-cep/src/test/java/org/apache/flink/cep/utils/NFATestUtilities.java | {
"start": 2083,
"end": 2767
} | class ____ implements Comparator<List<Event>> {
@Override
public int compare(List<Event> o1, List<Event> o2) {
int sizeComp = Integer.compare(o1.size(), o2.size());
if (sizeComp == 0) {
EventComparator comp = new EventComparator();
for (int i = 0; i < o1.size(); i++) {
int eventComp = comp.compare(o1.get(i), o2.get(i));
if (eventComp != 0) {
return eventComp;
}
}
return 0;
} else {
return sizeComp;
}
}
}
private static | ListEventComparator |
java | quarkusio__quarkus | extensions/redis-client/runtime/src/test/java/io/quarkus/redis/datasource/TransactionalHashCommandsTest.java | {
"start": 627,
"end": 6444
} | class ____ extends DatasourceTestBase {
private RedisDataSource blocking;
private ReactiveRedisDataSource reactive;
public static final String KEY = "tx-hash-key";
@BeforeEach
void initialize() {
blocking = new BlockingRedisDataSourceImpl(vertx, redis, api, Duration.ofSeconds(60));
reactive = new ReactiveRedisDataSourceImpl(vertx, redis, api);
}
@AfterEach
public void clear() {
blocking.flushall();
}
@Test
public void hgetBlocking() {
TransactionResult result = blocking.withTransaction(tx -> {
TransactionalHashCommands<String, String, String> hash = tx.hash(String.class);
assertThat(hash.getDataSource()).isEqualTo(tx);
hash.hget(KEY, "field"); // 0 -> null
hash.hset(KEY, "field", "hello"); // 1 -> true
hash.hget(KEY, "field"); // 2 -> "hello
hash.hdel(KEY, "field", "field2"); // 3 -> 1
hash.hget(KEY, "field"); // 4 -> null
});
assertThat(result.size()).isEqualTo(5);
assertThat(result.discarded()).isFalse();
assertThat((Void) result.get(0)).isNull();
assertThat((Boolean) result.get(1)).isTrue();
assertThat((String) result.get(2)).isEqualTo("hello");
assertThat((int) result.get(3)).isEqualTo(1);
assertThat((Void) result.get(4)).isNull();
}
@Test
public void hgetBlockingWithWatch() {
TransactionResult result = blocking.withTransaction(tx -> {
TransactionalHashCommands<String, String, String> hash = tx.hash(String.class);
hash.hget(KEY, "field"); // 0 -> null
hash.hset(KEY, "field", "hello"); // 1 -> true
hash.hget(KEY, "field"); // 2 -> "hello
hash.hdel(KEY, "field", "field2"); // 3 -> 1
hash.hget(KEY, "field"); // 4 -> null
}, KEY);
assertThat(result.size()).isEqualTo(5);
assertThat(result.discarded()).isFalse();
assertThat((Void) result.get(0)).isNull();
assertThat((Boolean) result.get(1)).isTrue();
assertThat((String) result.get(2)).isEqualTo("hello");
assertThat((int) result.get(3)).isEqualTo(1);
assertThat((Void) result.get(4)).isNull();
}
@Test
public void hgetBlockingWithWatchAndDiscard() {
TransactionResult result = blocking.withTransaction(tx -> {
TransactionalHashCommands<String, String, String> hash = tx.hash(String.class);
hash.hget(KEY, "field"); // 0 -> null
hash.hset(KEY, "field", "hello"); // 1 -> true
hash.hget(KEY, "field"); // 2 -> "hello
// Update the key - that will discard the transaction
blocking.hash(String.class).hset(KEY, "toto", "updated");
hash.hdel(KEY, "field", "field2"); // 3 -> 1
hash.hget(KEY, "field"); // 4 -> null
}, KEY);
assertThat(result.size()).isEqualTo(0);
assertThat(result.discarded()).isTrue();
}
@Test
public void hgetReactive() {
TransactionResult result = reactive.withTransaction(tx -> {
ReactiveTransactionalHashCommands<String, String, String> hash = tx.hash(String.class);
return hash.hget(KEY, "field") // 0 -> null
.chain(() -> hash.hset(KEY, "field", "hello")) // 1 -> true
.chain(() -> hash.hget(KEY, "field")) // 2 -> "hello
.chain(() -> hash.hdel(KEY, "field", "field2")) // 3 -> 1
.chain(() -> hash.hget(KEY, "field")); // 4 -> null
}).await().atMost(Duration.ofSeconds(5));
assertThat(result.size()).isEqualTo(5);
assertThat(result.discarded()).isFalse();
assertThat((Void) result.get(0)).isNull();
assertThat((Boolean) result.get(1)).isTrue();
assertThat((String) result.get(2)).isEqualTo("hello");
assertThat((int) result.get(3)).isEqualTo(1);
assertThat((Void) result.get(4)).isNull();
}
@Test
public void hgetReactiveWithWatch() {
TransactionResult result = reactive.withTransaction(tx -> {
ReactiveTransactionalHashCommands<String, String, String> hash = tx.hash(String.class);
return hash.hget(KEY, "field") // 0 -> null
.chain(() -> hash.hset(KEY, "field", "hello")) // 1 -> true
.chain(() -> hash.hget(KEY, "field")) // 2 -> "hello
.chain(() -> hash.hdel(KEY, "field", "field2")) // 3 -> 1
.chain(() -> hash.hget(KEY, "field")); // 4 -> null
}, KEY).await().atMost(Duration.ofSeconds(5));
assertThat(result.size()).isEqualTo(5);
assertThat(result.discarded()).isFalse();
assertThat((Void) result.get(0)).isNull();
assertThat((Boolean) result.get(1)).isTrue();
assertThat((String) result.get(2)).isEqualTo("hello");
assertThat((int) result.get(3)).isEqualTo(1);
assertThat((Void) result.get(4)).isNull();
}
@Test
public void hgetReactiveWithWatchAndDiscard() {
TransactionResult result = reactive.withTransaction(tx -> {
ReactiveTransactionalHashCommands<String, String, String> hash = tx.hash(String.class);
return hash.hget(KEY, "field")
.chain(() -> hash.hset(KEY, "field", "hello"))
.chain(() -> hash.hget(KEY, "field"))
.chain(() -> reactive.hash(String.class).hset(KEY, "a", "b"))
.chain(() -> hash.hdel(KEY, "field", "field2"))
.chain(() -> hash.hget(KEY, "field"));
}, KEY).await().atMost(Duration.ofSeconds(5));
assertThat(result.size()).isEqualTo(0);
assertThat(result.discarded()).isTrue();
}
}
| TransactionalHashCommandsTest |
java | spring-cloud__spring-cloud-gateway | spring-cloud-gateway-server-webflux/src/test/java/org/springframework/cloud/gateway/route/RedisRouteDefinitionRepositoryTests.java | {
"start": 5733,
"end": 5949
} | class ____ extends AbstractGatewayFilterFactory<Object> {
@Override
public GatewayFilter apply(Object config) {
return (exchange, chain) -> chain.filter(exchange);
}
}
public static | TestGatewayFilterFactory |
java | elastic__elasticsearch | x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/SqlAsyncGetResultsAction.java | {
"start": 512,
"end": 817
} | class ____ extends ActionType<SqlQueryResponse> {
public static final SqlAsyncGetResultsAction INSTANCE = new SqlAsyncGetResultsAction();
public static final String NAME = SQL_ASYNC_GET_RESULT_ACTION_NAME;
private SqlAsyncGetResultsAction() {
super(NAME);
}
}
| SqlAsyncGetResultsAction |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/GetClassOnClassTest.java | {
"start": 2895,
"end": 3096
} | class ____ {
public static boolean getClass(Object a) {
return true;
}
}
}\
""")
.doTest();
}
}
| DummyObject |
java | apache__kafka | connect/runtime/src/test/java/org/apache/kafka/connect/connector/policy/AllowlistConnectorClientConfigOverridePolicyTest.java | {
"start": 1236,
"end": 2770
} | class ____ extends BaseConnectorClientConfigOverridePolicyTest {
private static final List<String> ALL_CONFIGS = Stream.of(
ProducerConfig.configNames(),
ConsumerConfig.configNames(),
AdminClientConfig.configNames())
.flatMap(Collection::stream)
.toList();
private AllowlistConnectorClientConfigOverridePolicy policy;
@BeforeEach
public void setUp() {
policy = new AllowlistConnectorClientConfigOverridePolicy();
}
@Override
protected ConnectorClientConfigOverridePolicy policyToTest() {
return policy;
}
@Test
public void testDenyAllByDefault() {
for (String config : ALL_CONFIGS) {
testInvalidOverride(Map.of(config, new Object()));
}
}
@Test
public void testAllowConfigs() {
Set<String> allowedConfigs = Set.of(
ProducerConfig.ACKS_CONFIG,
ConsumerConfig.CLIENT_ID_CONFIG,
AdminClientConfig.CONNECTIONS_MAX_IDLE_MS_CONFIG
);
policy.configure(Map.of(AllowlistConnectorClientConfigOverridePolicy.ALLOWLIST_CONFIG, String.join(",", allowedConfigs)));
for (String config : ALL_CONFIGS) {
if (!allowedConfigs.contains(config)) {
testInvalidOverride(Map.of(config, new Object()));
} else {
testValidOverride(Map.of(config, new Object()));
}
}
}
}
| AllowlistConnectorClientConfigOverridePolicyTest |
java | quarkusio__quarkus | extensions/resteasy-classic/resteasy/deployment/src/test/java/io/quarkus/resteasy/test/root/ApplicationPathHttpRootTest.java | {
"start": 1426,
"end": 1480
} | class ____ extends Application {
}
}
| BaseApplication |
java | apache__dubbo | dubbo-config/dubbo-config-api/src/main/java/org/apache/dubbo/config/utils/SimpleReferenceCache.java | {
"start": 11870,
"end": 11971
} | interface ____ {
String generateKey(ReferenceConfigBase<?> referenceConfig);
}
}
| KeyGenerator |
java | micronaut-projects__micronaut-core | http/src/main/java/io/micronaut/http/HttpAttributes.java | {
"start": 5280,
"end": 5368
} | class ____ {
public static final String PREFIX = "micronaut.http";
}
}
| Constants |
java | apache__kafka | connect/transforms/src/main/java/org/apache/kafka/connect/transforms/TimestampRouter.java | {
"start": 2667,
"end": 4473
} | interface ____ {
String TOPIC_FORMAT = "topic.format";
String TIMESTAMP_FORMAT = "timestamp.format";
}
private String topicFormat;
private ThreadLocal<SimpleDateFormat> timestampFormat;
@Override
public String version() {
return AppInfoParser.getVersion();
}
@Override
public void configure(Map<String, ?> props) {
final SimpleConfig config = new SimpleConfig(CONFIG_DEF, props);
topicFormat = config.getString(ConfigName.TOPIC_FORMAT);
final String timestampFormatStr = config.getString(ConfigName.TIMESTAMP_FORMAT);
timestampFormat = ThreadLocal.withInitial(() -> {
final SimpleDateFormat fmt = new SimpleDateFormat(timestampFormatStr);
fmt.setTimeZone(TimeZone.getTimeZone("UTC"));
return fmt;
});
}
@Override
public R apply(R record) {
final Long timestamp = record.timestamp();
if (timestamp == null) {
throw new DataException("Timestamp missing on record: " + record);
}
final String formattedTimestamp = timestampFormat.get().format(new Date(timestamp));
final String replace1 = TOPIC.matcher(topicFormat).replaceAll(Matcher.quoteReplacement(record.topic()));
final String updatedTopic = TIMESTAMP.matcher(replace1).replaceAll(Matcher.quoteReplacement(formattedTimestamp));
return record.newRecord(
updatedTopic, record.kafkaPartition(),
record.keySchema(), record.key(),
record.valueSchema(), record.value(),
record.timestamp()
);
}
@Override
public void close() {
timestampFormat.remove();
}
@Override
public ConfigDef config() {
return CONFIG_DEF;
}
}
| ConfigName |
java | apache__flink | flink-table/flink-table-runtime/src/main/java/org/apache/flink/table/runtime/typeutils/BigDecimalTypeInfo.java | {
"start": 1381,
"end": 2992
} | class ____ extends BasicTypeInfo<BigDecimal> {
private static final long serialVersionUID = 1L;
public static BigDecimalTypeInfo of(int precision, int scale) {
return new BigDecimalTypeInfo(precision, scale);
}
public static BigDecimalTypeInfo of(BigDecimal value) {
return of(value.precision(), value.scale());
}
private final int precision;
private final int scale;
public BigDecimalTypeInfo(int precision, int scale) {
super(
BigDecimal.class,
new Class<?>[] {},
BigDecSerializer.INSTANCE,
BigDecComparator.class);
this.precision = precision;
this.scale = scale;
}
@Override
public String toString() {
return String.format("Decimal(%d,%d)", precision(), scale());
}
@Override
public boolean equals(Object obj) {
if (!(obj instanceof BigDecimalTypeInfo)) {
return false;
}
BigDecimalTypeInfo that = (BigDecimalTypeInfo) obj;
return this.precision() == that.precision() && this.scale() == that.scale();
}
@Override
public int hashCode() {
int h0 = this.getClass().getCanonicalName().hashCode();
return Arrays.hashCode(new int[] {h0, precision(), scale()});
}
@Override
public boolean shouldAutocastTo(BasicTypeInfo<?> to) {
return (to.getTypeClass() == BigDecimal.class) || super.shouldAutocastTo(to);
}
public int precision() {
return precision;
}
public int scale() {
return scale;
}
}
| BigDecimalTypeInfo |
java | alibaba__fastjson | src/test/java/com/alibaba/json/bvt/issue_1200/Issue1229.java | {
"start": 269,
"end": 1206
} | class ____ extends TestCase {
public void test_for_issue() throws Exception {
final Object parsed = JSON.parse("{\"data\":{}}");
assertTrue(parsed instanceof JSONObject);
assertTrue(((JSONObject)parsed).get("data") instanceof JSONObject);
final Result<Data> result = JSON.parseObject("{\"data\":{}}", new TypeReference<Result<Data>>(){});
assertNotNull(result.data);
assertTrue(result.data instanceof Data);
final Result<List<Data>> result2 = JSON.parseObject("{\"data\":[]}", new TypeReference<Result<List<Data>>>(){});
assertNotNull(result2.data);
assertTrue(result2.data instanceof List);
assertEquals(0, result2.data.size());
}
public void parseErr() throws Exception {
JSON.parseObject("{\"data\":{}}", new TypeReference<Result<List<Data>>>(){});
fail("should be failed due to error json");
}
public static | Issue1229 |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/index/engine/RecoverySourcePruneMergePolicy.java | {
"start": 8445,
"end": 10786
} | class ____ extends FilterStoredFieldsReader {
private final BitSet recoverySourceToKeep;
private final String recoverySourceField;
private final boolean pruneIdField;
RecoverySourcePruningStoredFieldsReader(
StoredFieldsReader in,
BitSet recoverySourceToKeep,
@Nullable String recoverySourceField,
boolean pruneIdField
) {
super(in);
assert recoverySourceField != null || pruneIdField : "nothing to prune";
this.recoverySourceToKeep = recoverySourceToKeep;
this.recoverySourceField = recoverySourceField;
this.pruneIdField = pruneIdField;
}
@Override
public void document(int docID, StoredFieldVisitor visitor) throws IOException {
if (recoverySourceToKeep != null && recoverySourceToKeep.get(docID)) {
super.document(docID, visitor);
} else {
super.document(docID, new FilterStoredFieldVisitor(visitor) {
@Override
public Status needsField(FieldInfo fieldInfo) throws IOException {
if (fieldInfo.name.equals(recoverySourceField)) {
return Status.NO;
}
if (pruneIdField && IdFieldMapper.NAME.equals(fieldInfo.name)) {
return Status.NO;
}
return super.needsField(fieldInfo);
}
});
}
}
@Override
public StoredFieldsReader getMergeInstance() {
return new RecoverySourcePruningStoredFieldsReader(
in.getMergeInstance(),
recoverySourceToKeep,
recoverySourceField,
pruneIdField
);
}
@Override
public StoredFieldsReader clone() {
return new RecoverySourcePruningStoredFieldsReader(in.clone(), recoverySourceToKeep, recoverySourceField, pruneIdField);
}
}
}
}
| RecoverySourcePruningStoredFieldsReader |
java | hibernate__hibernate-orm | hibernate-envers/src/main/java/org/hibernate/envers/internal/entities/mapper/PropertyMapper.java | {
"start": 614,
"end": 2837
} | interface ____ extends ModifiedFlagMapperSupport, DynamicComponentMapperSupport {
/**
* Maps properties to the given map, basing on differences between properties of new and old objects.
*
* @param session The current session.
* @param data Data to map to.
* @param newObj New state of the entity.
* @param oldObj Old state of the entity.
*
* @return True if there are any differences between the states represented by newObj and oldObj.
*/
boolean mapToMapFromEntity(SharedSessionContractImplementor session, Map<String, Object> data, Object newObj, Object oldObj);
/**
* Maps properties from the given map to the given object.
*
* @param enversService The EnversService.
* @param obj Object to map to.
* @param data Data to map from.
* @param primaryKey Primary key of the object to which we map (for relations)
* @param versionsReader VersionsReader for reading relations
* @param revision Revision at which the object is read, for reading relations
*/
void mapToEntityFromMap(
EnversService enversService,
Object obj,
Map data,
Object primaryKey,
AuditReaderImplementor versionsReader,
Number revision);
Object mapToEntityFromMap(
EnversService enversService,
Map data,
Object primaryKey,
AuditReaderImplementor versionsReader,
Number revision);
/**
* Maps collection changes.
*
* @param session The current session.
* @param referencingPropertyName Name of the field, which holds the collection in the entity.
* @param newColl New collection, after updates.
* @param oldColl Old collection, before updates.
* @param id Id of the object owning the collection.
*
* @return List of changes that need to be performed on the persistent store.
*/
List<PersistentCollectionChangeData> mapCollectionChanges(
SharedSessionContractImplementor session,
String referencingPropertyName,
PersistentCollection newColl,
Serializable oldColl, Object id);
void mapModifiedFlagsToMapFromEntity(
SharedSessionContractImplementor session,
Map<String, Object> data,
Object newObj,
Object oldObj);
void mapModifiedFlagsToMapForCollectionChange(String collectionPropertyName, Map<String, Object> data);
}
| PropertyMapper |
java | google__guava | android/guava-testlib/src/com/google/common/testing/GcFinalization.java | {
"start": 3789,
"end": 4062
} | class ____ provides testing utilities. It is not designed for direct use in production or
* for benchmarking.
*
* @author mike nonemacher
* @author Martin Buchholz
* @since 11.0
*/
@GwtIncompatible
@J2ktIncompatible
@J2ObjCIncompatible // gc
@NullMarked
public final | only |
java | playframework__playframework | web/play-java-forms/src/main/java/play/data/validation/Constraints.java | {
"start": 11983,
"end": 12368
} | interface ____ {
String message() default MinLengthValidator.message;
Class<?>[] groups() default {};
Class<? extends Payload>[] payload() default {};
long value();
/** Defines several {@code @MinLength} annotations on the same element. */
@Target({METHOD, FIELD, ANNOTATION_TYPE, CONSTRUCTOR, PARAMETER, TYPE_USE})
@Retention(RUNTIME)
public @ | MinLength |
java | reactor__reactor-core | reactor-core/src/main/java/reactor/core/publisher/FluxArray.java | {
"start": 4832,
"end": 7750
} | class ____<T>
implements InnerProducer<T>, SynchronousSubscription<T> {
final ConditionalSubscriber<? super T> actual;
final T[] array;
int index;
volatile boolean cancelled;
volatile long requested;
@SuppressWarnings("rawtypes")
static final AtomicLongFieldUpdater<ArrayConditionalSubscription> REQUESTED =
AtomicLongFieldUpdater.newUpdater(ArrayConditionalSubscription.class,
"requested");
ArrayConditionalSubscription(ConditionalSubscriber<? super T> actual, T[] array) {
this.actual = actual;
this.array = array;
}
@Override
public CoreSubscriber<? super T> actual() {
return actual;
}
@Override
public void request(long n) {
if (Operators.validate(n)) {
if (Operators.addCap(REQUESTED, this, n) == 0) {
if (n == Long.MAX_VALUE) {
fastPath();
}
else {
slowPath(n);
}
}
}
}
void slowPath(long n) {
final T[] a = array;
final int len = a.length;
final ConditionalSubscriber<? super T> s = actual;
int i = index;
int e = 0;
for (; ; ) {
if (cancelled) {
return;
}
while (i != len && e != n) {
T t = a[i];
if (t == null) {
s.onError(new NullPointerException("The " + i + "th array element was null"));
return;
}
boolean b = s.tryOnNext(t);
if (cancelled) {
return;
}
i++;
if (b) {
e++;
}
}
if (i == len) {
s.onComplete();
return;
}
n = requested;
if (n == e) {
index = i;
n = REQUESTED.addAndGet(this, -e);
if (n == 0) {
return;
}
e = 0;
}
}
}
void fastPath() {
final T[] a = array;
final int len = a.length;
final Subscriber<? super T> s = actual;
for (int i = index; i != len; i++) {
if (cancelled) {
return;
}
T t = a[i];
if (t == null) {
s.onError(new NullPointerException("The " + i + "th array element was null"));
return;
}
s.onNext(t);
}
if (cancelled) {
return;
}
s.onComplete();
}
@Override
public void cancel() {
cancelled = true;
}
@Override
public @Nullable Object scanUnsafe(Attr key) {
if (key == Attr.TERMINATED) return isEmpty();
if (key == Attr.BUFFERED) return size();
if (key == Attr.CANCELLED) return cancelled;
if (key == Attr.REQUESTED_FROM_DOWNSTREAM) return requested;
return InnerProducer.super.scanUnsafe(key);
}
@Override
public @Nullable T poll() {
int i = index;
T[] a = array;
if (i != a.length) {
T t = Objects.requireNonNull(a[i], "Array returned null value");
index = i + 1;
return t;
}
return null;
}
@Override
public boolean isEmpty() {
return index == array.length;
}
@Override
public void clear() {
index = array.length;
}
@Override
public int size() {
return array.length - index;
}
}
}
| ArrayConditionalSubscription |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/dao/ReservationDefinitionInfo.java | {
"start": 1148,
"end": 1293
} | class ____ represent a reservation definition.
*/
@XmlRootElement(name = "reservation-definition")
@XmlAccessorType(XmlAccessType.FIELD)
public | that |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/Allocation.java | {
"start": 1309,
"end": 5848
} | class ____ {
final List<Container> containers;
final Set<ContainerId> strictContainers;
final Set<ContainerId> fungibleContainers;
final List<ResourceRequest> fungibleResources;
final List<NMToken> nmTokens;
final List<Container> increasedContainers;
final List<Container> decreasedContainers;
final List<Container> promotedContainers;
final List<Container> demotedContainers;
private final List<Container> previousAttemptContainers;
private Resource resourceLimit;
private List<RejectedSchedulingRequest> rejectedRequest;
public Allocation(List<Container> containers, Resource resourceLimit,
Set<ContainerId> strictContainers, Set<ContainerId> fungibleContainers,
List<ResourceRequest> fungibleResources) {
this(containers, resourceLimit,strictContainers, fungibleContainers,
fungibleResources, null);
}
public Allocation(List<Container> containers, Resource resourceLimit,
Set<ContainerId> strictContainers, Set<ContainerId> fungibleContainers,
List<ResourceRequest> fungibleResources, List<NMToken> nmTokens) {
this(containers, resourceLimit, strictContainers, fungibleContainers,
fungibleResources, nmTokens, null, null, null, null, null, null);
}
public Allocation(List<Container> containers, Resource resourceLimit,
Set<ContainerId> strictContainers, Set<ContainerId> fungibleContainers,
List<ResourceRequest> fungibleResources, List<NMToken> nmTokens,
List<Container> increasedContainers, List<Container> decreasedContainer) {
this(containers, resourceLimit, strictContainers, fungibleContainers,
fungibleResources, nmTokens, increasedContainers, decreasedContainer,
null, null, null, null);
}
public Allocation(List<Container> containers, Resource resourceLimit,
Set<ContainerId> strictContainers, Set<ContainerId> fungibleContainers,
List<ResourceRequest> fungibleResources, List<NMToken> nmTokens,
List<Container> increasedContainers, List<Container> decreasedContainer,
List<Container> promotedContainers, List<Container> demotedContainer,
List<Container> previousAttemptContainers, List<RejectedSchedulingRequest>
rejectedRequest) {
this.containers = containers;
this.resourceLimit = resourceLimit;
this.strictContainers = strictContainers;
this.fungibleContainers = fungibleContainers;
this.fungibleResources = fungibleResources;
this.nmTokens = nmTokens;
this.increasedContainers = increasedContainers;
this.decreasedContainers = decreasedContainer;
this.promotedContainers = promotedContainers;
this.demotedContainers = demotedContainer;
this.previousAttemptContainers = previousAttemptContainers;
this.rejectedRequest = rejectedRequest;
}
public List<Container> getContainers() {
return containers;
}
public Resource getResourceLimit() {
return resourceLimit;
}
public Set<ContainerId> getStrictContainerPreemptions() {
return strictContainers;
}
public Set<ContainerId> getContainerPreemptions() {
return fungibleContainers;
}
public List<ResourceRequest> getResourcePreemptions() {
return fungibleResources;
}
public List<NMToken> getNMTokens() {
return nmTokens;
}
public List<Container> getIncreasedContainers() {
return increasedContainers;
}
public List<Container> getDecreasedContainers() {
return decreasedContainers;
}
public List<Container> getPromotedContainers() {
return promotedContainers;
}
public List<Container> getDemotedContainers() {
return demotedContainers;
}
public List<Container> getPreviousAttemptContainers() {
return previousAttemptContainers;
}
public List<RejectedSchedulingRequest> getRejectedRequest() {
return rejectedRequest;
}
@VisibleForTesting
public void setResourceLimit(Resource resource) {
this.resourceLimit = resource;
}
@Override
public String toString() {
return "Allocation{" + "containers=" + containers + ", strictContainers="
+ strictContainers + ", fungibleContainers=" + fungibleContainers
+ ", fungibleResources=" + fungibleResources + ", nmTokens=" + nmTokens
+ ", increasedContainers=" + increasedContainers
+ ", decreasedContainers=" + decreasedContainers
+ ", promotedContainers=" + promotedContainers + ", demotedContainers="
+ demotedContainers + ", previousAttemptContainers="
+ previousAttemptContainers + ", resourceLimit=" + resourceLimit + '}';
}
}
| Allocation |
java | google__dagger | javatests/dagger/internal/codegen/BindsDependsOnSubcomponentValidationTest.java | {
"start": 11417,
"end": 12210
} | class ____ implements Foo {",
" @Inject FooImpl(Long l) {}",
"}");
CompilerTests.daggerCompiler(
parentComponent, parentModule, childComponent, childModule, iface, impl)
.withProcessingOptions(
ImmutableMap.<String, String>builder()
.putAll(compilerMode.processorOptions())
// TODO(erichang): make this flag the default and remove this
.put("dagger.strictMultibindingValidation", "enabled")
.build())
.compile(
subject -> {
subject.hasErrorCount(1);
subject.hasErrorContaining("Long cannot be provided without an @Inject constructor")
.onSource(parentComponent)
.onLineContaining(" | FooImpl |
java | apache__rocketmq | namesrv/src/main/java/org/apache/rocketmq/namesrv/processor/ClusterTestRequestProcessor.java | {
"start": 1697,
"end": 4102
} | class ____ extends ClientRequestProcessor {
private static final Logger log = LoggerFactory.getLogger(LoggerName.NAMESRV_LOGGER_NAME);
private final DefaultMQAdminExt adminExt;
private final String productEnvName;
public ClusterTestRequestProcessor(NamesrvController namesrvController, String productEnvName) {
super(namesrvController);
this.productEnvName = productEnvName;
adminExt = new DefaultMQAdminExt();
adminExt.setInstanceName("CLUSTER_TEST_NS_INS_" + productEnvName);
adminExt.setUnitName(productEnvName);
try {
adminExt.start();
} catch (MQClientException e) {
log.error("Failed to start processor", e);
}
}
@Override
public RemotingCommand getRouteInfoByTopic(ChannelHandlerContext ctx,
RemotingCommand request) throws RemotingCommandException {
final RemotingCommand response = RemotingCommand.createResponseCommand(null);
final GetRouteInfoRequestHeader requestHeader =
(GetRouteInfoRequestHeader) request.decodeCommandCustomHeader(GetRouteInfoRequestHeader.class);
TopicRouteData topicRouteData = this.namesrvController.getRouteInfoManager().pickupTopicRouteData(requestHeader.getTopic());
if (topicRouteData != null) {
String orderTopicConf =
this.namesrvController.getKvConfigManager().getKVConfig(NamesrvUtil.NAMESPACE_ORDER_TOPIC_CONFIG,
requestHeader.getTopic());
topicRouteData.setOrderTopicConf(orderTopicConf);
} else {
try {
topicRouteData = adminExt.examineTopicRouteInfo(requestHeader.getTopic());
} catch (Exception e) {
log.info("get route info by topic from product environment failed. envName={},", productEnvName);
}
}
if (topicRouteData != null) {
byte[] content = topicRouteData.encode();
response.setBody(content);
response.setCode(ResponseCode.SUCCESS);
response.setRemark(null);
return response;
}
response.setCode(ResponseCode.TOPIC_NOT_EXIST);
response.setRemark("No topic route info in name server for the topic: " + requestHeader.getTopic()
+ FAQUrl.suggestTodo(FAQUrl.APPLY_TOPIC_URL));
return response;
}
}
| ClusterTestRequestProcessor |
java | apache__camel | core/camel-console/src/main/java/org/apache/camel/impl/console/EndpointDevConsole.java | {
"start": 1377,
"end": 5311
} | class ____ extends AbstractDevConsole {
public EndpointDevConsole() {
super("camel", "endpoint", "Endpoints", "Endpoint Registry information");
}
@Override
protected String doCallText(Map<String, Object> options) {
StringBuilder sb = new StringBuilder();
// runtime registry is optional but if enabled we have additional statistics to use in output
List<RuntimeEndpointRegistry.Statistic> stats = null;
RuntimeEndpointRegistry runtimeReg = getCamelContext().getRuntimeEndpointRegistry();
if (runtimeReg != null) {
stats = runtimeReg.getEndpointStatistics();
}
EndpointRegistry reg = getCamelContext().getEndpointRegistry();
sb.append(
String.format(" Endpoints: %s (static: %s dynamic: %s)\n", reg.size(), reg.staticSize(), reg.dynamicSize()));
sb.append(String.format(" Maximum Cache Size: %s\n", reg.getMaximumCacheSize()));
Collection<Endpoint> col = reg.getReadOnlyValues();
if (!col.isEmpty()) {
for (Endpoint e : col) {
boolean stub = e.getComponent().getClass().getSimpleName().equals("StubComponent");
boolean remote = e.isRemote();
String uri = e.toString();
if (!uri.startsWith("stub:") && stub) {
// shadow-stub
uri = uri + " (stub)";
}
var stat = findStats(stats, e.getEndpointUri());
if (stat.isPresent()) {
var st = stat.get();
sb.append(String.format("\n %s (remote: %s direction: %s, usage: %s)", uri, remote, st.getDirection(),
st.getHits()));
} else {
sb.append(String.format("\n %s (remote: %s)", uri, remote));
}
}
}
sb.append("\n");
return sb.toString();
}
@Override
@SuppressWarnings("unchecked")
protected JsonObject doCallJson(Map<String, Object> options) {
JsonObject root = new JsonObject();
// runtime registry is optional but if enabled we have additional statistics to use in output
List<RuntimeEndpointRegistry.Statistic> stats = null;
RuntimeEndpointRegistry runtimeReg = getCamelContext().getRuntimeEndpointRegistry();
if (runtimeReg != null) {
stats = runtimeReg.getEndpointStatistics();
}
EndpointRegistry reg = getCamelContext().getEndpointRegistry();
root.put("size", reg.size());
root.put("staticSize", reg.staticSize());
root.put("dynamicSize", reg.dynamicSize());
root.put("maximumCacheSize", reg.getMaximumCacheSize());
final List<JsonObject> list = new ArrayList<>();
root.put("endpoints", list);
Collection<Endpoint> col = reg.getReadOnlyValues();
for (Endpoint e : col) {
JsonObject jo = new JsonObject();
jo.put("uri", e.getEndpointUri());
jo.put("remote", e.isRemote());
boolean stub = e.getComponent().getClass().getSimpleName().equals("StubComponent");
jo.put("stub", stub);
var stat = findStats(stats, e.getEndpointUri());
if (stat.isPresent()) {
var st = stat.get();
jo.put("direction", st.getDirection());
jo.put("hits", st.getHits());
jo.put("routeId", st.getRouteId());
}
list.add(jo);
}
return root;
}
private static Optional<RuntimeEndpointRegistry.Statistic> findStats(
List<RuntimeEndpointRegistry.Statistic> stats, String uri) {
if (stats == null) {
return Optional.empty();
}
return stats.stream()
.filter(s -> uri.equals(s.getUri()))
.findFirst();
}
}
| EndpointDevConsole |
java | apache__flink | flink-tests/src/test/java/org/apache/flink/test/scheduling/PipelinedRegionSchedulingITCase.java | {
"start": 10963,
"end": 11553
} | class ____ extends AbstractInvokable {
private static final AtomicBoolean hasFailed = new AtomicBoolean(false);
public OneTimeFailingReceiverWithPartitionException(Environment environment) {
super(environment);
}
@Override
public void invoke() throws Exception {
if (hasFailed.compareAndSet(false, true)) {
throw new PartitionNotFoundException(
getEnvironment().getInputGate(0).getChannel(1).getPartitionId());
}
}
}
}
| OneTimeFailingReceiverWithPartitionException |
java | quarkusio__quarkus | extensions/vertx-http/runtime/src/main/java/io/quarkus/vertx/http/runtime/security/QuarkusHttpUser.java | {
"start": 718,
"end": 5390
} | class ____ implements User {
/**
* Only used when proactive auth is disabled
*/
public static final String DEFERRED_IDENTITY_KEY = "io.quarkus.vertx.http.deferred-identity";
/**
* The key that stores a BiConsumer that handles auth failures
*
* This can be overridden by downstream handlers such as Undertow to control auth failure handling.
*/
public static final String AUTH_FAILURE_HANDLER = "io.quarkus.vertx.http.auth-failure-handler";
private final SecurityIdentity securityIdentity;
public QuarkusHttpUser(SecurityIdentity securityIdentity) {
this.securityIdentity = securityIdentity;
}
@Override
public JsonObject attributes() {
// Vert.x 4 Migration: Check this, probably wrong.
return principal();
}
@Override
public User isAuthorized(Authorization authority, Handler<AsyncResult<Boolean>> resultHandler) {
return null;
}
@Override
@Deprecated
public User isAuthorized(String authority, Handler<AsyncResult<Boolean>> resultHandler) {
resultHandler.handle(Future.succeededFuture(securityIdentity.hasRole(authority)));
return this;
}
@Override
@Deprecated
public User clearCache() {
return this;
}
@Override
public JsonObject principal() {
JsonObject ret = new JsonObject();
ret.put("username", securityIdentity.getPrincipal().getName());
return ret;
}
@Override
@Deprecated
public void setAuthProvider(AuthProvider authProvider) {
}
public SecurityIdentity getSecurityIdentity() {
return securityIdentity;
}
/**
* Gets the current user from the routing context. This method may block if proactive authentication is disabled,
* as it may need to perform a potentially blocking operation.
* If an IPM is provided this method will return the anonymous
* identity if there is no active user, otherwise it will return null if there is no user.
*/
public static SecurityIdentity getSecurityIdentityBlocking(RoutingContext routingContext,
IdentityProviderManager identityProviderManager) {
QuarkusHttpUser existing = (QuarkusHttpUser) routingContext.user();
if (existing != null) {
return existing.getSecurityIdentity();
}
Uni<SecurityIdentity> deferred = routingContext.get(DEFERRED_IDENTITY_KEY);
if (deferred != null) {
return deferred.await().indefinitely();
}
if (identityProviderManager != null) {
return identityProviderManager
.authenticate(setRoutingContextAttribute(new AnonymousAuthenticationRequest(), routingContext))
.await()
.indefinitely();
}
return null;
}
@Override
public User merge(User other) {
if (other == null) {
return this;
}
principal()
// merge in the rhs
.mergeIn(other.principal());
return this;
}
/**
* Gets the current user from the routing context. If an IPM is provided this method will return the anonymous
* identity if there is no active user, otherwise the Uni will resolve to null if there is no user.
*/
public static Uni<SecurityIdentity> getSecurityIdentity(RoutingContext routingContext,
IdentityProviderManager identityProviderManager) {
Uni<SecurityIdentity> deferred = routingContext.get(DEFERRED_IDENTITY_KEY);
if (deferred != null) {
return deferred;
}
QuarkusHttpUser existing = (QuarkusHttpUser) routingContext.user();
if (existing != null) {
return Uni.createFrom().item(existing.getSecurityIdentity());
}
if (identityProviderManager != null) {
return identityProviderManager
.authenticate(setRoutingContextAttribute(new AnonymousAuthenticationRequest(), routingContext));
}
return Uni.createFrom().nullItem();
}
static Uni<SecurityIdentity> setIdentity(Uni<SecurityIdentity> identityUni, RoutingContext routingContext) {
routingContext.setUser(null);
routingContext.put(QuarkusHttpUser.DEFERRED_IDENTITY_KEY, identityUni);
return identityUni;
}
public static SecurityIdentity setIdentity(SecurityIdentity identity, RoutingContext routingContext) {
routingContext.setUser(new QuarkusHttpUser(identity));
routingContext.put(QuarkusHttpUser.DEFERRED_IDENTITY_KEY, Uni.createFrom().item(identity));
return identity;
}
}
| QuarkusHttpUser |
java | apache__hadoop | hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azure/SASKeyGeneratorInterface.java | {
"start": 987,
"end": 1342
} | interface ____ expected to be
* implemented in two modes:
* 1) Local Mode: In this mode SAS Keys are generated
* in same address space as the WASB. This will be primarily used for
* testing purposes.
* 2) Remote Mode: In this mode SAS Keys are generated in a sepearte process
* other than WASB and will be communicated via client.
*/
public | is |
java | apache__camel | components/camel-spring-parent/camel-spring-xml/src/test/java/org/apache/camel/spring/interceptor/ContainerWideInterceptorTest.java | {
"start": 1335,
"end": 3444
} | class ____ extends TestSupport {
private CamelContext camel1;
private CamelContext camel2;
private ApplicationContext ac;
private ContainerWideInterceptor myInterceptor;
@Override
@BeforeEach
public void setUp() throws Exception {
super.setUp();
ac = new ClassPathXmlApplicationContext("/org/apache/camel/spring/interceptor/ContainerWideInterceptorTest.xml");
camel1 = ac.getBean("camel1", CamelContext.class);
camel2 = ac.getBean("camel2", CamelContext.class);
myInterceptor = ac.getBean("myInterceptor", ContainerWideInterceptor.class);
}
@Override
@AfterEach
public void tearDown() throws Exception {
super.tearDown();
camel2.stop();
camel1.stop();
}
@Test
public void testOne() throws Exception {
int start = myInterceptor.getCount();
MockEndpoint result = camel1.getEndpoint("mock:result", MockEndpoint.class);
result.expectedBodiesReceived("Hello World");
ProducerTemplate template = camel1.createProducerTemplate();
template.start();
template.sendBody("direct:one", "Hello World");
template.stop();
result.assertIsSatisfied();
// lets see if the counter is +1 since last (has 1 step in the route)
int delta = myInterceptor.getCount() - start;
assertEquals(1, delta, "Should have been counted +1");
}
@Test
public void testTwo() throws Exception {
int start = myInterceptor.getCount();
MockEndpoint result = camel2.getEndpoint("mock:result", MockEndpoint.class);
result.expectedBodiesReceived("Bye World");
ProducerTemplate template = camel2.createProducerTemplate();
template.start();
template.sendBody("direct:two", "Bye World");
template.stop();
result.assertIsSatisfied();
// lets see if the counter is +2 since last (has 2 steps in the route)
int delta = myInterceptor.getCount() - start;
assertEquals(2, delta, "Should have been counted +2");
}
}
| ContainerWideInterceptorTest |
java | apache__flink | flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/plan/nodes/exec/common/CommonExecAsyncCorrelate.java | {
"start": 3025,
"end": 7286
} | class ____ extends ExecNodeBase<RowData>
implements SingleTransformationTranslator<RowData> {
public static final String ASYNC_CORRELATE_TRANSFORMATION = "async-correlate";
public static final String FIELD_NAME_JOIN_TYPE = "joinType";
public static final String FIELD_NAME_FUNCTION_CALL = "functionCall";
@JsonProperty(FIELD_NAME_JOIN_TYPE)
private final FlinkJoinType joinType;
@JsonProperty(FIELD_NAME_FUNCTION_CALL)
private final RexCall invocation;
public CommonExecAsyncCorrelate(
int id,
ExecNodeContext context,
ReadableConfig persistedConfig,
FlinkJoinType joinType,
RexCall invocation,
List<InputProperty> inputProperties,
RowType outputType,
String description) {
super(id, context, persistedConfig, inputProperties, outputType, description);
checkArgument(inputProperties.size() == 1);
this.joinType = joinType;
this.invocation = invocation;
}
@Override
protected Transformation<RowData> translateToPlanInternal(
PlannerBase planner, ExecNodeConfig config) {
final ExecEdge inputEdge = getInputEdges().get(0);
final Transformation<RowData> inputTransform =
(Transformation<RowData>) inputEdge.translateToPlan(planner);
final OneInputTransformation<RowData, RowData> transform =
createAsyncOneInputTransformation(
inputTransform, config, planner.getFlinkContext().getClassLoader());
return transform;
}
private OneInputTransformation<RowData, RowData> createAsyncOneInputTransformation(
Transformation<RowData> inputTransform,
ExecNodeConfig config,
ClassLoader classLoader) {
final ExecEdge inputEdge = getInputEdges().get(0);
RowType inputRowType =
RowType.of(inputEdge.getOutputType().getChildren().toArray(new LogicalType[0]));
InternalTypeInfo<RowData> asyncOperatorResultTypeInfo =
InternalTypeInfo.of(getOutputType());
OneInputStreamOperatorFactory<RowData, RowData> factory =
getAsyncFunctionOperator(config, classLoader, inputRowType);
return ExecNodeUtil.createOneInputTransformation(
inputTransform,
createTransformationMeta(ASYNC_CORRELATE_TRANSFORMATION, config),
factory,
asyncOperatorResultTypeInfo,
inputTransform.getParallelism(),
false);
}
private OneInputStreamOperatorFactory<RowData, RowData> getAsyncFunctionOperator(
ExecNodeConfig config, ClassLoader classLoader, RowType inputRowType) {
RowType resultTypeInfo = (RowType) FlinkTypeFactory.toLogicalType(invocation.getType());
GeneratedFunction<AsyncFunction<RowData, Object>> generatedFunction =
AsyncCorrelateCodeGenerator.generateFunction(
"AsyncTableFunction",
inputRowType,
resultTypeInfo,
invocation,
config,
classLoader);
DataStructureConverter<RowData, Object> fetcherConverter =
cast(
DataStructureConverters.getConverter(
TypeConversions.fromLogicalToDataType(
FlinkTypeFactory.toLogicalType(invocation.getType()))));
AsyncCorrelateRunner func = new AsyncCorrelateRunner(generatedFunction, fetcherConverter);
FunctionCallUtil.AsyncOptions options = AsyncTableUtil.getAsyncOptions(config);
return new AsyncWaitOperatorFactory<>(
func,
options.asyncTimeout,
options.asyncBufferCapacity,
options.asyncOutputMode,
AsyncTableUtil.getResultRetryStrategy(config));
}
@SuppressWarnings("unchecked")
private DataStructureConverter<RowData, Object> cast(
DataStructureConverter<Object, Object> converter) {
return (DataStructureConverter<RowData, Object>) (Object) converter;
}
}
| CommonExecAsyncCorrelate |
java | mapstruct__mapstruct | processor/src/test/java/org/mapstruct/ap/test/constructor/nestedsource/ArtistToChartEntry.java | {
"start": 591,
"end": 2025
} | interface ____ {
ArtistToChartEntry MAPPER = Mappers.getMapper( ArtistToChartEntry.class );
@Mappings({
@Mapping(target = "chartName", source = "chart.name"),
@Mapping(target = "songTitle", source = "song.title"),
@Mapping(target = "artistName", source = "song.artist.name"),
@Mapping(target = "recordedAt", source = "song.artist.label.studio.name"),
@Mapping(target = "city", source = "song.artist.label.studio.city"),
@Mapping(target = "position", source = "position")
})
ChartEntry map(Chart chart, Song song, Integer position);
@Mappings({
@Mapping(target = "chartName", ignore = true),
@Mapping(target = "songTitle", source = "title"),
@Mapping(target = "artistName", source = "artist.name"),
@Mapping(target = "recordedAt", source = "artist.label.studio.name"),
@Mapping(target = "city", source = "artist.label.studio.city"),
@Mapping(target = "position", ignore = true)
})
ChartEntry map(Song song);
@Mappings({
@Mapping(target = "chartName", source = "name"),
@Mapping(target = "songTitle", ignore = true),
@Mapping(target = "artistName", ignore = true),
@Mapping(target = "recordedAt", ignore = true),
@Mapping(target = "city", ignore = true),
@Mapping(target = "position", ignore = true)
})
ChartEntry map(Chart name);
}
| ArtistToChartEntry |
java | google__dagger | dagger-compiler/main/java/dagger/internal/codegen/binding/DependencyVariableNamer.java | {
"start": 1387,
"end": 3170
} | class ____ {
private static final Pattern LAZY_PROVIDER_PATTERN = Pattern.compile("lazy(\\w+)Provider");
static String name(DependencyRequest dependency) {
if (!dependency.requestElement().isPresent()) {
return simpleVariableName(dependency.key().type().xprocessing().getTypeElement());
}
String variableName = getSimpleName(dependency.requestElement().get().xprocessing());
if (Ascii.isUpperCase(variableName.charAt(0))) {
variableName = toLowerCamel(variableName);
}
switch (dependency.kind()) {
case INSTANCE:
return variableName;
case LAZY:
return variableName.startsWith("lazy") && !variableName.equals("lazy")
? toLowerCamel(variableName.substring(4))
: variableName;
case PROVIDER_OF_LAZY:
Matcher matcher = LAZY_PROVIDER_PATTERN.matcher(variableName);
if (matcher.matches()) {
return toLowerCamel(matcher.group(1));
}
// fall through
case PROVIDER:
return variableName.endsWith("Provider") && !variableName.equals("Provider")
? variableName.substring(0, variableName.length() - 8)
: variableName;
case PRODUCED:
return variableName.startsWith("produced") && !variableName.equals("produced")
? toLowerCamel(variableName.substring(8))
: variableName;
case PRODUCER:
return variableName.endsWith("Producer") && !variableName.equals("Producer")
? variableName.substring(0, variableName.length() - 8)
: variableName;
default:
throw new AssertionError();
}
}
private static String toLowerCamel(String name) {
return CaseFormat.UPPER_CAMEL.to(CaseFormat.LOWER_CAMEL, name);
}
}
| DependencyVariableNamer |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/search/runtime/AbstractLongScriptFieldQuery.java | {
"start": 764,
"end": 846
} | class ____ building queries based on {@link AbstractLongFieldScript}.
*/
abstract | for |
java | spring-projects__spring-framework | spring-webmvc/src/test/java/org/springframework/web/servlet/mvc/method/annotation/HandlerMethodAnnotationDetectionTests.java | {
"start": 10831,
"end": 11433
} | class ____<A, B, C> {
@InitBinder
public abstract void initBinder(WebDataBinder dataBinder, A thePattern);
@ModelAttribute
public abstract void initModel(B date, Model model);
@RequestMapping(value="/path1/path2", method=RequestMethod.POST)
@ModelAttribute("attr2")
public abstract Date handle(C date, Model model) throws Exception;
@ExceptionHandler(Exception.class)
@ResponseBody
public abstract String handleException(Exception exception);
}
/**
* CONTROLLER WITH PARAMETERIZED BASE CLASS
* <p>All annotations can be on methods in the abstract | MappingGenericAbstractClass |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/NonRuntimeAnnotationTest.java | {
"start": 2153,
"end": 2251
} | interface ____ {}
/** Annotation that is implicitly NOT retained at runtime */
public @ | NonRuntime |
java | hibernate__hibernate-orm | hibernate-testing/src/main/java/org/hibernate/testing/orm/domain/gambit/EmbeddedIdEntity.java | {
"start": 362,
"end": 707
} | class ____ {
@EmbeddedId
private EmbeddedIdEntityId id;
private String data;
public EmbeddedIdEntityId getId() {
return id;
}
public void setId(EmbeddedIdEntityId id) {
this.id = id;
}
public String getData() {
return data;
}
public void setData(String data) {
this.data = data;
}
@Embeddable
public static | EmbeddedIdEntity |
java | apache__hadoop | hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/JobSubmitter.java | {
"start": 2722,
"end": 13690
} | class ____ {
protected static final Logger LOG =
LoggerFactory.getLogger(JobSubmitter.class);
private static final String SHUFFLE_KEYGEN_ALGORITHM = "HmacSHA1";
private FileSystem jtFs;
private ClientProtocol submitClient;
private String submitHostName;
private String submitHostAddress;
JobSubmitter(FileSystem submitFs, ClientProtocol submitClient)
throws IOException {
this.submitClient = submitClient;
this.jtFs = submitFs;
}
/**
* configure the jobconf of the user with the command line options of
* -libjars, -files, -archives.
* @param job
* @throws IOException
*/
private void copyAndConfigureFiles(Job job, Path jobSubmitDir)
throws IOException {
Configuration conf = job.getConfiguration();
boolean useWildcards = conf.getBoolean(Job.USE_WILDCARD_FOR_LIBJARS,
Job.DEFAULT_USE_WILDCARD_FOR_LIBJARS);
JobResourceUploader rUploader = new JobResourceUploader(jtFs, useWildcards);
rUploader.uploadResources(job, jobSubmitDir);
// Get the working directory. If not set, sets it to filesystem working dir
// This code has been added so that working directory reset before running
// the job. This is necessary for backward compatibility as other systems
// might use the public API JobConf#setWorkingDirectory to reset the working
// directory.
job.getWorkingDirectory();
}
/**
* Internal method for submitting jobs to the system.
*
* <p>The job submission process involves:
* <ol>
* <li>
* Checking the input and output specifications of the job.
* </li>
* <li>
* Computing the {@link InputSplit}s for the job.
* </li>
* <li>
* Setup the requisite accounting information for the
* {@link DistributedCache} of the job, if necessary.
* </li>
* <li>
* Copying the job's jar and configuration to the map-reduce system
* directory on the distributed file-system.
* </li>
* <li>
* Submitting the job to the <code>JobTracker</code> and optionally
* monitoring it's status.
* </li>
* </ol></p>
* @param job the configuration to submit
* @param cluster the handle to the Cluster
* @throws ClassNotFoundException
* @throws InterruptedException
* @throws IOException
*/
JobStatus submitJobInternal(Job job, Cluster cluster)
throws ClassNotFoundException, InterruptedException, IOException {
//validate the jobs output specs
checkSpecs(job);
Configuration conf = job.getConfiguration();
addMRFrameworkToDistributedCache(conf);
Path jobStagingArea = JobSubmissionFiles.getStagingDir(cluster, conf);
//configure the command line options correctly on the submitting dfs
InetAddress ip = InetAddress.getLocalHost();
if (ip != null) {
submitHostAddress = ip.getHostAddress();
submitHostName = ip.getHostName();
conf.set(MRJobConfig.JOB_SUBMITHOST,submitHostName);
conf.set(MRJobConfig.JOB_SUBMITHOSTADDR,submitHostAddress);
}
JobID jobId = submitClient.getNewJobID();
job.setJobID(jobId);
Path submitJobDir = new Path(jobStagingArea, jobId.toString());
JobStatus status = null;
try {
conf.set(MRJobConfig.USER_NAME,
UserGroupInformation.getCurrentUser().getShortUserName());
conf.set("hadoop.http.filter.initializers",
"org.apache.hadoop.yarn.server.webproxy.amfilter.AmFilterInitializer");
conf.set(MRJobConfig.MAPREDUCE_JOB_DIR, submitJobDir.toString());
LOG.debug("Configuring job " + jobId + " with " + submitJobDir
+ " as the submit dir");
// get delegation token for the dir
TokenCache.obtainTokensForNamenodes(job.getCredentials(),
new Path[] { submitJobDir }, conf);
populateTokenCache(conf, job.getCredentials());
// generate a secret to authenticate shuffle transfers
if (TokenCache.getShuffleSecretKey(job.getCredentials()) == null) {
KeyGenerator keyGen;
try {
keyGen = KeyGenerator.getInstance(SHUFFLE_KEYGEN_ALGORITHM);
int shuffleKeyLength =
conf.getInt(MRJobConfig.SHUFFLE_KEY_LENGTH, MRJobConfig.DEFAULT_SHUFFLE_KEY_LENGTH);
keyGen.init(shuffleKeyLength);
} catch (NoSuchAlgorithmException e) {
throw new IOException("Error generating shuffle secret key", e);
}
SecretKey shuffleKey = keyGen.generateKey();
TokenCache.setShuffleSecretKey(shuffleKey.getEncoded(),
job.getCredentials());
}
if (CryptoUtils.isEncryptedSpillEnabled(conf)) {
conf.setInt(MRJobConfig.MR_AM_MAX_ATTEMPTS, 1);
LOG.warn("Max job attempts set to 1 since encrypted intermediate" +
"data spill is enabled");
}
copyAndConfigureFiles(job, submitJobDir);
Path submitJobFile = JobSubmissionFiles.getJobConfPath(submitJobDir);
// Create the splits for the job
LOG.debug("Creating splits at " + jtFs.makeQualified(submitJobDir));
int maps = writeSplits(job, submitJobDir);
conf.setInt(MRJobConfig.NUM_MAPS, maps);
LOG.info("number of splits:" + maps);
int maxMaps = conf.getInt(MRJobConfig.JOB_MAX_MAP,
MRJobConfig.DEFAULT_JOB_MAX_MAP);
if (maxMaps >= 0 && maxMaps < maps) {
throw new IllegalArgumentException("The number of map tasks " + maps +
" exceeded limit " + maxMaps);
}
// write "queue admins of the queue to which job is being submitted"
// to job file.
String queue = conf.get(MRJobConfig.QUEUE_NAME,
JobConf.DEFAULT_QUEUE_NAME);
AccessControlList acl = submitClient.getQueueAdmins(queue);
conf.set(toFullPropertyName(queue,
QueueACL.ADMINISTER_JOBS.getAclName()), acl.getAclString());
// removing jobtoken referrals before copying the jobconf to HDFS
// as the tasks don't need this setting, actually they may break
// because of it if present as the referral will point to a
// different job.
TokenCache.cleanUpTokenReferral(conf);
if (conf.getBoolean(
MRJobConfig.JOB_TOKEN_TRACKING_IDS_ENABLED,
MRJobConfig.DEFAULT_JOB_TOKEN_TRACKING_IDS_ENABLED)) {
// Add HDFS tracking ids
ArrayList<String> trackingIds = new ArrayList<String>();
for (Token<? extends TokenIdentifier> t :
job.getCredentials().getAllTokens()) {
trackingIds.add(t.decodeIdentifier().getTrackingId());
}
conf.setStrings(MRJobConfig.JOB_TOKEN_TRACKING_IDS,
trackingIds.toArray(new String[trackingIds.size()]));
}
// Set reservation info if it exists
ReservationId reservationId = job.getReservationId();
if (reservationId != null) {
conf.set(MRJobConfig.RESERVATION_ID, reservationId.toString());
}
// Write job file to submit dir
writeConf(conf, submitJobFile);
//
// Now, actually submit the job (using the submit name)
//
printTokens(jobId, job.getCredentials());
status = submitClient.submitJob(
jobId, submitJobDir.toString(), job.getCredentials());
if (status != null) {
return status;
} else {
throw new IOException("Could not launch job");
}
} finally {
if (status == null) {
LOG.info("Cleaning up the staging area " + submitJobDir);
if (jtFs != null && submitJobDir != null)
jtFs.delete(submitJobDir, true);
}
}
}
private void checkSpecs(Job job) throws ClassNotFoundException,
InterruptedException, IOException {
JobConf jConf = (JobConf)job.getConfiguration();
// Check the output specification
if (jConf.getNumReduceTasks() == 0 ?
jConf.getUseNewMapper() : jConf.getUseNewReducer()) {
org.apache.hadoop.mapreduce.OutputFormat<?, ?> output =
ReflectionUtils.newInstance(job.getOutputFormatClass(),
job.getConfiguration());
output.checkOutputSpecs(job);
} else {
jConf.getOutputFormat().checkOutputSpecs(jtFs, jConf);
}
}
private void writeConf(Configuration conf, Path jobFile)
throws IOException {
// Write job file to JobTracker's fs
FSDataOutputStream out =
FileSystem.create(jtFs, jobFile,
new FsPermission(JobSubmissionFiles.JOB_FILE_PERMISSION));
try {
conf.writeXml(out);
} finally {
out.close();
}
}
private void printTokens(JobID jobId,
Credentials credentials) throws IOException {
LOG.info("Submitting tokens for job: " + jobId);
LOG.info("Executing with tokens: {}", credentials.getAllTokens());
}
@SuppressWarnings("unchecked")
private <T extends InputSplit>
int writeNewSplits(JobContext job, Path jobSubmitDir) throws IOException,
InterruptedException, ClassNotFoundException {
Configuration conf = job.getConfiguration();
InputFormat<?, ?> input =
ReflectionUtils.newInstance(job.getInputFormatClass(), conf);
List<InputSplit> splits = input.getSplits(job);
T[] array = (T[]) splits.toArray(new InputSplit[splits.size()]);
// sort the splits into order based on size, so that the biggest
// go first
Arrays.sort(array, new SplitComparator());
JobSplitWriter.createSplitFiles(jobSubmitDir, conf,
jobSubmitDir.getFileSystem(conf), array);
return array.length;
}
private int writeSplits(org.apache.hadoop.mapreduce.JobContext job,
Path jobSubmitDir) throws IOException,
InterruptedException, ClassNotFoundException {
JobConf jConf = (JobConf)job.getConfiguration();
int maps;
if (jConf.getUseNewMapper()) {
maps = writeNewSplits(job, jobSubmitDir);
} else {
maps = writeOldSplits(jConf, jobSubmitDir);
}
return maps;
}
//method to write splits for old api mapper.
private int writeOldSplits(JobConf job, Path jobSubmitDir)
throws IOException {
org.apache.hadoop.mapred.InputSplit[] splits =
job.getInputFormat().getSplits(job, job.getNumMapTasks());
// sort the splits into order based on size, so that the biggest
// go first
Arrays.sort(splits, new Comparator<org.apache.hadoop.mapred.InputSplit>() {
public int compare(org.apache.hadoop.mapred.InputSplit a,
org.apache.hadoop.mapred.InputSplit b) {
try {
long left = a.getLength();
long right = b.getLength();
if (left == right) {
return 0;
} else if (left < right) {
return 1;
} else {
return -1;
}
} catch (IOException ie) {
throw new RuntimeException("Problem getting input split size", ie);
}
}
});
JobSplitWriter.createSplitFiles(jobSubmitDir, job,
jobSubmitDir.getFileSystem(job), splits);
return splits.length;
}
private static | JobSubmitter |
java | google__guava | guava-testlib/src/com/google/common/collect/testing/TestsForSetsInJavaUtil.java | {
"start": 2152,
"end": 20049
} | class ____ {
public static Test suite() {
return new TestsForSetsInJavaUtil().allTests();
}
public Test allTests() {
TestSuite suite = new TestSuite("java.util Sets");
suite.addTest(testsForCheckedNavigableSet());
suite.addTest(testsForEmptySet());
suite.addTest(testsForEmptyNavigableSet());
suite.addTest(testsForEmptySortedSet());
suite.addTest(testsForSingletonSet());
suite.addTest(testsForHashSet());
suite.addTest(testsForLinkedHashSet());
suite.addTest(testsForEnumSet());
suite.addTest(testsForSynchronizedNavigableSet());
suite.addTest(testsForTreeSetNatural());
suite.addTest(testsForTreeSetWithComparator());
suite.addTest(testsForCopyOnWriteArraySet());
suite.addTest(testsForUnmodifiableSet());
suite.addTest(testsForUnmodifiableNavigableSet());
suite.addTest(testsForCheckedSet());
suite.addTest(testsForCheckedSortedSet());
suite.addTest(testsForAbstractSet());
suite.addTest(testsForBadlyCollidingHashSet());
suite.addTest(testsForConcurrentSkipListSetNatural());
suite.addTest(testsForConcurrentSkipListSetWithComparator());
return suite;
}
protected Collection<Method> suppressForCheckedNavigableSet() {
return emptySet();
}
protected Collection<Method> suppressForEmptySet() {
return emptySet();
}
protected Collection<Method> suppressForEmptyNavigableSet() {
return emptySet();
}
protected Collection<Method> suppressForEmptySortedSet() {
return emptySet();
}
protected Collection<Method> suppressForSingletonSet() {
return emptySet();
}
protected Collection<Method> suppressForHashSet() {
return emptySet();
}
protected Collection<Method> suppressForLinkedHashSet() {
return emptySet();
}
protected Collection<Method> suppressForEnumSet() {
return emptySet();
}
protected Collection<Method> suppressForSynchronizedNavigableSet() {
return emptySet();
}
protected Collection<Method> suppressForTreeSetNatural() {
return emptySet();
}
protected Collection<Method> suppressForTreeSetWithComparator() {
return emptySet();
}
protected Collection<Method> suppressForCopyOnWriteArraySet() {
return asList(
getSpliteratorNotImmutableCollectionAllowsAddMethod(),
getSpliteratorNotImmutableCollectionAllowsRemoveMethod());
}
protected Collection<Method> suppressForUnmodifiableSet() {
return emptySet();
}
protected Collection<Method> suppressForUnmodifiableNavigableSet() {
return emptySet();
}
protected Collection<Method> suppressForCheckedSet() {
return emptySet();
}
protected Collection<Method> suppressForCheckedSortedSet() {
return emptySet();
}
protected Collection<Method> suppressForAbstractSet() {
return emptySet();
}
protected Collection<Method> suppressForConcurrentSkipListSetNatural() {
return emptySet();
}
protected Collection<Method> suppressForConcurrentSkipListSetWithComparator() {
return emptySet();
}
public Test testsForCheckedNavigableSet() {
return SortedSetTestSuiteBuilder.using(
new TestStringSortedSetGenerator() {
@Override
public NavigableSet<String> create(String[] elements) {
NavigableSet<String> innerSet = new TreeSet<>();
Collections.addAll(innerSet, elements);
return Collections.checkedNavigableSet(innerSet, String.class);
}
})
.named("checkedNavigableSet/TreeSet, natural")
.withFeatures(
SetFeature.GENERAL_PURPOSE,
CollectionFeature.KNOWN_ORDER,
CollectionFeature.SERIALIZABLE,
CollectionFeature.FAILS_FAST_ON_CONCURRENT_MODIFICATION,
CollectionFeature.RESTRICTS_ELEMENTS,
CollectionSize.ANY)
.suppressing(suppressForCheckedNavigableSet())
.createTestSuite();
}
public Test testsForEmptySet() {
return SetTestSuiteBuilder.using(
new TestStringSetGenerator() {
@Override
public Set<String> create(String[] elements) {
return emptySet();
}
})
.named("emptySet")
.withFeatures(CollectionFeature.SERIALIZABLE, CollectionSize.ZERO)
.suppressing(suppressForEmptySet())
.createTestSuite();
}
public Test testsForEmptyNavigableSet() {
return SetTestSuiteBuilder.using(
new TestStringSortedSetGenerator() {
@Override
public NavigableSet<String> create(String[] elements) {
return Collections.emptyNavigableSet();
}
})
.named("emptyNavigableSet")
.withFeatures(CollectionFeature.SERIALIZABLE, CollectionSize.ZERO)
.suppressing(suppressForEmptyNavigableSet())
.createTestSuite();
}
public Test testsForEmptySortedSet() {
return SetTestSuiteBuilder.using(
new TestStringSortedSetGenerator() {
@Override
public SortedSet<String> create(String[] elements) {
return Collections.emptySortedSet();
}
})
.named("emptySortedSet")
.withFeatures(CollectionFeature.SERIALIZABLE, CollectionSize.ZERO)
.suppressing(suppressForEmptySortedSet())
.createTestSuite();
}
public Test testsForSingletonSet() {
return SetTestSuiteBuilder.using(
new TestStringSetGenerator() {
@Override
public Set<String> create(String[] elements) {
return singleton(elements[0]);
}
})
.named("singleton")
.withFeatures(
CollectionFeature.SERIALIZABLE,
CollectionFeature.ALLOWS_NULL_VALUES,
CollectionSize.ONE)
.suppressing(suppressForSingletonSet())
.createTestSuite();
}
public Test testsForHashSet() {
return SetTestSuiteBuilder.using(
new TestStringSetGenerator() {
@Override
public Set<String> create(String[] elements) {
return new HashSet<>(MinimalCollection.of(elements));
}
})
.named("HashSet")
.withFeatures(
SetFeature.GENERAL_PURPOSE,
CollectionFeature.SERIALIZABLE,
CollectionFeature.ALLOWS_NULL_VALUES,
CollectionFeature.FAILS_FAST_ON_CONCURRENT_MODIFICATION,
CollectionSize.ANY)
.suppressing(suppressForHashSet())
.createTestSuite();
}
public Test testsForLinkedHashSet() {
return SetTestSuiteBuilder.using(
new TestStringSetGenerator() {
@Override
public Set<String> create(String[] elements) {
return new LinkedHashSet<>(MinimalCollection.of(elements));
}
})
.named("LinkedHashSet")
.withFeatures(
SetFeature.GENERAL_PURPOSE,
CollectionFeature.SERIALIZABLE,
CollectionFeature.ALLOWS_NULL_VALUES,
CollectionFeature.KNOWN_ORDER,
CollectionFeature.FAILS_FAST_ON_CONCURRENT_MODIFICATION,
CollectionSize.ANY)
.suppressing(suppressForLinkedHashSet())
.createTestSuite();
}
public Test testsForEnumSet() {
return SetTestSuiteBuilder.using(
new TestEnumSetGenerator() {
@Override
public Set<AnEnum> create(AnEnum[] elements) {
return (elements.length == 0)
? EnumSet.noneOf(AnEnum.class)
: EnumSet.copyOf(MinimalCollection.of(elements));
}
})
.named("EnumSet")
.withFeatures(
SetFeature.GENERAL_PURPOSE,
CollectionFeature.SERIALIZABLE,
CollectionFeature.KNOWN_ORDER,
CollectionFeature.RESTRICTS_ELEMENTS,
CollectionSize.ANY)
.suppressing(suppressForEnumSet())
.createTestSuite();
}
/**
* Tests regular NavigableSet behavior of synchronizedNavigableSet(treeSet); does not test the
* fact that it's synchronized.
*/
public Test testsForSynchronizedNavigableSet() {
return NavigableSetTestSuiteBuilder.using(
new TestStringSortedSetGenerator() {
@Override
public SortedSet<String> create(String[] elements) {
NavigableSet<String> delegate = new TreeSet<>(MinimalCollection.of(elements));
return Collections.synchronizedNavigableSet(delegate);
}
})
.named("synchronizedNavigableSet/TreeSet, natural")
.withFeatures(
SetFeature.GENERAL_PURPOSE,
CollectionFeature.SERIALIZABLE,
CollectionFeature.KNOWN_ORDER,
CollectionFeature.FAILS_FAST_ON_CONCURRENT_MODIFICATION,
CollectionSize.ANY)
.suppressing(suppressForSynchronizedNavigableSet())
.createTestSuite();
}
public Test testsForTreeSetNatural() {
return NavigableSetTestSuiteBuilder.using(
new TestStringSortedSetGenerator() {
@Override
public SortedSet<String> create(String[] elements) {
return new TreeSet<>(MinimalCollection.of(elements));
}
})
.named("TreeSet, natural")
.withFeatures(
SetFeature.GENERAL_PURPOSE,
CollectionFeature.SERIALIZABLE,
CollectionFeature.KNOWN_ORDER,
CollectionFeature.FAILS_FAST_ON_CONCURRENT_MODIFICATION,
CollectionSize.ANY)
.suppressing(suppressForTreeSetNatural())
.createTestSuite();
}
public Test testsForTreeSetWithComparator() {
return NavigableSetTestSuiteBuilder.using(
new TestStringSortedSetGenerator() {
@Override
public SortedSet<String> create(String[] elements) {
SortedSet<String> set = new TreeSet<>(arbitraryNullFriendlyComparator());
Collections.addAll(set, elements);
return set;
}
})
.named("TreeSet, with comparator")
.withFeatures(
SetFeature.GENERAL_PURPOSE,
CollectionFeature.SERIALIZABLE,
CollectionFeature.ALLOWS_NULL_VALUES,
CollectionFeature.KNOWN_ORDER,
CollectionFeature.FAILS_FAST_ON_CONCURRENT_MODIFICATION,
CollectionSize.ANY)
.suppressing(suppressForTreeSetWithComparator())
.createTestSuite();
}
public Test testsForCopyOnWriteArraySet() {
return SetTestSuiteBuilder.using(
new TestStringSetGenerator() {
@Override
public Set<String> create(String[] elements) {
return new CopyOnWriteArraySet<>(MinimalCollection.of(elements));
}
})
.named("CopyOnWriteArraySet")
.withFeatures(
CollectionFeature.SUPPORTS_ADD,
CollectionFeature.SUPPORTS_REMOVE,
CollectionFeature.SERIALIZABLE,
CollectionFeature.ALLOWS_NULL_VALUES,
CollectionFeature.KNOWN_ORDER,
CollectionSize.ANY)
.suppressing(suppressForCopyOnWriteArraySet())
.createTestSuite();
}
public Test testsForUnmodifiableSet() {
return SetTestSuiteBuilder.using(
new TestStringSetGenerator() {
@Override
public Set<String> create(String[] elements) {
Set<String> innerSet = new HashSet<>();
Collections.addAll(innerSet, elements);
return unmodifiableSet(innerSet);
}
})
.named("unmodifiableSet/HashSet")
.withFeatures(
CollectionFeature.NONE,
CollectionFeature.SERIALIZABLE,
CollectionFeature.ALLOWS_NULL_VALUES,
CollectionSize.ANY)
.suppressing(suppressForUnmodifiableSet())
.createTestSuite();
}
public Test testsForUnmodifiableNavigableSet() {
return SetTestSuiteBuilder.using(
new TestStringSortedSetGenerator() {
@Override
public NavigableSet<String> create(String[] elements) {
NavigableSet<String> innerSet = new TreeSet<>();
Collections.addAll(innerSet, elements);
return Collections.unmodifiableNavigableSet(innerSet);
}
})
.named("unmodifiableNavigableSet/TreeSet, natural")
.withFeatures(
CollectionFeature.KNOWN_ORDER,
CollectionFeature.RESTRICTS_ELEMENTS,
CollectionFeature.SERIALIZABLE,
CollectionSize.ANY)
.suppressing(suppressForUnmodifiableNavigableSet())
.createTestSuite();
}
public Test testsForCheckedSet() {
return SetTestSuiteBuilder.using(
new TestStringSetGenerator() {
@Override
public Set<String> create(String[] elements) {
Set<String> innerSet = new HashSet<>();
Collections.addAll(innerSet, elements);
return Collections.checkedSet(innerSet, String.class);
}
})
.named("checkedSet/HashSet")
.withFeatures(
SetFeature.GENERAL_PURPOSE,
CollectionFeature.SERIALIZABLE,
CollectionFeature.ALLOWS_NULL_VALUES,
CollectionFeature.RESTRICTS_ELEMENTS,
CollectionSize.ANY)
.suppressing(suppressForCheckedSet())
.createTestSuite();
}
public Test testsForCheckedSortedSet() {
return SortedSetTestSuiteBuilder.using(
new TestStringSortedSetGenerator() {
@Override
public SortedSet<String> create(String[] elements) {
SortedSet<String> innerSet = new TreeSet<>();
Collections.addAll(innerSet, elements);
return Collections.checkedSortedSet(innerSet, String.class);
}
})
.named("checkedSortedSet/TreeSet, natural")
.withFeatures(
SetFeature.GENERAL_PURPOSE,
CollectionFeature.KNOWN_ORDER,
CollectionFeature.SERIALIZABLE,
CollectionFeature.FAILS_FAST_ON_CONCURRENT_MODIFICATION,
CollectionFeature.RESTRICTS_ELEMENTS,
CollectionSize.ANY)
.suppressing(suppressForCheckedSortedSet())
.createTestSuite();
}
public Test testsForAbstractSet() {
return SetTestSuiteBuilder.using(
new TestStringSetGenerator() {
@Override
protected Set<String> create(String[] elements) {
String[] deduped = dedupe(elements);
return new AbstractSet<String>() {
@Override
public int size() {
return deduped.length;
}
@Override
public Iterator<String> iterator() {
return MinimalCollection.of(deduped).iterator();
}
};
}
})
.named("AbstractSet")
.withFeatures(
CollectionFeature.NONE,
CollectionFeature.ALLOWS_NULL_VALUES,
CollectionFeature.KNOWN_ORDER, // in this case, anyway
CollectionSize.ANY)
.suppressing(suppressForAbstractSet())
.createTestSuite();
}
public Test testsForBadlyCollidingHashSet() {
return SetTestSuiteBuilder.using(
new TestCollidingSetGenerator() {
@Override
public Set<Object> create(Object... elements) {
return new HashSet<>(MinimalCollection.of(elements));
}
})
.named("badly colliding HashSet")
.withFeatures(
SetFeature.GENERAL_PURPOSE,
CollectionFeature.ALLOWS_NULL_VALUES,
CollectionSize.SEVERAL)
.suppressing(suppressForHashSet())
.createTestSuite();
}
public Test testsForConcurrentSkipListSetNatural() {
return SetTestSuiteBuilder.using(
new TestStringSortedSetGenerator() {
@Override
public SortedSet<String> create(String[] elements) {
return new ConcurrentSkipListSet<>(MinimalCollection.of(elements));
}
})
.named("ConcurrentSkipListSet, natural")
.withFeatures(
SetFeature.GENERAL_PURPOSE,
CollectionFeature.SERIALIZABLE,
CollectionFeature.KNOWN_ORDER,
CollectionSize.ANY)
.suppressing(suppressForConcurrentSkipListSetNatural())
.createTestSuite();
}
public Test testsForConcurrentSkipListSetWithComparator() {
return SetTestSuiteBuilder.using(
new TestStringSortedSetGenerator() {
@Override
public SortedSet<String> create(String[] elements) {
SortedSet<String> set =
new ConcurrentSkipListSet<>(arbitraryNullFriendlyComparator());
Collections.addAll(set, elements);
return set;
}
})
.named("ConcurrentSkipListSet, with comparator")
.withFeatures(
SetFeature.GENERAL_PURPOSE,
CollectionFeature.SERIALIZABLE,
CollectionFeature.KNOWN_ORDER,
CollectionSize.ANY)
.suppressing(suppressForConcurrentSkipListSetWithComparator())
.createTestSuite();
}
private static String[] dedupe(String[] elements) {
Set<String> tmp = new LinkedHashSet<>();
Collections.addAll(tmp, elements);
return tmp.toArray(new String[0]);
}
static <T> Comparator<T> arbitraryNullFriendlyComparator() {
return new NullFriendlyComparator<>();
}
private static final | TestsForSetsInJavaUtil |
java | eclipse-vertx__vert.x | vertx-core/src/main/java/io/vertx/core/eventbus/impl/clustered/DefaultNodeSelector.java | {
"start": 1418,
"end": 2382
} | class ____ {
final AtomicInteger wip = new AtomicInteger(1);
final Queue<Action> queue = PlatformDependent.newMpscQueue();
Object value;
private void signal(Object value, int amount) {
while (amount > 0) {
for (int i = 0;i < amount;i++) {
Action a = queue.poll();
assert a != null;
if (a instanceof Select) {
Select<?> s = (Select<?>) a;
if (value instanceof RoundRobinSelector) {
s.resolve((RoundRobinSelector) value);
} else {
s.fail((Throwable) value);
}
} else if (a instanceof Update) {
value = ((Update)a).selector;
} else {
throw new UnsupportedOperationException();
}
}
// We write the value before writing wip to ensure visibility since wip is read
this.value = value;
amount = wip.addAndGet(-amount);
}
}
}
private | Node |
java | elastic__elasticsearch | server/src/test/java/org/elasticsearch/index/engine/LiveVersionMapTestUtils.java | {
"start": 977,
"end": 4066
} | class ____ {
public static LiveVersionMap newLiveVersionMap(LiveVersionMapArchive archive) {
return new LiveVersionMap(archive);
}
public static DeleteVersionValue newDeleteVersionValue(long version, long seqNo, long term, long time) {
return new DeleteVersionValue(version, seqNo, term, time);
}
public static IndexVersionValue newIndexVersionValue(Translog.Location location, long version, long seqNo, long term) {
return new IndexVersionValue(location, version, seqNo, term);
}
public static VersionValue get(LiveVersionMap map, String id) {
try (Releasable r = acquireLock(map, uid(id))) {
return map.getUnderLock(uid(id));
}
}
public static void putIndex(LiveVersionMap map, String id, IndexVersionValue version) {
try (Releasable r = acquireLock(map, uid(id))) {
map.putIndexUnderLock(uid(id), version);
}
}
public static void maybePutIndex(LiveVersionMap map, String id, IndexVersionValue version) {
try (Releasable r = acquireLock(map, uid(id))) {
map.maybePutIndexUnderLock(uid(id), version);
}
}
public static void putDelete(LiveVersionMap map, String id, DeleteVersionValue version) {
try (Releasable r = acquireLock(map, uid(id))) {
map.putDeleteUnderLock(uid(id), version);
}
}
public static void pruneTombstones(LiveVersionMap map, long maxTimestampToPrune, long maxSeqNoToPrune) {
map.pruneTombstones(maxTimestampToPrune, maxSeqNoToPrune);
}
public static long reclaimableRefreshRamBytes(LiveVersionMap map) {
return map.reclaimableRefreshRamBytes();
}
public static long refreshingBytes(LiveVersionMap map) {
return map.getRefreshingBytes();
}
public static IndexVersionValue randomIndexVersionValue() {
return new IndexVersionValue(randomTranslogLocation(), randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong());
}
public static Translog.Location randomTranslogLocation() {
if (randomBoolean()) {
return null;
} else {
return new Translog.Location(randomNonNegativeLong(), randomNonNegativeLong(), randomInt());
}
}
public static int versionLookupSize(LiveVersionMap.VersionLookup lookup) {
return lookup.size();
}
private static Releasable acquireLock(LiveVersionMap map, BytesRef uid) {
return map.acquireLock(uid);
}
public static BytesRef uid(String id) {
return new Term(IdFieldMapper.NAME, Uid.encodeId(id)).bytes();
}
public static boolean isUnsafe(LiveVersionMap map) {
return map.isUnsafe();
}
public static boolean isSafeAccessRequired(LiveVersionMap map) {
return map.isSafeAccessRequired();
}
public static void enforceSafeAccess(LiveVersionMap map) {
map.enforceSafeAccess();
}
public static LiveVersionMapArchive getArchive(LiveVersionMap map) {
return map.getArchive();
}
}
| LiveVersionMapTestUtils |
java | apache__kafka | clients/src/test/java/org/apache/kafka/clients/consumer/ConsumerPartitionAssignorTest.java | {
"start": 5332,
"end": 6506
} | class ____ implements ConsumerPartitionAssignor, Configurable {
private Map<String, ?> configs = null;
@Override
public GroupAssignment assign(Cluster metadata, GroupSubscription groupSubscription) {
return null;
}
@Override
public String name() {
// use the RangeAssignor's name to cause naming conflict
return new RangeAssignor().name();
}
@Override
public void configure(Map<String, ?> configs) {
this.configs = configs;
}
}
private ConsumerConfig initConsumerConfigWithClassTypes(List<Object> classTypes) {
Properties props = new Properties();
props.put(KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
props.put(VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
props.put(ConsumerConfig.PARTITION_ASSIGNMENT_STRATEGY_CONFIG, classTypes);
props.put(ConsumerConfig.GROUP_PROTOCOL_CONFIG, GroupProtocol.CLASSIC.name());
props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
return new ConsumerConfig(props);
}
}
| TestConsumerPartitionAssignor |
java | quarkusio__quarkus | integration-tests/maven/src/test/resources-filtered/projects/multimodule-filtered-classifier/app/src/main/java/org/acme/App.java | {
"start": 154,
"end": 272
} | class ____ {
@Inject
BigBean bean;
@GET
public String get() {
return bean.getName();
}
}
| App |
java | apache__camel | components/camel-aws/camel-aws-xray/src/test/java/org/apache/camel/component/aws/xray/ErrorHandlingTest.java | {
"start": 5608,
"end": 6144
} | class ____ implements Processor {
@Override
public void process(Exchange exchange) {
Exception ex = exchange.getProperty(Exchange.EXCEPTION_CAUGHT, Exception.class);
LOG.debug("Processing caught exception {}", ex.getLocalizedMessage());
exchange.getIn().getHeaders().put("HandledError", ex.getLocalizedMessage());
}
@Override
public String toString() {
return "ExceptionProcessor";
}
}
@XRayTrace
public static | ExceptionProcessor |
java | spring-projects__spring-framework | spring-core/src/test/java/org/springframework/core/env/SimpleCommandLinePropertySourceTests.java | {
"start": 909,
"end": 4795
} | class ____ {
@Test
void withDefaultName() {
PropertySource<?> ps = new SimpleCommandLinePropertySource();
assertThat(ps.getName()).isEqualTo(CommandLinePropertySource.COMMAND_LINE_PROPERTY_SOURCE_NAME);
}
@Test
void withCustomName() {
PropertySource<?> ps = new SimpleCommandLinePropertySource("ps1", new String[0]);
assertThat(ps.getName()).isEqualTo("ps1");
}
@Test
void withNoArgs() {
PropertySource<?> ps = new SimpleCommandLinePropertySource();
assertThat(ps.containsProperty("foo")).isFalse();
assertThat(ps.getProperty("foo")).isNull();
}
@Test
void withOptionArgsOnly() {
CommandLinePropertySource<?> ps = new SimpleCommandLinePropertySource("--o1=v1", "--o2");
assertThat(ps.containsProperty("o1")).isTrue();
assertThat(ps.containsProperty("o2")).isTrue();
assertThat(ps.containsProperty("o3")).isFalse();
assertThat(ps.getProperty("o1")).isEqualTo("v1");
assertThat(ps.getProperty("o2")).isEmpty();
assertThat(ps.getProperty("o3")).isNull();
}
@Test // gh-34282
void withRepeatedOptionArgs() {
CommandLinePropertySource<?> ps = new SimpleCommandLinePropertySource("--o1=v1", "--o1=v2", "--o1=v3");
assertThat(ps.containsProperty("o1")).isTrue();
assertThat(ps.containsProperty("o2")).isFalse();
assertThat(ps.getProperty("o1")).isEqualTo("v1,v2,v3");
assertThat(ps.getProperty("o2")).isNull();
}
@Test // gh-24464
void withOptionalArg_andArgIsEmpty() {
EnumerablePropertySource<?> ps = new SimpleCommandLinePropertySource("--foo=");
assertThat(ps.containsProperty("foo")).isTrue();
assertThat(ps.getProperty("foo")).isEqualTo("");
}
@Test
void withDefaultNonOptionArgsNameAndNoNonOptionArgsPresent() {
EnumerablePropertySource<?> ps = new SimpleCommandLinePropertySource("--o1=v1", "--o2");
assertThat(ps.containsProperty("nonOptionArgs")).isFalse();
assertThat(ps.containsProperty("o1")).isTrue();
assertThat(ps.containsProperty("o2")).isTrue();
assertThat(ps.containsProperty("nonOptionArgs")).isFalse();
assertThat(ps.getProperty("nonOptionArgs")).isNull();
assertThat(ps.getPropertyNames()).hasSize(2);
}
@Test
void withDefaultNonOptionArgsNameAndNonOptionArgsPresent() {
CommandLinePropertySource<?> ps = new SimpleCommandLinePropertySource("--o1=v1", "noa1", "--o2", "noa2");
assertThat(ps.containsProperty("nonOptionArgs")).isTrue();
assertThat(ps.containsProperty("o1")).isTrue();
assertThat(ps.containsProperty("o2")).isTrue();
String nonOptionArgs = ps.getProperty("nonOptionArgs");
assertThat(nonOptionArgs).isEqualTo("noa1,noa2");
}
@Test
void withCustomNonOptionArgsNameAndNoNonOptionArgsPresent() {
CommandLinePropertySource<?> ps = new SimpleCommandLinePropertySource("--o1=v1", "noa1", "--o2", "noa2");
ps.setNonOptionArgsPropertyName("NOA");
assertThat(ps.containsProperty("nonOptionArgs")).isFalse();
assertThat(ps.containsProperty("NOA")).isTrue();
assertThat(ps.containsProperty("o1")).isTrue();
assertThat(ps.containsProperty("o2")).isTrue();
String nonOptionArgs = ps.getProperty("NOA");
assertThat(nonOptionArgs).isEqualTo("noa1,noa2");
}
@Test
void covertNonOptionArgsToStringArrayAndList() {
CommandLinePropertySource<?> ps = new SimpleCommandLinePropertySource("--o1=v1", "noa1", "--o2", "noa2");
StandardEnvironment env = new StandardEnvironment();
env.getPropertySources().addFirst(ps);
String nonOptionArgs = env.getProperty("nonOptionArgs");
assertThat(nonOptionArgs).isEqualTo("noa1,noa2");
String[] nonOptionArgsArray = env.getProperty("nonOptionArgs", String[].class);
assertThat(nonOptionArgsArray[0]).isEqualTo("noa1");
assertThat(nonOptionArgsArray[1]).isEqualTo("noa2");
@SuppressWarnings("unchecked")
List<String> nonOptionArgsList = env.getProperty("nonOptionArgs", List.class);
assertThat(nonOptionArgsList).containsExactly("noa1", "noa2");
}
}
| SimpleCommandLinePropertySourceTests |
java | netty__netty | resolver-dns/src/main/java/io/netty/resolver/dns/UnixResolverDnsServerAddressStreamProvider.java | {
"start": 1783,
"end": 11588
} | class ____ implements DnsServerAddressStreamProvider {
private static final InternalLogger logger =
InternalLoggerFactory.getInstance(UnixResolverDnsServerAddressStreamProvider.class);
private static final Pattern WHITESPACE_PATTERN = Pattern.compile("\\s+");
private static final String RES_OPTIONS = System.getenv("RES_OPTIONS");
private static final String ETC_RESOLV_CONF_FILE = "/etc/resolv.conf";
private static final String ETC_RESOLVER_DIR = "/etc/resolver";
private static final String NAMESERVER_ROW_LABEL = "nameserver";
private static final String SORTLIST_ROW_LABEL = "sortlist";
private static final String OPTIONS_ROW_LABEL = "options ";
private static final String OPTIONS_ROTATE_FLAG = "rotate";
private static final String DOMAIN_ROW_LABEL = "domain";
private static final String SEARCH_ROW_LABEL = "search";
private static final String PORT_ROW_LABEL = "port";
private final DnsServerAddresses defaultNameServerAddresses;
private final Map<String, DnsServerAddresses> domainToNameServerStreamMap;
/**
* Attempt to parse {@code /etc/resolv.conf} and files in the {@code /etc/resolver} directory by default.
* A failure to parse will return {@link DefaultDnsServerAddressStreamProvider}.
*/
static DnsServerAddressStreamProvider parseSilently() {
try {
UnixResolverDnsServerAddressStreamProvider nameServerCache =
new UnixResolverDnsServerAddressStreamProvider(ETC_RESOLV_CONF_FILE, ETC_RESOLVER_DIR);
return nameServerCache.mayOverrideNameServers() ? nameServerCache
: DefaultDnsServerAddressStreamProvider.INSTANCE;
} catch (Exception e) {
if (logger.isDebugEnabled()) {
logger.debug("failed to parse {} and/or {}", ETC_RESOLV_CONF_FILE, ETC_RESOLVER_DIR, e);
}
return DefaultDnsServerAddressStreamProvider.INSTANCE;
}
}
/**
* Parse a file of the format <a href="https://linux.die.net/man/5/resolver">/etc/resolv.conf</a> which may contain
* the default DNS server to use, and also overrides for individual domains. Also parse list of files of the format
* <a href="
* https://developer.apple.com/legacy/library/documentation/Darwin/Reference/ManPages/man5/resolver.5.html">
* /etc/resolver</a> which may contain multiple files to override the name servers used for multiple domains.
* @param etcResolvConf <a href="https://linux.die.net/man/5/resolver">/etc/resolv.conf</a>.
* @param etcResolverFiles List of files of the format defined in
* <a href="
* https://developer.apple.com/legacy/library/documentation/Darwin/Reference/ManPages/man5/resolver.5.html">
* /etc/resolver</a>.
* @throws IOException If an error occurs while parsing the input files.
*/
public UnixResolverDnsServerAddressStreamProvider(File etcResolvConf, File... etcResolverFiles) throws IOException {
Map<String, DnsServerAddresses> etcResolvConfMap = parse(checkNotNull(etcResolvConf, "etcResolvConf"));
final boolean useEtcResolverFiles = etcResolverFiles != null && etcResolverFiles.length != 0;
domainToNameServerStreamMap = useEtcResolverFiles ? parse(etcResolverFiles) : etcResolvConfMap;
DnsServerAddresses defaultNameServerAddresses
= etcResolvConfMap.get(etcResolvConf.getName());
if (defaultNameServerAddresses == null) {
Collection<DnsServerAddresses> values = etcResolvConfMap.values();
if (values.isEmpty()) {
throw new IllegalArgumentException(etcResolvConf + " didn't provide any name servers");
}
this.defaultNameServerAddresses = values.iterator().next();
} else {
this.defaultNameServerAddresses = defaultNameServerAddresses;
}
if (useEtcResolverFiles) {
domainToNameServerStreamMap.putAll(etcResolvConfMap);
}
}
/**
* Parse a file of the format <a href="https://linux.die.net/man/5/resolver">/etc/resolv.conf</a> which may contain
* the default DNS server to use, and also overrides for individual domains. Also parse a directory of the format
* <a href="
* https://developer.apple.com/legacy/library/documentation/Darwin/Reference/ManPages/man5/resolver.5.html">
* /etc/resolver</a> which may contain multiple files to override the name servers used for multiple domains.
* @param etcResolvConf <a href="https://linux.die.net/man/5/resolver">/etc/resolv.conf</a>.
* @param etcResolverDir Directory containing files of the format defined in
* <a href="
* https://developer.apple.com/legacy/library/documentation/Darwin/Reference/ManPages/man5/resolver.5.html">
* /etc/resolver</a>.
* @throws IOException If an error occurs while parsing the input files.
*/
public UnixResolverDnsServerAddressStreamProvider(String etcResolvConf, String etcResolverDir) throws IOException {
this(etcResolvConf == null ? null : new File(etcResolvConf),
etcResolverDir == null ? null : new File(etcResolverDir).listFiles());
}
@Override
public DnsServerAddressStream nameServerAddressStream(String hostname) {
for (;;) {
int i = hostname.indexOf('.', 1);
if (i < 0 || i == hostname.length() - 1) {
return defaultNameServerAddresses.stream();
}
DnsServerAddresses addresses = domainToNameServerStreamMap.get(hostname);
if (addresses != null) {
return addresses.stream();
}
hostname = hostname.substring(i + 1);
}
}
private boolean mayOverrideNameServers() {
return !domainToNameServerStreamMap.isEmpty() || defaultNameServerAddresses.stream().next() != null;
}
private static Map<String, DnsServerAddresses> parse(File... etcResolverFiles) throws IOException {
Map<String, DnsServerAddresses> domainToNameServerStreamMap =
new HashMap<String, DnsServerAddresses>(etcResolverFiles.length << 1);
boolean rotateGlobal = RES_OPTIONS != null && RES_OPTIONS.contains(OPTIONS_ROTATE_FLAG);
for (File etcResolverFile : etcResolverFiles) {
if (!etcResolverFile.isFile()) {
continue;
}
FileReader fr = new FileReader(etcResolverFile);
BufferedReader br = null;
try {
br = new BufferedReader(fr);
List<InetSocketAddress> addresses = new ArrayList<InetSocketAddress>(2);
String domainName = etcResolverFile.getName();
boolean rotate = rotateGlobal;
int port = DNS_PORT;
String line;
while ((line = br.readLine()) != null) {
line = line.trim();
try {
char c;
if (line.isEmpty() || (c = line.charAt(0)) == '#' || c == ';') {
continue;
}
if (!rotate && line.startsWith(OPTIONS_ROW_LABEL)) {
rotate = line.contains(OPTIONS_ROTATE_FLAG);
} else if (line.startsWith(NAMESERVER_ROW_LABEL)) {
int i = indexOfNonWhiteSpace(line, NAMESERVER_ROW_LABEL.length());
if (i < 0) {
throw new IllegalArgumentException("error parsing label " + NAMESERVER_ROW_LABEL +
" in file " + etcResolverFile + ". value: " + line);
}
String maybeIP;
int x = indexOfWhiteSpace(line, i);
if (x == -1) {
maybeIP = line.substring(i);
} else {
// ignore comments
int idx = indexOfNonWhiteSpace(line, x);
if (idx == -1 || line.charAt(idx) != '#') {
throw new IllegalArgumentException("error parsing label " + NAMESERVER_ROW_LABEL +
" in file " + etcResolverFile + ". value: " + line);
}
maybeIP = line.substring(i, x);
}
// There may be a port appended onto the IP address so we attempt to extract it.
if (!NetUtil.isValidIpV4Address(maybeIP) && !NetUtil.isValidIpV6Address(maybeIP)) {
i = maybeIP.lastIndexOf('.');
if (i + 1 >= maybeIP.length()) {
throw new IllegalArgumentException("error parsing label " + NAMESERVER_ROW_LABEL +
" in file " + etcResolverFile + ". invalid IP value: " + line);
}
port = Integer.parseInt(maybeIP.substring(i + 1));
maybeIP = maybeIP.substring(0, i);
}
InetSocketAddress addr = SocketUtils.socketAddress(maybeIP, port);
// Check if the address is resolved and only if this is the case use it. Otherwise just
// ignore it. This is needed to filter out invalid entries, as if for example an ipv6
// address is used with a scope that represent a network | UnixResolverDnsServerAddressStreamProvider |
java | spring-projects__spring-boot | module/spring-boot-devtools/src/main/java/org/springframework/boot/devtools/livereload/LiveReloadServer.java | {
"start": 1470,
"end": 6761
} | class ____ {
/**
* The default live reload server port.
*/
public static final int DEFAULT_PORT = 35729;
private static final Log logger = LogFactory.getLog(LiveReloadServer.class);
private static final int READ_TIMEOUT = (int) TimeUnit.SECONDS.toMillis(4);
private final ExecutorService executor = Executors.newCachedThreadPool(new WorkerThreadFactory());
private final List<Connection> connections = new ArrayList<>();
private final Object monitor = new Object();
private final int port;
private final ThreadFactory threadFactory;
private @Nullable ServerSocket serverSocket;
private @Nullable Thread listenThread;
/**
* Create a new {@link LiveReloadServer} listening on the default port.
*/
public LiveReloadServer() {
this(DEFAULT_PORT);
}
/**
* Create a new {@link LiveReloadServer} listening on the default port with a specific
* {@link ThreadFactory}.
* @param threadFactory the thread factory
*/
public LiveReloadServer(ThreadFactory threadFactory) {
this(DEFAULT_PORT, threadFactory);
}
/**
* Create a new {@link LiveReloadServer} listening on the specified port.
* @param port the listen port
*/
public LiveReloadServer(int port) {
this(port, Thread::new);
}
/**
* Create a new {@link LiveReloadServer} listening on the specified port with a
* specific {@link ThreadFactory}.
* @param port the listen port
* @param threadFactory the thread factory
*/
public LiveReloadServer(int port, ThreadFactory threadFactory) {
this.port = port;
this.threadFactory = threadFactory;
}
/**
* Start the livereload server and accept incoming connections.
* @return the port on which the server is listening
* @throws IOException in case of I/O errors
*/
public int start() throws IOException {
synchronized (this.monitor) {
Assert.state(!isStarted(), "Server already started");
logger.debug(LogMessage.format("Starting live reload server on port %s", this.port));
this.serverSocket = new ServerSocket(this.port);
int localPort = this.serverSocket.getLocalPort();
this.listenThread = this.threadFactory.newThread(this::acceptConnections);
this.listenThread.setDaemon(true);
this.listenThread.setName("Live Reload Server");
this.listenThread.start();
return localPort;
}
}
/**
* Return if the server has been started.
* @return {@code true} if the server is running
*/
public boolean isStarted() {
synchronized (this.monitor) {
return this.listenThread != null;
}
}
/**
* Return the port that the server is listening on.
* @return the server port
*/
public int getPort() {
return this.port;
}
private void acceptConnections() {
Assert.state(this.serverSocket != null, "'serverSocket' must not be null");
do {
try {
Socket socket = this.serverSocket.accept();
socket.setSoTimeout(READ_TIMEOUT);
this.executor.execute(new ConnectionHandler(socket));
}
catch (SocketTimeoutException ex) {
// Ignore
}
catch (Exception ex) {
if (logger.isDebugEnabled()) {
logger.debug("LiveReload server error", ex);
}
}
}
while (!this.serverSocket.isClosed());
}
/**
* Gracefully stop the livereload server.
* @throws IOException in case of I/O errors
*/
public void stop() throws IOException {
synchronized (this.monitor) {
if (this.listenThread != null) {
closeAllConnections();
try {
this.executor.shutdown();
this.executor.awaitTermination(1, TimeUnit.MINUTES);
}
catch (InterruptedException ex) {
Thread.currentThread().interrupt();
}
Assert.state(this.serverSocket != null, "'serverSocket' must not be null");
this.serverSocket.close();
try {
this.listenThread.join();
}
catch (InterruptedException ex) {
Thread.currentThread().interrupt();
}
this.listenThread = null;
this.serverSocket = null;
}
}
}
private void closeAllConnections() throws IOException {
synchronized (this.connections) {
for (Connection connection : this.connections) {
connection.close();
}
}
}
/**
* Trigger livereload of all connected clients.
*/
public void triggerReload() {
synchronized (this.monitor) {
synchronized (this.connections) {
for (Connection connection : this.connections) {
try {
connection.triggerReload();
}
catch (Exception ex) {
logger.debug("Unable to send reload message", ex);
}
}
}
}
}
private void addConnection(Connection connection) {
synchronized (this.connections) {
this.connections.add(connection);
}
}
private void removeConnection(Connection connection) {
synchronized (this.connections) {
this.connections.remove(connection);
}
}
/**
* Factory method used to create the {@link Connection}.
* @param socket the source socket
* @param inputStream the socket input stream
* @param outputStream the socket output stream
* @return a connection
* @throws IOException in case of I/O errors
*/
protected Connection createConnection(Socket socket, InputStream inputStream, OutputStream outputStream)
throws IOException {
return new Connection(socket, inputStream, outputStream);
}
/**
* {@link Runnable} to handle a single connection.
*
* @see Connection
*/
private | LiveReloadServer |
java | mockito__mockito | mockito-core/src/main/java/org/mockito/internal/InOrderImpl.java | {
"start": 1193,
"end": 4627
} | class ____ implements InOrder, InOrderContext {
private final MockitoCore mockitoCore = new MockitoCore();
private final List<Object> mocksToBeVerifiedInOrder = new ArrayList<>();
private final InOrderContext inOrderContext = new InOrderContextImpl();
public List<Object> getMocksToBeVerifiedInOrder() {
return mocksToBeVerifiedInOrder;
}
public InOrderImpl(List<?> mocksToBeVerifiedInOrder) {
this.mocksToBeVerifiedInOrder.addAll(mocksToBeVerifiedInOrder);
}
@Override
public <T> T verify(T mock) {
return this.verify(mock, VerificationModeFactory.times(1));
}
@Override
public <T> T verify(T mock, VerificationMode mode) {
if (mock == null) {
throw nullPassedToVerify();
}
MockingDetails mockingDetails = mockingDetails(mock);
if (!mockingDetails.isMock()) {
throw notAMockPassedToVerify(mock.getClass());
}
if (!this.objectIsMockToBeVerified(mock)) {
throw inOrderRequiresFamiliarMock();
}
if (mode instanceof VerificationWrapper) {
return mockitoCore.verify(
mock,
new VerificationWrapperInOrderWrapper((VerificationWrapper<?>) mode, this));
} else if (!(mode instanceof VerificationInOrderMode)) {
throw new MockitoException(
mode.getClass().getSimpleName() + " is not implemented to work with InOrder");
}
return mockitoCore.verify(mock, new InOrderWrapper((VerificationInOrderMode) mode, this));
}
@Override
public void verify(
MockedStatic<?> mockedStatic,
MockedStatic.Verification verification,
VerificationMode mode) {
if (mode instanceof VerificationWrapper) {
mockedStatic.verify(
verification,
new VerificationWrapperInOrderWrapper((VerificationWrapper<?>) mode, this));
} else if (mode instanceof VerificationInOrderMode) {
mockedStatic.verify(
verification, new InOrderWrapper((VerificationInOrderMode) mode, this));
} else {
throw new MockitoException(
mode.getClass().getSimpleName() + " is not implemented to work with InOrder");
}
}
// We can't use `this.mocksToBeVerifiedInOrder.contains`, since that in turn calls `.equals` on
// the mock. Since mocks can be spies and spies get their real equals method calls called, the
// result is that Mockito incorrectly would register an invocation on a mock. This normally
// wouldn't be a problem, unless the user explicitly verifies that no interactions are performed
// on the mock, which would start to fail for the equals invocation.
private boolean objectIsMockToBeVerified(Object mock) {
for (Object inOrderMock : this.mocksToBeVerifiedInOrder) {
if (inOrderMock == mock) {
return true;
}
}
return false;
}
@Override
public boolean isVerified(Invocation i) {
return inOrderContext.isVerified(i);
}
@Override
public void markVerified(Invocation i) {
inOrderContext.markVerified(i);
}
@Override
public void verifyNoMoreInteractions() {
mockitoCore.verifyNoMoreInteractionsInOrder(mocksToBeVerifiedInOrder, this);
}
}
| InOrderImpl |
java | elastic__elasticsearch | x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/search/aggregations/metrics/GeoShapeCentroidAggregator.java | {
"start": 1738,
"end": 7863
} | class ____ extends MetricsAggregator {
private final GeoShapeValuesSource valuesSource;
private DoubleArray lonSum, lonCompensations, latSum, latCompensations, weightSum, weightCompensations;
private LongArray counts;
private ByteArray dimensionalShapeTypes;
public GeoShapeCentroidAggregator(
String name,
AggregationContext context,
Aggregator parent,
ValuesSourceConfig valuesSourceConfig,
Map<String, Object> metadata
) throws IOException {
super(name, context, parent, metadata);
assert valuesSourceConfig.hasValues();
this.valuesSource = (GeoShapeValuesSource) valuesSourceConfig.getValuesSource();
lonSum = bigArrays().newDoubleArray(1, true);
lonCompensations = bigArrays().newDoubleArray(1, true);
latSum = bigArrays().newDoubleArray(1, true);
latCompensations = bigArrays().newDoubleArray(1, true);
weightSum = bigArrays().newDoubleArray(1, true);
weightCompensations = bigArrays().newDoubleArray(1, true);
counts = bigArrays().newLongArray(1, true);
dimensionalShapeTypes = bigArrays().newByteArray(1, true);
}
@Override
public LeafBucketCollector getLeafCollector(AggregationExecutionContext aggCtx, LeafBucketCollector sub) {
final GeoShapeValues values = valuesSource.shapeValues(aggCtx.getLeafReaderContext());
final CompensatedSum compensatedSumLat = new CompensatedSum(0, 0);
final CompensatedSum compensatedSumLon = new CompensatedSum(0, 0);
final CompensatedSum compensatedSumWeight = new CompensatedSum(0, 0);
return new LeafBucketCollectorBase(sub, values) {
@Override
public void collect(int doc, long bucket) throws IOException {
if (values.advanceExact(doc)) {
maybeResize(bucket);
// increment by the number of points for this document
counts.increment(bucket, 1);
// Compute the sum of double values with Kahan summation algorithm which is more
// accurate than naive summation.
final DimensionalShapeType shapeType = DimensionalShapeType.fromOrdinalByte(dimensionalShapeTypes.get(bucket));
final GeoShapeValues.GeoShapeValue value = values.value();
final int compares = shapeType.compareTo(value.dimensionalShapeType());
// update the sum
if (compares < 0) {
// shape with higher dimensional value
final double coordinateWeight = value.weight();
compensatedSumLat.reset(coordinateWeight * value.getY(), 0.0);
compensatedSumLon.reset(coordinateWeight * value.getX(), 0.0);
compensatedSumWeight.reset(coordinateWeight, 0.0);
dimensionalShapeTypes.set(bucket, (byte) value.dimensionalShapeType().ordinal());
} else if (compares == 0) {
// shape with the same dimensional value
compensatedSumLat.reset(latSum.get(bucket), latCompensations.get(bucket));
compensatedSumLon.reset(lonSum.get(bucket), lonCompensations.get(bucket));
compensatedSumWeight.reset(weightSum.get(bucket), weightCompensations.get(bucket));
final double coordinateWeight = value.weight();
compensatedSumLat.add(coordinateWeight * value.getY());
compensatedSumLon.add(coordinateWeight * value.getX());
compensatedSumWeight.add(coordinateWeight);
} else {
// do not modify centroid calculation since shape is of lower dimension than the running dimension
return;
}
lonSum.set(bucket, compensatedSumLon.value());
lonCompensations.set(bucket, compensatedSumLon.delta());
latSum.set(bucket, compensatedSumLat.value());
latCompensations.set(bucket, compensatedSumLat.delta());
weightSum.set(bucket, compensatedSumWeight.value());
weightCompensations.set(bucket, compensatedSumWeight.delta());
}
}
private void maybeResize(long bucket) {
latSum = bigArrays().grow(latSum, bucket + 1);
lonSum = bigArrays().grow(lonSum, bucket + 1);
weightSum = bigArrays().grow(weightSum, bucket + 1);
lonCompensations = bigArrays().grow(lonCompensations, bucket + 1);
latCompensations = bigArrays().grow(latCompensations, bucket + 1);
weightCompensations = bigArrays().grow(weightCompensations, bucket + 1);
counts = bigArrays().grow(counts, bucket + 1);
dimensionalShapeTypes = bigArrays().grow(dimensionalShapeTypes, bucket + 1);
}
};
}
@Override
public InternalAggregation buildAggregation(long bucket) {
if (bucket >= counts.size()) {
return buildEmptyAggregation();
}
final long bucketCount = counts.get(bucket);
final double bucketWeight = weightSum.get(bucket);
final GeoPoint bucketCentroid = (bucketWeight > 0)
? new GeoPoint(latSum.get(bucket) / bucketWeight, lonSum.get(bucket) / bucketWeight)
: null;
return new InternalGeoCentroid(name, bucketCentroid, bucketCount, metadata());
}
@Override
public InternalAggregation buildEmptyAggregation() {
return InternalGeoCentroid.empty(name, metadata());
}
@Override
public void doClose() {
Releasables.close(
latSum,
latCompensations,
lonSum,
lonCompensations,
counts,
weightSum,
weightCompensations,
dimensionalShapeTypes
);
}
}
| GeoShapeCentroidAggregator |
java | elastic__elasticsearch | x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/authorization/PreconfiguredEndpointModelAdapter.java | {
"start": 930,
"end": 1895
} | class ____ {
public static List<Model> getModels(Set<String> inferenceIds, ElasticInferenceServiceComponents elasticInferenceServiceComponents) {
return inferenceIds.stream()
.sorted()
.filter(EIS_PRECONFIGURED_ENDPOINT_IDS::contains)
.map(id -> createModel(InternalPreconfiguredEndpoints.getWithInferenceId(id), elasticInferenceServiceComponents))
.toList();
}
public static Model createModel(
InternalPreconfiguredEndpoints.MinimalModel minimalModel,
ElasticInferenceServiceComponents elasticInferenceServiceComponents
) {
return new ElasticInferenceServiceModel(
minimalModel.configurations(),
new ModelSecrets(EmptySecretSettings.INSTANCE),
minimalModel.rateLimitServiceSettings(),
elasticInferenceServiceComponents
);
}
private PreconfiguredEndpointModelAdapter() {}
}
| PreconfiguredEndpointModelAdapter |
java | elastic__elasticsearch | x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/common/DocumentConversionUtilsTests.java | {
"start": 1010,
"end": 5198
} | class ____ extends ESTestCase {
private static final String INDEX = "some-index";
private static final String PIPELINE = "some-pipeline";
private static final String ID = "some-id";
private static final Map<String, Object> DOCUMENT = Map.ofEntries(
entry("field-1", "field-1-value"),
entry("field-2", "field-2-value"),
entry("field-3", "field-3-value"),
entry("_internal-field-1", "internal-field-1-value"),
entry("_internal-field-2", "internal-field-2-value")
);
private static final Map<String, Object> DOCUMENT_WITHOUT_ID = Map.ofEntries(
entry("field-1", "field-1-value"),
entry("field-2", "field-2-value"),
entry("field-3", "field-3-value"),
entry("_internal-field-1", "internal-field-1-value"),
entry("_internal-field-2", "internal-field-2-value")
);
private static final Map<String, Object> DOCUMENT_WITHOUT_INTERNAL_FIELDS = Map.ofEntries(
entry("field-1", "field-1-value"),
entry("field-2", "field-2-value"),
entry("field-3", "field-3-value")
);
public void testConvertDocumentToIndexRequest_MissingId() {
Exception e = expectThrows(
Exception.class,
() -> DocumentConversionUtils.convertDocumentToIndexRequest(null, Collections.emptyMap(), INDEX, PIPELINE)
);
assertThat(e.getMessage(), is(equalTo("Expected a document id but got null.")));
}
public void testConvertDocumentToIndexRequest() {
IndexRequest indexRequest = DocumentConversionUtils.convertDocumentToIndexRequest(ID, DOCUMENT, INDEX, PIPELINE);
assertThat(indexRequest.index(), is(equalTo(INDEX)));
assertThat(indexRequest.id(), is(equalTo(ID)));
assertThat(indexRequest.getPipeline(), is(equalTo(PIPELINE)));
assertThat(indexRequest.sourceAsMap(), is(equalTo(DOCUMENT_WITHOUT_ID)));
}
public void testConvertDocumentToIndexRequest_WithNullIndex() {
IndexRequest indexRequest = DocumentConversionUtils.convertDocumentToIndexRequest(ID, DOCUMENT, null, PIPELINE);
assertThat(indexRequest.index(), is(nullValue()));
assertThat(indexRequest.id(), is(equalTo(ID)));
assertThat(indexRequest.getPipeline(), is(equalTo(PIPELINE)));
assertThat(indexRequest.sourceAsMap(), is(equalTo(DOCUMENT_WITHOUT_ID)));
}
public void testConvertDocumentToIndexRequest_WithNullPipeline() {
IndexRequest indexRequest = DocumentConversionUtils.convertDocumentToIndexRequest(ID, DOCUMENT, INDEX, null);
assertThat(indexRequest.index(), is(equalTo(INDEX)));
assertThat(indexRequest.id(), is(equalTo(ID)));
assertThat(indexRequest.getPipeline(), is(nullValue()));
assertThat(indexRequest.sourceAsMap(), is(equalTo(DOCUMENT_WITHOUT_ID)));
}
public void testRemoveInternalFields() {
assertThat(DocumentConversionUtils.removeInternalFields(DOCUMENT), is(equalTo(DOCUMENT_WITHOUT_INTERNAL_FIELDS)));
}
public void testExtractFieldMappings() {
FieldCapabilitiesResponse response = FieldCapabilitiesResponse.builder()
.withIndices(new String[] { "some-index" })
.withFields(
Map.ofEntries(
entry("field-1", Map.of("keyword", createFieldCapabilities("field-1", "keyword"))),
entry(
"field-2",
Map.of("long", createFieldCapabilities("field-2", "long"), "keyword", createFieldCapabilities("field-2", "keyword"))
)
)
)
.build();
assertThat(
DocumentConversionUtils.extractFieldMappings(response),
allOf(hasEntry("field-1", "keyword"), hasEntry(is(equalTo("field-2")), is(oneOf("long", "keyword"))))
);
}
private static FieldCapabilities createFieldCapabilities(String name, String type) {
return new FieldCapabilitiesBuilder(name, type).indices(Strings.EMPTY_ARRAY)
.nonSearchableIndices(Strings.EMPTY_ARRAY)
.nonAggregatableIndices(Strings.EMPTY_ARRAY)
.build();
}
}
| DocumentConversionUtilsTests |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/sql/ast/tree/delete/DeleteStatement.java | {
"start": 624,
"end": 1926
} | class ____ extends AbstractUpdateOrDeleteStatement {
public static final String DEFAULT_ALIAS = "to_delete_";
public DeleteStatement(NamedTableReference targetTable, Predicate restriction) {
this( null, targetTable, new FromClause(), restriction, Collections.emptyList() );
}
public DeleteStatement(
NamedTableReference targetTable,
Predicate restriction,
List<ColumnReference> returningColumns) {
this( null, targetTable, new FromClause(), restriction, returningColumns );
}
public DeleteStatement(NamedTableReference targetTable, FromClause fromClause, Predicate restriction) {
this( null, targetTable, fromClause, restriction, Collections.emptyList() );
}
public DeleteStatement(
NamedTableReference targetTable,
FromClause fromClause,
Predicate restriction,
List<ColumnReference> returningColumns) {
this( null, targetTable, fromClause, restriction, returningColumns );
}
public DeleteStatement(
CteContainer cteContainer,
NamedTableReference targetTable,
FromClause fromClause,
Predicate restriction,
List<ColumnReference> returningColumns) {
super( cteContainer, targetTable, fromClause, restriction, returningColumns );
}
@Override
public void accept(SqlAstWalker walker) {
walker.visitDeleteStatement( this );
}
}
| DeleteStatement |
java | apache__camel | components/camel-zipfile/src/test/java/org/apache/camel/processor/aggregate/zipfile/ZipSplitAggregateTransactedIssueTest.java | {
"start": 4622,
"end": 4974
} | class ____ implements AggregationStrategy {
@Override
public Exchange aggregate(Exchange oldExchange, Exchange newExchange) {
String name = newExchange.getMessage().getHeader("CamelFileName", String.class);
LOG.info("Aggregating {}", name);
return newExchange;
}
}
}
| StringAggregationStrategy |
java | spring-projects__spring-framework | spring-context/src/test/java/org/springframework/context/annotation/ReflectionUtilsIntegrationTests.java | {
"start": 1071,
"end": 1723
} | class ____ {
@Test
void getUniqueDeclaredMethods_withCovariantReturnType_andCglibRewrittenMethodNames() {
Class<?> cglibLeaf = new ConfigurationClassEnhancer().enhance(Leaf.class, null);
int m1MethodCount = 0;
Method[] methods = ReflectionUtils.getUniqueDeclaredMethods(cglibLeaf);
for (Method method : methods) {
if (method.getName().equals("m1")) {
m1MethodCount++;
}
}
assertThat(m1MethodCount).isEqualTo(1);
for (Method method : methods) {
if (method.getName().contains("m1")) {
assertThat(Integer.class).isEqualTo(method.getReturnType());
}
}
}
@Configuration
abstract static | ReflectionUtilsIntegrationTests |
java | resilience4j__resilience4j | resilience4j-spring-boot2/src/test/java/io/github/resilience4j/circuitbreaker/CompositeHealthResponse.java | {
"start": 84,
"end": 526
} | class ____ {
private String status;
private Map<String, HealthResponse> details;
public Map<String, HealthResponse> getDetails() {
return details;
}
public void setDetails(Map<String, HealthResponse> details) {
this.details = details;
}
public String getStatus() {
return status;
}
public void setStatus(String status) {
this.status = status;
}
} | CompositeHealthResponse |
java | apache__camel | components/camel-mapstruct/src/main/java/org/apache/camel/component/mapstruct/MapstructProducer.java | {
"start": 1040,
"end": 1542
} | class ____ extends DefaultProducer {
private final Processor converter;
public MapstructProducer(MapstructEndpoint endpoint, Class<?> clazz, boolean mandatory) {
super(endpoint);
// we use convert body that will use type converter to find the mapstruct mapper
this.converter = new ConvertBodyProcessor(clazz, null, mandatory);
}
@Override
public void process(Exchange exchange) throws Exception {
converter.process(exchange);
}
}
| MapstructProducer |
java | netty__netty | buffer/src/main/java/io/netty/buffer/EmptyByteBuf.java | {
"start": 1370,
"end": 23862
} | class ____ extends ByteBuf {
static final int EMPTY_BYTE_BUF_HASH_CODE = 1;
private static final ByteBuffer EMPTY_BYTE_BUFFER = ByteBuffer.allocateDirect(0);
private static final long EMPTY_BYTE_BUFFER_ADDRESS;
static {
long emptyByteBufferAddress = 0;
try {
if (PlatformDependent.hasUnsafe()) {
emptyByteBufferAddress = PlatformDependent.directBufferAddress(EMPTY_BYTE_BUFFER);
}
} catch (Throwable t) {
// Ignore
}
EMPTY_BYTE_BUFFER_ADDRESS = emptyByteBufferAddress;
}
private final ByteBufAllocator alloc;
private final ByteOrder order;
private final String str;
private EmptyByteBuf swapped;
public EmptyByteBuf(ByteBufAllocator alloc) {
this(alloc, ByteOrder.BIG_ENDIAN);
}
private EmptyByteBuf(ByteBufAllocator alloc, ByteOrder order) {
this.alloc = ObjectUtil.checkNotNull(alloc, "alloc");
this.order = order;
str = StringUtil.simpleClassName(this) + (order == ByteOrder.BIG_ENDIAN? "BE" : "LE");
}
@Override
public int capacity() {
return 0;
}
@Override
public ByteBuf capacity(int newCapacity) {
throw new ReadOnlyBufferException();
}
@Override
public ByteBufAllocator alloc() {
return alloc;
}
@Override
public ByteOrder order() {
return order;
}
@Override
public ByteBuf unwrap() {
return null;
}
@Override
public ByteBuf asReadOnly() {
return Unpooled.unmodifiableBuffer(this);
}
@Override
public boolean isReadOnly() {
return false;
}
@Override
public boolean isDirect() {
return true;
}
@Override
public int maxCapacity() {
return 0;
}
@Override
public ByteBuf order(ByteOrder endianness) {
if (ObjectUtil.checkNotNull(endianness, "endianness") == order()) {
return this;
}
EmptyByteBuf swapped = this.swapped;
if (swapped != null) {
return swapped;
}
this.swapped = swapped = new EmptyByteBuf(alloc(), endianness);
return swapped;
}
@Override
public int readerIndex() {
return 0;
}
@Override
public ByteBuf readerIndex(int readerIndex) {
return checkIndex(readerIndex);
}
@Override
public int writerIndex() {
return 0;
}
@Override
public ByteBuf writerIndex(int writerIndex) {
return checkIndex(writerIndex);
}
@Override
public ByteBuf setIndex(int readerIndex, int writerIndex) {
checkIndex(readerIndex);
checkIndex(writerIndex);
return this;
}
@Override
public int readableBytes() {
return 0;
}
@Override
public int writableBytes() {
return 0;
}
@Override
public int maxWritableBytes() {
return 0;
}
@Override
public boolean isReadable() {
return false;
}
@Override
public boolean isWritable() {
return false;
}
@Override
public ByteBuf clear() {
return this;
}
@Override
public ByteBuf markReaderIndex() {
return this;
}
@Override
public ByteBuf resetReaderIndex() {
return this;
}
@Override
public ByteBuf markWriterIndex() {
return this;
}
@Override
public ByteBuf resetWriterIndex() {
return this;
}
@Override
public ByteBuf discardReadBytes() {
return this;
}
@Override
public ByteBuf discardSomeReadBytes() {
return this;
}
@Override
public ByteBuf ensureWritable(int minWritableBytes) {
checkPositiveOrZero(minWritableBytes, "minWritableBytes");
if (minWritableBytes != 0) {
throw new IndexOutOfBoundsException();
}
return this;
}
@Override
public int ensureWritable(int minWritableBytes, boolean force) {
checkPositiveOrZero(minWritableBytes, "minWritableBytes");
if (minWritableBytes == 0) {
return 0;
}
return 1;
}
@Override
public boolean getBoolean(int index) {
throw new IndexOutOfBoundsException();
}
@Override
public byte getByte(int index) {
throw new IndexOutOfBoundsException();
}
@Override
public short getUnsignedByte(int index) {
throw new IndexOutOfBoundsException();
}
@Override
public short getShort(int index) {
throw new IndexOutOfBoundsException();
}
@Override
public short getShortLE(int index) {
throw new IndexOutOfBoundsException();
}
@Override
public int getUnsignedShort(int index) {
throw new IndexOutOfBoundsException();
}
@Override
public int getUnsignedShortLE(int index) {
throw new IndexOutOfBoundsException();
}
@Override
public int getMedium(int index) {
throw new IndexOutOfBoundsException();
}
@Override
public int getMediumLE(int index) {
throw new IndexOutOfBoundsException();
}
@Override
public int getUnsignedMedium(int index) {
throw new IndexOutOfBoundsException();
}
@Override
public int getUnsignedMediumLE(int index) {
throw new IndexOutOfBoundsException();
}
@Override
public int getInt(int index) {
throw new IndexOutOfBoundsException();
}
@Override
public int getIntLE(int index) {
throw new IndexOutOfBoundsException();
}
@Override
public long getUnsignedInt(int index) {
throw new IndexOutOfBoundsException();
}
@Override
public long getUnsignedIntLE(int index) {
throw new IndexOutOfBoundsException();
}
@Override
public long getLong(int index) {
throw new IndexOutOfBoundsException();
}
@Override
public long getLongLE(int index) {
throw new IndexOutOfBoundsException();
}
@Override
public char getChar(int index) {
throw new IndexOutOfBoundsException();
}
@Override
public float getFloat(int index) {
throw new IndexOutOfBoundsException();
}
@Override
public double getDouble(int index) {
throw new IndexOutOfBoundsException();
}
@Override
public ByteBuf getBytes(int index, ByteBuf dst) {
return checkIndex(index, dst.writableBytes());
}
@Override
public ByteBuf getBytes(int index, ByteBuf dst, int length) {
return checkIndex(index, length);
}
@Override
public ByteBuf getBytes(int index, ByteBuf dst, int dstIndex, int length) {
return checkIndex(index, length);
}
@Override
public ByteBuf getBytes(int index, byte[] dst) {
return checkIndex(index, dst.length);
}
@Override
public ByteBuf getBytes(int index, byte[] dst, int dstIndex, int length) {
return checkIndex(index, length);
}
@Override
public ByteBuf getBytes(int index, ByteBuffer dst) {
return checkIndex(index, dst.remaining());
}
@Override
public ByteBuf getBytes(int index, OutputStream out, int length) {
return checkIndex(index, length);
}
@Override
public int getBytes(int index, GatheringByteChannel out, int length) {
checkIndex(index, length);
return 0;
}
@Override
public int getBytes(int index, FileChannel out, long position, int length) {
checkIndex(index, length);
return 0;
}
@Override
public CharSequence getCharSequence(int index, int length, Charset charset) {
checkIndex(index, length);
return null;
}
@Override
public ByteBuf setBoolean(int index, boolean value) {
throw new IndexOutOfBoundsException();
}
@Override
public ByteBuf setByte(int index, int value) {
throw new IndexOutOfBoundsException();
}
@Override
public ByteBuf setShort(int index, int value) {
throw new IndexOutOfBoundsException();
}
@Override
public ByteBuf setShortLE(int index, int value) {
throw new IndexOutOfBoundsException();
}
@Override
public ByteBuf setMedium(int index, int value) {
throw new IndexOutOfBoundsException();
}
@Override
public ByteBuf setMediumLE(int index, int value) {
throw new IndexOutOfBoundsException();
}
@Override
public ByteBuf setInt(int index, int value) {
throw new IndexOutOfBoundsException();
}
@Override
public ByteBuf setIntLE(int index, int value) {
throw new IndexOutOfBoundsException();
}
@Override
public ByteBuf setLong(int index, long value) {
throw new IndexOutOfBoundsException();
}
@Override
public ByteBuf setLongLE(int index, long value) {
throw new IndexOutOfBoundsException();
}
@Override
public ByteBuf setChar(int index, int value) {
throw new IndexOutOfBoundsException();
}
@Override
public ByteBuf setFloat(int index, float value) {
throw new IndexOutOfBoundsException();
}
@Override
public ByteBuf setDouble(int index, double value) {
throw new IndexOutOfBoundsException();
}
@Override
public ByteBuf setBytes(int index, ByteBuf src) {
throw new IndexOutOfBoundsException();
}
@Override
public ByteBuf setBytes(int index, ByteBuf src, int length) {
return checkIndex(index, length);
}
@Override
public ByteBuf setBytes(int index, ByteBuf src, int srcIndex, int length) {
return checkIndex(index, length);
}
@Override
public ByteBuf setBytes(int index, byte[] src) {
return checkIndex(index, src.length);
}
@Override
public ByteBuf setBytes(int index, byte[] src, int srcIndex, int length) {
return checkIndex(index, length);
}
@Override
public ByteBuf setBytes(int index, ByteBuffer src) {
return checkIndex(index, src.remaining());
}
@Override
public int setBytes(int index, InputStream in, int length) {
checkIndex(index, length);
return 0;
}
@Override
public int setBytes(int index, ScatteringByteChannel in, int length) {
checkIndex(index, length);
return 0;
}
@Override
public int setBytes(int index, FileChannel in, long position, int length) {
checkIndex(index, length);
return 0;
}
@Override
public ByteBuf setZero(int index, int length) {
return checkIndex(index, length);
}
@Override
public int setCharSequence(int index, CharSequence sequence, Charset charset) {
throw new IndexOutOfBoundsException();
}
@Override
public boolean readBoolean() {
throw new IndexOutOfBoundsException();
}
@Override
public byte readByte() {
throw new IndexOutOfBoundsException();
}
@Override
public short readUnsignedByte() {
throw new IndexOutOfBoundsException();
}
@Override
public short readShort() {
throw new IndexOutOfBoundsException();
}
@Override
public short readShortLE() {
throw new IndexOutOfBoundsException();
}
@Override
public int readUnsignedShort() {
throw new IndexOutOfBoundsException();
}
@Override
public int readUnsignedShortLE() {
throw new IndexOutOfBoundsException();
}
@Override
public int readMedium() {
throw new IndexOutOfBoundsException();
}
@Override
public int readMediumLE() {
throw new IndexOutOfBoundsException();
}
@Override
public int readUnsignedMedium() {
throw new IndexOutOfBoundsException();
}
@Override
public int readUnsignedMediumLE() {
throw new IndexOutOfBoundsException();
}
@Override
public int readInt() {
throw new IndexOutOfBoundsException();
}
@Override
public int readIntLE() {
throw new IndexOutOfBoundsException();
}
@Override
public long readUnsignedInt() {
throw new IndexOutOfBoundsException();
}
@Override
public long readUnsignedIntLE() {
throw new IndexOutOfBoundsException();
}
@Override
public long readLong() {
throw new IndexOutOfBoundsException();
}
@Override
public long readLongLE() {
throw new IndexOutOfBoundsException();
}
@Override
public char readChar() {
throw new IndexOutOfBoundsException();
}
@Override
public float readFloat() {
throw new IndexOutOfBoundsException();
}
@Override
public double readDouble() {
throw new IndexOutOfBoundsException();
}
@Override
public ByteBuf readBytes(int length) {
return checkLength(length);
}
@Override
public ByteBuf readSlice(int length) {
return checkLength(length);
}
@Override
public ByteBuf readRetainedSlice(int length) {
return checkLength(length);
}
@Override
public ByteBuf readBytes(ByteBuf dst) {
return checkLength(dst.writableBytes());
}
@Override
public ByteBuf readBytes(ByteBuf dst, int length) {
return checkLength(length);
}
@Override
public ByteBuf readBytes(ByteBuf dst, int dstIndex, int length) {
return checkLength(length);
}
@Override
public ByteBuf readBytes(byte[] dst) {
return checkLength(dst.length);
}
@Override
public ByteBuf readBytes(byte[] dst, int dstIndex, int length) {
return checkLength(length);
}
@Override
public ByteBuf readBytes(ByteBuffer dst) {
return checkLength(dst.remaining());
}
@Override
public ByteBuf readBytes(OutputStream out, int length) {
return checkLength(length);
}
@Override
public int readBytes(GatheringByteChannel out, int length) {
checkLength(length);
return 0;
}
@Override
public int readBytes(FileChannel out, long position, int length) {
checkLength(length);
return 0;
}
@Override
public CharSequence readCharSequence(int length, Charset charset) {
checkLength(length);
return StringUtil.EMPTY_STRING;
}
@Override
public String readString(int length, Charset charset) {
checkLength(length);
return StringUtil.EMPTY_STRING;
}
@Override
public ByteBuf skipBytes(int length) {
return checkLength(length);
}
@Override
public ByteBuf writeBoolean(boolean value) {
throw new IndexOutOfBoundsException();
}
@Override
public ByteBuf writeByte(int value) {
throw new IndexOutOfBoundsException();
}
@Override
public ByteBuf writeShort(int value) {
throw new IndexOutOfBoundsException();
}
@Override
public ByteBuf writeShortLE(int value) {
throw new IndexOutOfBoundsException();
}
@Override
public ByteBuf writeMedium(int value) {
throw new IndexOutOfBoundsException();
}
@Override
public ByteBuf writeMediumLE(int value) {
throw new IndexOutOfBoundsException();
}
@Override
public ByteBuf writeInt(int value) {
throw new IndexOutOfBoundsException();
}
@Override
public ByteBuf writeIntLE(int value) {
throw new IndexOutOfBoundsException();
}
@Override
public ByteBuf writeLong(long value) {
throw new IndexOutOfBoundsException();
}
@Override
public ByteBuf writeLongLE(long value) {
throw new IndexOutOfBoundsException();
}
@Override
public ByteBuf writeChar(int value) {
throw new IndexOutOfBoundsException();
}
@Override
public ByteBuf writeFloat(float value) {
throw new IndexOutOfBoundsException();
}
@Override
public ByteBuf writeDouble(double value) {
throw new IndexOutOfBoundsException();
}
@Override
public ByteBuf writeBytes(ByteBuf src) {
return checkLength(src.readableBytes());
}
@Override
public ByteBuf writeBytes(ByteBuf src, int length) {
return checkLength(length);
}
@Override
public ByteBuf writeBytes(ByteBuf src, int srcIndex, int length) {
return checkLength(length);
}
@Override
public ByteBuf writeBytes(byte[] src) {
return checkLength(src.length);
}
@Override
public ByteBuf writeBytes(byte[] src, int srcIndex, int length) {
return checkLength(length);
}
@Override
public ByteBuf writeBytes(ByteBuffer src) {
return checkLength(src.remaining());
}
@Override
public int writeBytes(InputStream in, int length) {
checkLength(length);
return 0;
}
@Override
public int writeBytes(ScatteringByteChannel in, int length) {
checkLength(length);
return 0;
}
@Override
public int writeBytes(FileChannel in, long position, int length) {
checkLength(length);
return 0;
}
@Override
public ByteBuf writeZero(int length) {
return checkLength(length);
}
@Override
public int writeCharSequence(CharSequence sequence, Charset charset) {
throw new IndexOutOfBoundsException();
}
@Override
public int indexOf(int fromIndex, int toIndex, byte value) {
checkIndex(fromIndex);
checkIndex(toIndex);
return -1;
}
@Override
public int bytesBefore(byte value) {
return -1;
}
@Override
public int bytesBefore(int length, byte value) {
checkLength(length);
return -1;
}
@Override
public int bytesBefore(int index, int length, byte value) {
checkIndex(index, length);
return -1;
}
@Override
public int forEachByte(ByteProcessor processor) {
return -1;
}
@Override
public int forEachByte(int index, int length, ByteProcessor processor) {
checkIndex(index, length);
return -1;
}
@Override
public int forEachByteDesc(ByteProcessor processor) {
return -1;
}
@Override
public int forEachByteDesc(int index, int length, ByteProcessor processor) {
checkIndex(index, length);
return -1;
}
@Override
public ByteBuf copy() {
return this;
}
@Override
public ByteBuf copy(int index, int length) {
return checkIndex(index, length);
}
@Override
public ByteBuf slice() {
return this;
}
@Override
public ByteBuf retainedSlice() {
return this;
}
@Override
public ByteBuf slice(int index, int length) {
return checkIndex(index, length);
}
@Override
public ByteBuf retainedSlice(int index, int length) {
return checkIndex(index, length);
}
@Override
public ByteBuf duplicate() {
return this;
}
@Override
public ByteBuf retainedDuplicate() {
return this;
}
@Override
public int nioBufferCount() {
return 1;
}
@Override
public ByteBuffer nioBuffer() {
return EMPTY_BYTE_BUFFER;
}
@Override
public ByteBuffer nioBuffer(int index, int length) {
checkIndex(index, length);
return nioBuffer();
}
@Override
public ByteBuffer[] nioBuffers() {
return new ByteBuffer[] { EMPTY_BYTE_BUFFER };
}
@Override
public ByteBuffer[] nioBuffers(int index, int length) {
checkIndex(index, length);
return nioBuffers();
}
@Override
public ByteBuffer internalNioBuffer(int index, int length) {
return EMPTY_BYTE_BUFFER;
}
@Override
public boolean hasArray() {
return true;
}
@Override
public byte[] array() {
return EmptyArrays.EMPTY_BYTES;
}
@Override
public int arrayOffset() {
return 0;
}
@Override
public boolean hasMemoryAddress() {
return EMPTY_BYTE_BUFFER_ADDRESS != 0;
}
@Override
public long memoryAddress() {
if (hasMemoryAddress()) {
return EMPTY_BYTE_BUFFER_ADDRESS;
} else {
throw new UnsupportedOperationException();
}
}
@Override
public boolean isContiguous() {
return true;
}
@Override
public String toString(Charset charset) {
return "";
}
@Override
public String toString(int index, int length, Charset charset) {
checkIndex(index, length);
return toString(charset);
}
@Override
public int hashCode() {
return EMPTY_BYTE_BUF_HASH_CODE;
}
@Override
public boolean equals(Object obj) {
return obj instanceof ByteBuf && !((ByteBuf) obj).isReadable();
}
@Override
public int compareTo(ByteBuf buffer) {
return buffer.isReadable()? -1 : 0;
}
@Override
public String toString() {
return str;
}
@Override
public boolean isReadable(int size) {
return false;
}
@Override
public boolean isWritable(int size) {
return false;
}
@Override
public int refCnt() {
return 1;
}
@Override
public ByteBuf retain() {
return this;
}
@Override
public ByteBuf retain(int increment) {
return this;
}
@Override
public ByteBuf touch() {
return this;
}
@Override
public ByteBuf touch(Object hint) {
return this;
}
@Override
public boolean release() {
return false;
}
@Override
public boolean release(int decrement) {
return false;
}
private ByteBuf checkIndex(int index) {
if (index != 0) {
throw new IndexOutOfBoundsException();
}
return this;
}
private ByteBuf checkIndex(int index, int length) {
checkPositiveOrZero(length, "length");
if (index != 0 || length != 0) {
throw new IndexOutOfBoundsException();
}
return this;
}
private ByteBuf checkLength(int length) {
checkPositiveOrZero(length, "length");
if (length != 0) {
throw new IndexOutOfBoundsException();
}
return this;
}
}
| EmptyByteBuf |
java | quarkusio__quarkus | independent-projects/resteasy-reactive/server/runtime/src/main/java/org/jboss/resteasy/reactive/server/spi/DefaultRuntimeConfiguration.java | {
"start": 171,
"end": 2061
} | class ____ implements RuntimeConfiguration {
final Duration readTimeout;
private final Body body;
private final Limits limits;
public DefaultRuntimeConfiguration(Duration readTimeout, boolean deleteUploadedFilesOnEnd, String uploadsDirectory,
List<String> fileContentTypes, Charset defaultCharset, OptionalLong maxBodySize, long maxFormAttributeSize,
int maxParameters) {
this.readTimeout = readTimeout;
body = new Body() {
final Body.MultiPart multiPart = new Body.MultiPart() {
@Override
public List<String> fileContentTypes() {
return fileContentTypes;
}
};
@Override
public boolean deleteUploadedFilesOnEnd() {
return deleteUploadedFilesOnEnd;
}
@Override
public String uploadsDirectory() {
return uploadsDirectory;
}
@Override
public Charset defaultCharset() {
return defaultCharset;
}
@Override
public MultiPart multiPart() {
return multiPart;
}
};
limits = new Limits() {
@Override
public OptionalLong maxBodySize() {
return maxBodySize;
}
@Override
public long maxFormAttributeSize() {
return maxFormAttributeSize;
}
@Override
public int maxParameters() {
return maxParameters;
}
};
}
@Override
public Duration readTimeout() {
return readTimeout;
}
@Override
public Body body() {
return body;
}
@Override
public Limits limits() {
return limits;
}
}
| DefaultRuntimeConfiguration |
java | elastic__elasticsearch | server/src/test/java/org/elasticsearch/action/support/ReservedStateAwareHandledTransportActionTests.java | {
"start": 6113,
"end": 6261
} | class ____ extends ActionResponse {
@Override
public void writeTo(StreamOutput out) throws IOException {}
}
static | FakeResponse |
java | apache__flink | flink-formats/flink-csv/src/main/java/org/apache/flink/formats/csv/CsvCommons.java | {
"start": 1912,
"end": 5210
} | class ____ {
public static final String IDENTIFIER = "csv";
// ------------------------------------------------------------------------
// Validation
// ------------------------------------------------------------------------
static void validateFormatOptions(ReadableConfig tableOptions) {
final boolean hasQuoteCharacter = tableOptions.getOptional(QUOTE_CHARACTER).isPresent();
final boolean isDisabledQuoteCharacter = tableOptions.get(DISABLE_QUOTE_CHARACTER);
if (isDisabledQuoteCharacter && hasQuoteCharacter) {
throw new ValidationException(
"Format cannot define a quote character and disabled quote character at the same time.");
}
// Validate the option value must be a single char.
validateCharacterVal(tableOptions, FIELD_DELIMITER, true);
validateCharacterVal(tableOptions, ARRAY_ELEMENT_DELIMITER);
validateCharacterVal(tableOptions, QUOTE_CHARACTER);
validateCharacterVal(tableOptions, ESCAPE_CHARACTER);
}
/** Validates the option {@code option} value must be a Character. */
private static void validateCharacterVal(
ReadableConfig tableOptions, ConfigOption<String> option) {
validateCharacterVal(tableOptions, option, false);
}
/**
* Validates the option {@code option} value must be a Character.
*
* @param tableOptions the table options
* @param option the config option
* @param unescape whether to unescape the option value
*/
private static void validateCharacterVal(
ReadableConfig tableOptions, ConfigOption<String> option, boolean unescape) {
if (!tableOptions.getOptional(option).isPresent()) {
return;
}
final String value =
unescape
? StringEscapeUtils.unescapeJava(tableOptions.get(option))
: tableOptions.get(option);
if (value.length() != 1) {
throw new ValidationException(
String.format(
"Option '%s.%s' must be a string with single character, but was: %s",
IDENTIFIER, option.key(), tableOptions.get(option)));
}
}
public static Set<ConfigOption<?>> optionalOptions() {
Set<ConfigOption<?>> options = new HashSet<>();
options.add(FIELD_DELIMITER);
options.add(DISABLE_QUOTE_CHARACTER);
options.add(QUOTE_CHARACTER);
options.add(ALLOW_COMMENTS);
options.add(IGNORE_PARSE_ERRORS);
options.add(ARRAY_ELEMENT_DELIMITER);
options.add(ESCAPE_CHARACTER);
options.add(NULL_LITERAL);
options.add(WRITE_BIGDECIMAL_IN_SCIENTIFIC_NOTATION);
return options;
}
public static Set<ConfigOption<?>> forwardOptions() {
Set<ConfigOption<?>> options = new HashSet<>();
options.add(FIELD_DELIMITER);
options.add(DISABLE_QUOTE_CHARACTER);
options.add(QUOTE_CHARACTER);
options.add(ALLOW_COMMENTS);
options.add(ARRAY_ELEMENT_DELIMITER);
options.add(ESCAPE_CHARACTER);
options.add(NULL_LITERAL);
options.add(WRITE_BIGDECIMAL_IN_SCIENTIFIC_NOTATION);
return options;
}
}
| CsvCommons |
java | quarkusio__quarkus | extensions/resteasy-reactive/rest/deployment/src/main/java/io/quarkus/resteasy/reactive/server/deployment/ServerSerialisersBuildItem.java | {
"start": 186,
"end": 498
} | class ____ extends SimpleBuildItem {
private final ServerSerialisers serialisers;
public ServerSerialisersBuildItem(ServerSerialisers serialisers) {
this.serialisers = serialisers;
}
public ServerSerialisers getSerialisers() {
return serialisers;
}
}
| ServerSerialisersBuildItem |
java | google__guice | core/src/com/google/inject/spi/ElementSource.java | {
"start": 5015,
"end": 5301
} | class ____ of root module.
*/
public List<String> getModuleClassNames() {
return moduleSource.getModuleClassNames();
}
/** Returns {@code getDeclaringSource().toString()} value. */
@Override
public String toString() {
return getDeclaringSource().toString();
}
}
| name |
java | eclipse-vertx__vert.x | vertx-core/src/test/java/io/vertx/test/fakeloadbalancer/FakeLoadBalancer.java | {
"start": 797,
"end": 1823
} | class ____<E> implements InteractionMetrics<FakeMetric> {
List<FakeMetric> metrics = new ArrayList<>();
public List<FakeMetric> metrics2() {
return metrics;
}
@Override
public FakeMetric initiateRequest() {
FakeMetric metric = new FakeMetric();
metrics.add(metric);
return metric;
}
@Override
public void reportFailure(FakeMetric metric, Throwable failure) {
metric.failure = failure;
}
@Override
public void reportRequestBegin(FakeMetric metric) {
metric.requestBegin = System.currentTimeMillis();
}
@Override
public void reportRequestEnd(FakeMetric metric) {
metric.requestEnd = System.currentTimeMillis();
}
@Override
public void reportResponseBegin(FakeMetric metric) {
metric.responseBegin = System.currentTimeMillis();
}
@Override
public void reportResponseEnd(FakeMetric metric) {
metric.responseEnd = System.currentTimeMillis();
}
}
public static | FakeLoadBalancerMetrics |
java | quarkusio__quarkus | extensions/quartz/deployment/src/test/java/io/quarkus/quartz/test/programmatic/ProgrammaticJobsTest.java | {
"start": 1287,
"end": 6812
} | class ____ {
@RegisterExtension
static final QuarkusUnitTest test = new QuarkusUnitTest()
.withApplicationRoot((jar) -> jar
.addClasses(Jobs.class));
@Inject
org.quartz.Scheduler quartzScheduler;
@Inject
Scheduler scheduler;
@Inject
MyService myService;
static final CountDownLatch SYNC_LATCH = new CountDownLatch(1);
static final CountDownLatch SYNC_CLASS_LATCH = new CountDownLatch(1);
static final CountDownLatch ASYNC_LATCH = new CountDownLatch(1);
static final AtomicInteger SKIPPED_EXECUTIONS = new AtomicInteger();
static final CountDownLatch ASYNC_CLASS_LATCH = new CountDownLatch(1);
@Test
public void testJobs() throws InterruptedException {
scheduler.newJob("alwaysSkip1")
.setInterval("1s")
.setSkipPredicate(ex -> true)
.setTask(ex -> SKIPPED_EXECUTIONS.incrementAndGet())
.schedule();
scheduler.newJob("alwaysSkip2")
.setInterval("1s")
.setTask(ex -> SKIPPED_EXECUTIONS.incrementAndGet())
.setSkipPredicate(AlwaysSkipPredicate.class)
.schedule();
Scheduler.JobDefinition<?> job1 = scheduler.newJob("foo")
.setInterval("1s")
.setTask(ec -> {
assertTrue(Arc.container().requestContext().isActive());
myService.countDown(SYNC_LATCH);
});
assertEquals("Sync task was already set",
assertThrows(IllegalStateException.class, () -> job1.setAsyncTask(ec -> null)).getMessage());
Scheduler.JobDefinition<?> job2 = scheduler.newJob("foo").setCron("0/5 * * * * ?");
assertEquals("Either sync or async task must be set",
assertThrows(IllegalStateException.class, () -> job2.schedule()).getMessage());
job2.setTask(ec -> {
});
Trigger trigger1 = job1.schedule();
assertNotNull(trigger1);
assertTrue(ProgrammaticJobsTest.SYNC_LATCH.await(5, TimeUnit.SECONDS));
assertEquals("Cannot modify a job that was already scheduled",
assertThrows(IllegalStateException.class, () -> job1.setCron("fff")).getMessage());
// Since job1 was already scheduled - job2 defines a non-unique identity
assertEquals("A job with this identity is already scheduled: foo",
assertThrows(IllegalStateException.class, () -> job2.schedule()).getMessage());
// Identity must be unique
assertEquals("A job with this identity is already scheduled: foo",
assertThrows(IllegalStateException.class, () -> scheduler.newJob("foo")).getMessage());
assertEquals("A job with this identity is already scheduled: bar",
assertThrows(IllegalStateException.class, () -> scheduler.newJob("bar")).getMessage());
// No-op
assertNull(scheduler.unscheduleJob("bar"));
assertNull(scheduler.unscheduleJob("nonexisting"));
assertNotNull(scheduler.unscheduleJob("foo"));
assertNotNull(scheduler.unscheduleJob("alwaysSkip1"));
assertNotNull(scheduler.unscheduleJob("alwaysSkip2"));
assertEquals(0, SKIPPED_EXECUTIONS.get());
// Jobs#dummy()
assertEquals(1, scheduler.getScheduledJobs().size());
}
@Test
public void testAsyncJob() throws InterruptedException, SchedulerException {
String identity = "fooAsync";
JobDefinition<?> asyncJob = scheduler.newJob(identity)
.setInterval("1s")
.setAsyncTask(ec -> {
assertTrue(Context.isOnEventLoopThread() && VertxContext.isOnDuplicatedContext());
assertTrue(Arc.container().requestContext().isActive());
myService.countDown(ASYNC_LATCH);
return Uni.createFrom().voidItem();
});
assertEquals("Async task was already set",
assertThrows(IllegalStateException.class, () -> asyncJob.setTask(ec -> {
})).getMessage());
Trigger trigger = asyncJob.schedule();
assertNotNull(trigger);
// JobKey is always built using the identity and "io.quarkus.scheduler.Scheduler" as the group name
JobDetail jobDetail = quartzScheduler.getJobDetail(new JobKey(identity, Scheduler.class.getName()));
assertNotNull(jobDetail);
// We only store metadata for DB store type
assertNull(jobDetail.getJobDataMap().get("scheduled_metadata"));
assertTrue(ProgrammaticJobsTest.ASYNC_LATCH.await(5, TimeUnit.SECONDS));
assertNotNull(scheduler.unscheduleJob("fooAsync"));
}
@Test
public void testClassJobs() throws InterruptedException {
scheduler.newJob("fooClass")
.setInterval("1s")
.setTask(JobClassTask.class)
.schedule();
assertTrue(ProgrammaticJobsTest.SYNC_CLASS_LATCH.await(5, TimeUnit.SECONDS));
assertNotNull(scheduler.unscheduleJob("fooClass"));
}
@Test
public void testClassAsyncJobs() throws InterruptedException {
scheduler.newJob("fooAsyncClass")
.setInterval("1s")
.setAsyncTask(JobClassAsyncTask.class)
.schedule();
assertTrue(ProgrammaticJobsTest.ASYNC_CLASS_LATCH.await(5, TimeUnit.SECONDS));
assertNotNull(scheduler.unscheduleJob("fooAsyncClass"));
}
static | ProgrammaticJobsTest |
java | mybatis__mybatis-3 | src/test/java/org/apache/ibatis/submitted/global_variables_defaults/XmlMapperTest.java | {
"start": 1163,
"end": 3447
} | class ____ {
@Test
void applyDefaultValueOnXmlMapper() throws IOException {
Properties props = new Properties();
props.setProperty(PropertyParser.KEY_ENABLE_DEFAULT_VALUE, "true");
Reader reader = Resources
.getResourceAsReader("org/apache/ibatis/submitted/global_variables_defaults/mybatis-config.xml");
SqlSessionFactory factory = new SqlSessionFactoryBuilder().build(reader, props);
Configuration configuration = factory.getConfiguration();
configuration.addMapper(XmlMapper.class);
SupportClasses.CustomCache cache = SupportClasses.Utils.unwrap(configuration.getCache(XmlMapper.class.getName()));
Assertions.assertThat(cache.getName()).isEqualTo("default");
try (SqlSession sqlSession = factory.openSession()) {
XmlMapper mapper = sqlSession.getMapper(XmlMapper.class);
Assertions.assertThat(mapper.ping()).isEqualTo("Hello");
Assertions.assertThat(mapper.selectOne()).isEqualTo("1");
Assertions.assertThat(mapper.selectFromVariable()).isEqualTo("9999");
}
}
@Test
void applyPropertyValueOnXmlMapper() throws IOException {
Properties props = new Properties();
props.setProperty(PropertyParser.KEY_ENABLE_DEFAULT_VALUE, "true");
props.setProperty("ping.sql", "SELECT 'Hi' FROM INFORMATION_SCHEMA.SYSTEM_USERS");
props.setProperty("cache.name", "custom");
props.setProperty("select.columns", "'5555'");
Reader reader = Resources
.getResourceAsReader("org/apache/ibatis/submitted/global_variables_defaults/mybatis-config.xml");
SqlSessionFactory factory = new SqlSessionFactoryBuilder().build(reader, props);
Configuration configuration = factory.getConfiguration();
configuration.addMapper(XmlMapper.class);
SupportClasses.CustomCache cache = SupportClasses.Utils.unwrap(configuration.getCache(XmlMapper.class.getName()));
Assertions.assertThat(cache.getName()).isEqualTo("custom");
try (SqlSession sqlSession = factory.openSession()) {
XmlMapper mapper = sqlSession.getMapper(XmlMapper.class);
Assertions.assertThat(mapper.ping()).isEqualTo("Hi");
Assertions.assertThat(mapper.selectOne()).isEqualTo("1");
Assertions.assertThat(mapper.selectFromVariable()).isEqualTo("5555");
}
}
public | XmlMapperTest |
java | apache__camel | components/camel-jms/src/test/java/org/apache/camel/component/jms/JmsRouteWithObjectMessageTest.java | {
"start": 948,
"end": 1169
} | class ____ extends JmsRouteTest {
@Override
@Test
public void testSendAndReceiveMessage() throws Exception {
assertSendAndReceiveBody(new PurchaseOrder("Beer", 10));
}
}
| JmsRouteWithObjectMessageTest |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/rest/ServerlessApiProtections.java | {
"start": 509,
"end": 826
} | class ____ {
private volatile boolean enabled;
public ServerlessApiProtections(boolean enabled) {
this.enabled = enabled;
}
public boolean isEnabled() {
return enabled;
}
public void setEnabled(boolean enabled) {
this.enabled = enabled;
}
}
| ServerlessApiProtections |
java | quarkusio__quarkus | extensions/resteasy-classic/resteasy/deployment/src/test/java/io/quarkus/resteasy/test/multipart/MultipartResource.java | {
"start": 288,
"end": 639
} | class ____ {
@POST
@Consumes(MediaType.APPLICATION_FORM_URLENCODED)
public String hello(MultivaluedMap<String, String> formData) {
return formData.entrySet().stream()
.map(e -> e.getKey() + ":" + String.join(",", e.getValue()))
.collect(toList())
.toString();
}
}
| MultipartResource |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.