language stringclasses 1
value | repo stringclasses 60
values | path stringlengths 22 294 | class_span dict | source stringlengths 13 1.16M | target stringlengths 1 113 |
|---|---|---|---|---|---|
java | apache__dubbo | dubbo-cluster/src/test/java/org/apache/dubbo/rpc/cluster/router/mesh/rule/virtualservice/match/DubboAttachmentMatchTest.java | {
"start": 1303,
"end": 9627
} | class ____ {
@Test
void dubboContextMatch() {
DubboAttachmentMatch dubboAttachmentMatch = new DubboAttachmentMatch();
Map<String, StringMatch> dubbocontextMatchMap = new HashMap<>();
StringMatch nameMatch = new StringMatch();
nameMatch.setExact("qinliujie");
dubbocontextMatchMap.put("name", nameMatch);
StringMatch machineGroupMatch = new StringMatch();
machineGroupMatch.setExact("test_host");
dubbocontextMatchMap.put("machineGroup", machineGroupMatch);
dubboAttachmentMatch.setDubboContext(dubbocontextMatchMap);
Map<String, String> invokeDubboContextMap = new HashMap<>();
invokeDubboContextMap.put("name", "qinliujie");
invokeDubboContextMap.put("machineGroup", "test_host");
invokeDubboContextMap.put("other", "other");
RpcInvocation rpcInvocation = new RpcInvocation();
rpcInvocation.setAttachments(invokeDubboContextMap);
assertTrue(dubboAttachmentMatch.isMatch(rpcInvocation, Collections.emptySet()));
Map<String, String> invokeDubboContextMap2 = new HashMap<>();
invokeDubboContextMap2.put("name", "jack");
invokeDubboContextMap2.put("machineGroup", "test_host");
invokeDubboContextMap2.put("other", "other");
RpcInvocation rpcInvocation2 = new RpcInvocation();
rpcInvocation2.setAttachments(invokeDubboContextMap2);
assertFalse(dubboAttachmentMatch.isMatch(rpcInvocation2, Collections.emptySet()));
Map<String, String> invokeDubboContextMap3 = new HashMap<>();
invokeDubboContextMap3.put("name", "qinliujie");
invokeDubboContextMap3.put("machineGroup", "my_host");
invokeDubboContextMap3.put("other", "other");
RpcInvocation rpcInvocation3 = new RpcInvocation();
rpcInvocation3.setAttachments(invokeDubboContextMap3);
assertFalse(dubboAttachmentMatch.isMatch(rpcInvocation3, Collections.emptySet()));
}
@Test
void tracingContextMatch() {
DubboAttachmentMatch dubboAttachmentMatch = new DubboAttachmentMatch();
Map<String, StringMatch> tracingContextMatchMap = new HashMap<>();
StringMatch nameMatch = new StringMatch();
nameMatch.setExact("qinliujie");
tracingContextMatchMap.put("name", nameMatch);
StringMatch machineGroupMatch = new StringMatch();
machineGroupMatch.setExact("test_host");
tracingContextMatchMap.put("machineGroup", machineGroupMatch);
dubboAttachmentMatch.setTracingContext(tracingContextMatchMap);
Map<String, String> invokeEagleEyeContextMap = new HashMap<>();
invokeEagleEyeContextMap.put("name", "qinliujie");
invokeEagleEyeContextMap.put("machineGroup", "test_host");
invokeEagleEyeContextMap.put("other", "other");
TracingContextProvider tracingContextProvider = (invocation, key) -> invokeEagleEyeContextMap.get(key);
assertTrue(dubboAttachmentMatch.isMatch(
Mockito.mock(Invocation.class), Collections.singleton(tracingContextProvider)));
Map<String, String> invokeTracingContextMap2 = new HashMap<>();
invokeTracingContextMap2.put("name", "jack");
invokeTracingContextMap2.put("machineGroup", "test_host");
invokeTracingContextMap2.put("other", "other");
TracingContextProvider tracingContextProvider2 = (invocation, key) -> invokeTracingContextMap2.get(key);
assertFalse(dubboAttachmentMatch.isMatch(
Mockito.mock(Invocation.class), Collections.singleton(tracingContextProvider2)));
Map<String, String> invokeEagleEyeContextMap3 = new HashMap<>();
invokeEagleEyeContextMap3.put("name", "qinliujie");
invokeEagleEyeContextMap3.put("machineGroup", "my_host");
invokeEagleEyeContextMap3.put("other", "other");
TracingContextProvider tracingContextProvider3 = (invocation, key) -> invokeEagleEyeContextMap3.get(key);
assertFalse(dubboAttachmentMatch.isMatch(
Mockito.mock(Invocation.class), Collections.singleton(tracingContextProvider3)));
}
@Test
void contextMatch() {
DubboAttachmentMatch dubboAttachmentMatch = new DubboAttachmentMatch();
Map<String, StringMatch> tracingContextMatchMap = new HashMap<>();
StringMatch nameMatch = new StringMatch();
nameMatch.setExact("qinliujie");
tracingContextMatchMap.put("name", nameMatch);
dubboAttachmentMatch.setTracingContext(tracingContextMatchMap);
Map<String, String> invokeTracingContextMap = new HashMap<>();
invokeTracingContextMap.put("name", "qinliujie");
invokeTracingContextMap.put("machineGroup", "test_host");
invokeTracingContextMap.put("other", "other");
Map<String, StringMatch> dubboContextMatchMap = new HashMap<>();
StringMatch dpathMatch = new StringMatch();
dpathMatch.setExact("PRE");
dubboContextMatchMap.put("dpath", dpathMatch);
dubboAttachmentMatch.setDubboContext(dubboContextMatchMap);
Map<String, String> invokeDubboContextMap = new HashMap<>();
invokeDubboContextMap.put("dpath", "PRE");
TracingContextProvider tracingContextProvider = (invocation, key) -> invokeTracingContextMap.get(key);
RpcInvocation rpcInvocation = new RpcInvocation();
rpcInvocation.setAttachments(invokeDubboContextMap);
assertTrue(dubboAttachmentMatch.isMatch(rpcInvocation, Collections.singleton(tracingContextProvider)));
Map<String, String> invokeTracingContextMap1 = new HashMap<>();
invokeTracingContextMap1.put("name", "jack");
invokeTracingContextMap1.put("machineGroup", "test_host");
invokeTracingContextMap1.put("other", "other");
TracingContextProvider tracingContextProvider1 = (invocation, key) -> invokeTracingContextMap1.get(key);
RpcInvocation rpcInvocation1 = new RpcInvocation();
rpcInvocation1.setAttachments(invokeDubboContextMap);
assertFalse(dubboAttachmentMatch.isMatch(rpcInvocation1, Collections.singleton(tracingContextProvider1)));
Map<String, String> invokeDubboContextMap1 = new HashMap<>();
invokeDubboContextMap1.put("dpath", "PRE-2");
TracingContextProvider tracingContextProvider2 = (invocation, key) -> invokeTracingContextMap.get(key);
RpcInvocation rpcInvocation2 = new RpcInvocation();
rpcInvocation2.setAttachments(invokeDubboContextMap1);
assertFalse(dubboAttachmentMatch.isMatch(rpcInvocation2, Collections.singleton(tracingContextProvider2)));
TracingContextProvider tracingContextProvider3 = (invocation, key) -> invokeTracingContextMap1.get(key);
RpcInvocation rpcInvocation3 = new RpcInvocation();
rpcInvocation3.setAttachments(invokeDubboContextMap1);
assertFalse(dubboAttachmentMatch.isMatch(rpcInvocation3, Collections.singleton(tracingContextProvider3)));
Map<String, String> invokeTracingContextMap2 = new HashMap<>();
invokeTracingContextMap2.put("machineGroup", "test_host");
invokeTracingContextMap2.put("other", "other");
TracingContextProvider tracingContextProvider4 = (invocation, key) -> invokeTracingContextMap2.get(key);
RpcInvocation rpcInvocation4 = new RpcInvocation();
rpcInvocation4.setAttachments(invokeDubboContextMap);
assertFalse(dubboAttachmentMatch.isMatch(rpcInvocation4, Collections.singleton(tracingContextProvider4)));
Map<String, String> invokeDubboContextMap2 = new HashMap<>();
TracingContextProvider tracingContextProvider5 = (invocation, key) -> invokeTracingContextMap.get(key);
RpcInvocation rpcInvocation5 = new RpcInvocation();
rpcInvocation5.setAttachments(invokeDubboContextMap2);
assertFalse(dubboAttachmentMatch.isMatch(rpcInvocation5, Collections.singleton(tracingContextProvider5)));
TracingContextProvider tracingContextProvider6 = (invocation, key) -> invokeTracingContextMap2.get(key);
RpcInvocation rpcInvocation6 = new RpcInvocation();
rpcInvocation5.setAttachments(invokeDubboContextMap2);
assertFalse(dubboAttachmentMatch.isMatch(rpcInvocation6, Collections.singleton(tracingContextProvider6)));
}
}
| DubboAttachmentMatchTest |
java | quarkusio__quarkus | extensions/hibernate-reactive/runtime/src/main/java/io/quarkus/hibernate/reactive/runtime/graal/Substitute_FastBootHibernateReactivePersistenceProvider.java | {
"start": 2043,
"end": 2506
} | class ____ implements BooleanSupplier {
private boolean agroalAbsent;
public IsAgroalAbsent() {
try {
Class.forName("io.quarkus.agroal.DataSource");
agroalAbsent = false;
} catch (ClassNotFoundException e) {
agroalAbsent = true;
}
}
@Override
public boolean getAsBoolean() {
return agroalAbsent;
}
}
}
| IsAgroalAbsent |
java | grpc__grpc-java | api/src/main/java/io/grpc/LongUpDownCounterMetricInstrument.java | {
"start": 737,
"end": 1234
} | class ____ extends PartialMetricInstrument {
public LongUpDownCounterMetricInstrument(int index, String name, String description, String unit,
List<String> requiredLabelKeys,
List<String> optionalLabelKeys,
boolean enableByDefault) {
super(index, name, description, unit, requiredLabelKeys, optionalLabelKeys, enableByDefault);
}
} | LongUpDownCounterMetricInstrument |
java | apache__spark | common/kvstore/src/main/java/org/apache/spark/util/kvstore/InMemoryStore.java | {
"start": 4201,
"end": 4270
} | class ____ type T to an InstanceList of type T.
*/
private static | of |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/CheckpointFaultInjector.java | {
"start": 1012,
"end": 2003
} | class ____ {
public static CheckpointFaultInjector instance =
new CheckpointFaultInjector();
public static CheckpointFaultInjector getInstance() {
return instance;
}
public static void set(CheckpointFaultInjector instance) {
CheckpointFaultInjector.instance = instance;
}
public void beforeGetImageSetsHeaders() throws IOException {}
public void afterSecondaryCallsRollEditLog() throws IOException {}
public void duringMerge() throws IOException {}
public void afterSecondaryUploadsNewImage() throws IOException {}
public void aboutToSendFile(File localfile) throws IOException {}
public boolean shouldSendShortFile(File localfile) {
return false;
}
public boolean shouldCorruptAByte(File localfile) {
return false;
}
public void afterMD5Rename() throws IOException {}
public void beforeEditsRename() throws IOException {}
public void duringUploadInProgess() throws InterruptedException, IOException {
}
}
| CheckpointFaultInjector |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/persister/entity/EntityPersister.java | {
"start": 4169,
"end": 5518
} | interface ____ a contract between the persistence strategy and
* the {@link org.hibernate.engine.spi.SessionImplementor session}. It does
* not define operations that are required for querying, nor for loading by
* outer join.
* <p>
* Unless a custom {@link org.hibernate.persister.spi.PersisterFactory} is
* used, it is expected that implementations of {@code EntityPersister}
* define a constructor accepting the following arguments:
* <ol>
* <li>
* {@link org.hibernate.mapping.PersistentClass} - describes the
* metadata about the entity to be handled by the persister
* </li>
* <li>
* {@link EntityDataAccess} - the second level caching strategy for
* this entity
* </li>
* <li>
* {@link NaturalIdDataAccess} - the second level caching strategy
* for any natural id defined for this entity
* </li>
* <li>
* {@link org.hibernate.metamodel.spi.RuntimeModelCreationContext} -
* access to additional information useful while constructing the
* persister.
* </li>
* </ol>
* Implementations must be thread-safe (and preferably immutable).
*
* @author Gavin King
* @author Steve Ebersole
*
* @see org.hibernate.persister.spi.PersisterFactory
* @see org.hibernate.persister.spi.PersisterClassResolver
*/
public | defines |
java | spring-projects__spring-security | data/src/main/java/org/springframework/security/data/aot/hint/AuthorizeReturnObjectDataHintsRegistrar.java | {
"start": 2037,
"end": 2647
} | class ____ published as an infrastructural bean by the
* {@code spring-security-config} module. However, in the event you need to publish it
* yourself, remember to publish it as an infrastructural bean like so:
*
* <pre>
* @Bean
* @Role(BeanDefinition.ROLE_INFRASTRUCTURE)
* static SecurityHintsRegistrar proxyThese(AuthorizationProxyFactory proxyFactory) {
* return new AuthorizeReturnObjectDataHintsRegistrar(proxyFactory);
* }
* </pre>
*
* @author Josh Cummings
* @since 6.4
* @see AuthorizeReturnObjectCoreHintsRegistrar
* @see AuthorizeReturnObjectHintsRegistrar
*/
public final | is |
java | spring-projects__spring-framework | spring-context-support/src/main/java/org/springframework/cache/jcache/interceptor/AbstractJCacheKeyOperation.java | {
"start": 1198,
"end": 3958
} | class ____<A extends Annotation> extends AbstractJCacheOperation<A> {
private final KeyGenerator keyGenerator;
private final List<CacheParameterDetail> keyParameterDetails;
/**
* Create a new instance.
* @param methodDetails the {@link CacheMethodDetails} related to the cached method
* @param cacheResolver the cache resolver to resolve regular caches
* @param keyGenerator the key generator to compute cache keys
*/
protected AbstractJCacheKeyOperation(CacheMethodDetails<A> methodDetails,
CacheResolver cacheResolver, KeyGenerator keyGenerator) {
super(methodDetails, cacheResolver);
this.keyGenerator = keyGenerator;
this.keyParameterDetails = initializeKeyParameterDetails(this.allParameterDetails);
}
/**
* Return the {@link KeyGenerator} to use to compute cache keys.
*/
public KeyGenerator getKeyGenerator() {
return this.keyGenerator;
}
/**
* Return the {@link CacheInvocationParameter} for the parameters that are to be
* used to compute the key.
* <p>Per the spec, if some method parameters are annotated with
* {@link javax.cache.annotation.CacheKey}, only those parameters should be part
* of the key. If none are annotated, all parameters except the parameter annotated
* with {@link javax.cache.annotation.CacheValue} should be part of the key.
* <p>The method arguments must match the signature of the related method invocation
* @param values the parameters value for a particular invocation
* @return the {@link CacheInvocationParameter} instances for the parameters to be
* used to compute the key
*/
public CacheInvocationParameter[] getKeyParameters(@Nullable Object... values) {
List<CacheInvocationParameter> result = new ArrayList<>();
for (CacheParameterDetail keyParameterDetail : this.keyParameterDetails) {
int parameterPosition = keyParameterDetail.getParameterPosition();
if (parameterPosition >= values.length) {
throw new IllegalStateException("Values mismatch, key parameter at position " +
parameterPosition + " cannot be matched against " + values.length + " value(s)");
}
result.add(keyParameterDetail.toCacheInvocationParameter(values[parameterPosition]));
}
return result.toArray(new CacheInvocationParameter[0]);
}
private static List<CacheParameterDetail> initializeKeyParameterDetails(List<CacheParameterDetail> allParameters) {
List<CacheParameterDetail> all = new ArrayList<>();
List<CacheParameterDetail> annotated = new ArrayList<>();
for (CacheParameterDetail allParameter : allParameters) {
if (!allParameter.isValue()) {
all.add(allParameter);
}
if (allParameter.isKey()) {
annotated.add(allParameter);
}
}
return (annotated.isEmpty() ? all : annotated);
}
}
| AbstractJCacheKeyOperation |
java | apache__camel | components/camel-aws/camel-aws2-ddb/src/test/java/org/apache/camel/component/aws2/ddb/ScanCommandTest.java | {
"start": 1481,
"end": 3368
} | class ____ {
private ScanCommand command;
private AmazonDDBClientMock ddbClient;
private Ddb2Configuration configuration;
private Exchange exchange;
@BeforeEach
public void setUp() {
ddbClient = new AmazonDDBClientMock();
configuration = new Ddb2Configuration();
configuration.setTableName("DOMAIN1");
exchange = new DefaultExchange(new DefaultCamelContext());
command = new ScanCommand(ddbClient, configuration, exchange);
}
@Test
public void execute() {
Map<String, Condition> scanFilter = new HashMap<>();
Condition.Builder condition = Condition.builder().comparisonOperator(ComparisonOperator.GT.toString())
.attributeValueList(AttributeValue.builder().n("1985").build());
scanFilter.put("year", condition.build());
exchange.getIn().setHeader(Ddb2Constants.SCAN_FILTER, scanFilter);
command.execute();
Map<String, AttributeValue> mapAssert = new HashMap<>();
mapAssert.put("1", AttributeValue.builder().s("LAST_KEY").build());
ConsumedCapacity consumed = (ConsumedCapacity) exchange.getIn().getHeader(Ddb2Constants.CONSUMED_CAPACITY);
assertEquals(scanFilter, ddbClient.scanRequest.scanFilter());
assertEquals(Integer.valueOf(10), exchange.getIn().getHeader(Ddb2Constants.SCANNED_COUNT, Integer.class));
assertEquals(Integer.valueOf(1), exchange.getIn().getHeader(Ddb2Constants.COUNT, Integer.class));
assertEquals(Double.valueOf(1.0), consumed.capacityUnits());
assertEquals(mapAssert, exchange.getIn().getHeader(Ddb2Constants.LAST_EVALUATED_KEY, Map.class));
Map<?, ?> items = (Map<?, ?>) exchange.getIn().getHeader(Ddb2Constants.ITEMS, List.class).get(0);
assertEquals(AttributeValue.builder().s("attrValue").build(), items.get("attrName"));
}
}
| ScanCommandTest |
java | ReactiveX__RxJava | src/main/java/io/reactivex/rxjava3/internal/operators/observable/ObservableConcatWithCompletable.java | {
"start": 1190,
"end": 1639
} | class ____<T> extends AbstractObservableWithUpstream<T, T> {
final CompletableSource other;
public ObservableConcatWithCompletable(Observable<T> source, CompletableSource other) {
super(source);
this.other = other;
}
@Override
protected void subscribeActual(Observer<? super T> observer) {
source.subscribe(new ConcatWithObserver<>(observer, other));
}
static final | ObservableConcatWithCompletable |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/hql/joinedSubclass/Employee.java | {
"start": 231,
"end": 657
} | class ____ extends Person {
private String employeeNumber;
public Employee() {
}
public Employee(String name) {
super( name );
}
public Employee(String name, String employeeNumber) {
super( name );
this.employeeNumber = employeeNumber;
}
public String getEmployeeNumber() {
return employeeNumber;
}
public void setEmployeeNumber(String employeeNumber) {
this.employeeNumber = employeeNumber;
}
}
| Employee |
java | apache__flink | flink-clients/src/test/java/org/apache/flink/client/program/ClientTest.java | {
"start": 3363,
"end": 12364
} | class ____ {
@RegisterExtension
private static final InternalMiniClusterExtension MINI_CLUSTER_RESOURCE =
new InternalMiniClusterExtension(
new MiniClusterResourceConfiguration.Builder().build());
private StreamGraph streamGraph;
private Configuration config;
private static final String TEST_EXECUTOR_NAME = "test_executor";
private static final String ACCUMULATOR_NAME = "test_accumulator";
private static final String FAIL_MESSAGE =
"Invalid program should have thrown ProgramInvocationException.";
@BeforeEach
void setUp() {
StreamExecutionEnvironment env = StreamExecutionEnvironment.createLocalEnvironment();
env.fromSequence(1, 1000).sinkTo(new DiscardingSink<>());
streamGraph = env.getStreamGraph();
config = new Configuration();
config.set(JobManagerOptions.ADDRESS, "localhost");
config.set(RpcOptions.ASK_TIMEOUT_DURATION, RpcOptions.ASK_TIMEOUT_DURATION.defaultValue());
}
private Configuration fromPackagedProgram(
final PackagedProgram program, final int parallelism, final boolean detached) {
final Configuration configuration = new Configuration();
configuration.set(DeploymentOptions.TARGET, TEST_EXECUTOR_NAME);
configuration.set(CoreOptions.DEFAULT_PARALLELISM, parallelism);
configuration.set(DeploymentOptions.ATTACHED, !detached);
ConfigUtils.encodeCollectionToConfig(
configuration, PipelineOptions.CLASSPATHS, program.getClasspaths(), URL::toString);
ConfigUtils.encodeCollectionToConfig(
configuration,
PipelineOptions.JARS,
program.getJobJarAndDependencies(),
URL::toString);
return configuration;
}
/** Tests that invalid detached mode programs fail. */
@Test
void testDetachedMode() {
final ClusterClient<?> clusterClient =
new MiniClusterClient(new Configuration(), MINI_CLUSTER_RESOURCE.getMiniCluster());
assertThatThrownBy(
() -> {
PackagedProgram prg =
PackagedProgram.newBuilder()
.setEntryPointClassName(TestEager.class.getName())
.build();
final Configuration configuration = fromPackagedProgram(prg, 1, true);
ClientUtils.executeProgram(
new TestExecutorServiceLoader(clusterClient, streamGraph),
configuration,
prg,
false,
false);
fail(FAIL_MESSAGE);
})
.isInstanceOf(ProgramInvocationException.class)
.hasMessageContaining(
DetachedJobExecutionResult.DETACHED_MESSAGE
+ DetachedJobExecutionResult.JOB_RESULT_MESSAGE
+ DetachedJobExecutionResult.EAGER_FUNCTION_MESSAGE);
assertThatThrownBy(
() -> {
PackagedProgram prg =
PackagedProgram.newBuilder()
.setEntryPointClassName(TestGetRuntime.class.getName())
.build();
final Configuration configuration = fromPackagedProgram(prg, 1, true);
ClientUtils.executeProgram(
new TestExecutorServiceLoader(clusterClient, streamGraph),
configuration,
prg,
false,
false);
fail(FAIL_MESSAGE);
})
.isInstanceOf(ProgramInvocationException.class)
.hasMessageContaining(
DetachedJobExecutionResult.DETACHED_MESSAGE
+ DetachedJobExecutionResult.JOB_RESULT_MESSAGE);
assertThatThrownBy(
() -> {
PackagedProgram prg =
PackagedProgram.newBuilder()
.setEntryPointClassName(
TestGetAccumulator.class.getName())
.build();
final Configuration configuration = fromPackagedProgram(prg, 1, true);
ClientUtils.executeProgram(
new TestExecutorServiceLoader(clusterClient, streamGraph),
configuration,
prg,
false,
false);
fail(FAIL_MESSAGE);
})
.isInstanceOf(ProgramInvocationException.class)
.hasMessageContaining(
DetachedJobExecutionResult.DETACHED_MESSAGE
+ DetachedJobExecutionResult.JOB_RESULT_MESSAGE
+ DetachedJobExecutionResult.EAGER_FUNCTION_MESSAGE);
assertThatThrownBy(
() -> {
PackagedProgram prg =
PackagedProgram.newBuilder()
.setEntryPointClassName(
TestGetAllAccumulator.class.getName())
.build();
final Configuration configuration = fromPackagedProgram(prg, 1, true);
ClientUtils.executeProgram(
new TestExecutorServiceLoader(clusterClient, streamGraph),
configuration,
prg,
false,
false);
fail(FAIL_MESSAGE);
})
.isInstanceOf(ProgramInvocationException.class)
.hasMessageContaining(
DetachedJobExecutionResult.DETACHED_MESSAGE
+ DetachedJobExecutionResult.JOB_RESULT_MESSAGE);
}
@Test
void testMultiExecuteWithEnforcingSingleJobExecution() {
assertThatThrownBy(
() -> {
try {
launchMultiExecuteJob(true);
} catch (Exception e) {
if (e instanceof ProgramInvocationException) {
throw e.getCause();
}
}
fail("Test should have failed due to multiple execute() calls.");
})
.isInstanceOf(FlinkRuntimeException.class);
}
@Test
void testMultiExecuteWithoutEnforcingSingleJobExecution() throws ProgramInvocationException {
launchMultiExecuteJob(false);
}
private void launchMultiExecuteJob(final boolean enforceSingleJobExecution)
throws ProgramInvocationException {
try (final ClusterClient<?> clusterClient =
new MiniClusterClient(
new Configuration(), MINI_CLUSTER_RESOURCE.getMiniCluster())) {
final PackagedProgram program =
PackagedProgram.newBuilder()
.setEntryPointClassName(TestMultiExecute.class.getName())
.build();
final Configuration configuration = fromPackagedProgram(program, 1, false);
ClientUtils.executeProgram(
new TestExecutorServiceLoader(clusterClient, streamGraph),
configuration,
program,
enforceSingleJobExecution,
false);
}
}
/** This test verifies correct job submission messaging logic and plan translation calls. */
@Test
void shouldSubmitToJobClient() {
final ClusterClient<?> clusterClient =
new MiniClusterClient(new Configuration(), MINI_CLUSTER_RESOURCE.getMiniCluster());
JobGraph jobGraph = streamGraph.getJobGraph();
jobGraph.addJars(Collections.emptyList());
jobGraph.setClasspaths(Collections.emptyList());
assertThatFuture(clusterClient.submitJob(jobGraph)).eventuallySucceeds().isNotNull();
}
public static | ClientTest |
java | apache__camel | dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/QuickfixjEndpointBuilderFactory.java | {
"start": 15492,
"end": 18222
} | interface ____
extends
QuickfixjEndpointConsumerBuilder,
QuickfixjEndpointProducerBuilder {
default AdvancedQuickfixjEndpointBuilder advanced() {
return (AdvancedQuickfixjEndpointBuilder) this;
}
/**
* This option allows creating QuickFIX/J engine on demand. Value true
* means the engine is started when first message is send or there's
* consumer configured in route definition. When false value is used,
* the engine is started at the endpoint creation. When this parameter
* is missing, the value of component's property lazyCreateEngines is
* being used.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: common
*
* @param lazyCreateEngine the value to set
* @return the dsl builder
*/
default QuickfixjEndpointBuilder lazyCreateEngine(boolean lazyCreateEngine) {
doSetProperty("lazyCreateEngine", lazyCreateEngine);
return this;
}
/**
* This option allows creating QuickFIX/J engine on demand. Value true
* means the engine is started when first message is send or there's
* consumer configured in route definition. When false value is used,
* the engine is started at the endpoint creation. When this parameter
* is missing, the value of component's property lazyCreateEngines is
* being used.
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: false
* Group: common
*
* @param lazyCreateEngine the value to set
* @return the dsl builder
*/
default QuickfixjEndpointBuilder lazyCreateEngine(String lazyCreateEngine) {
doSetProperty("lazyCreateEngine", lazyCreateEngine);
return this;
}
/**
* The optional sessionID identifies a specific FIX session. The format
* of the sessionID is:
* (BeginString):(SenderCompID)/(SenderSubID)/(SenderLocationID)-(TargetCompID)/(TargetSubID)/(TargetLocationID).
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: common
*
* @param sessionID the value to set
* @return the dsl builder
*/
default QuickfixjEndpointBuilder sessionID(String sessionID) {
doSetProperty("sessionID", sessionID);
return this;
}
}
/**
* Advanced builder for endpoint for the QuickFix component.
*/
public | QuickfixjEndpointBuilder |
java | netty__netty | codec-http/src/main/java/io/netty/handler/codec/http/ServerCookieEncoder.java | {
"start": 1461,
"end": 3381
} | class ____ {
/**
* Encodes the specified cookie name-value pair into a Set-Cookie header value.
*
* @param name the cookie name
* @param value the cookie value
* @return a single Set-Cookie header value
*/
@Deprecated
public static String encode(String name, String value) {
return io.netty.handler.codec.http.cookie.ServerCookieEncoder.LAX.encode(name, value);
}
/**
* Encodes the specified cookie into a Set-Cookie header value.
*
* @param cookie the cookie
* @return a single Set-Cookie header value
*/
@Deprecated
public static String encode(Cookie cookie) {
return io.netty.handler.codec.http.cookie.ServerCookieEncoder.LAX.encode(cookie);
}
/**
* Batch encodes cookies into Set-Cookie header values.
*
* @param cookies a bunch of cookies
* @return the corresponding bunch of Set-Cookie headers
*/
@Deprecated
public static List<String> encode(Cookie... cookies) {
return io.netty.handler.codec.http.cookie.ServerCookieEncoder.LAX.encode(cookies);
}
/**
* Batch encodes cookies into Set-Cookie header values.
*
* @param cookies a bunch of cookies
* @return the corresponding bunch of Set-Cookie headers
*/
@Deprecated
public static List<String> encode(Collection<Cookie> cookies) {
return io.netty.handler.codec.http.cookie.ServerCookieEncoder.LAX.encode(cookies);
}
/**
* Batch encodes cookies into Set-Cookie header values.
*
* @param cookies a bunch of cookies
* @return the corresponding bunch of Set-Cookie headers
*/
@Deprecated
public static List<String> encode(Iterable<Cookie> cookies) {
return io.netty.handler.codec.http.cookie.ServerCookieEncoder.LAX.encode(cookies);
}
private ServerCookieEncoder() {
// Unused
}
}
| ServerCookieEncoder |
java | google__dagger | javatests/dagger/internal/codegen/DuplicateBindingsValidationTest.java | {
"start": 38375,
"end": 39177
} | class ____ {",
" @Provides @IntoSet static String string() {",
" return \"provided2\";",
" }",
" }",
"}");
CompilerTests.daggerCompiler(foo, injected1, injected2, provided1, provided2)
.compile(
subject -> {
subject.hasErrorCount(1);
subject.hasErrorContaining(
message(
"Foo is bound multiple times:",
" @Inject Foo(Set<String>) [Injected1]",
" @Provides Foo Provided1.Provided1Module.provideFoo(Set<String>) "
+ "[Injected1 → Injected2 → Provided1]"))
.onSource(injected1)
.onLineContaining(" | Provided2Module |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/runtime/state/AbstractChannelStateHandle.java | {
"start": 1399,
"end": 4193
} | class ____<Info> implements StateObject {
private static final long serialVersionUID = 1L;
private final Info info;
private final StreamStateHandle delegate;
/**
* Start offsets in a {@link org.apache.flink.core.fs.FSDataInputStream stream} {@link
* StreamStateHandle#openInputStream obtained} from {@link #delegate}.
*/
private final List<Long> offsets;
private final long size;
/** The original subtask index before rescaling recovery. */
private final int subtaskIndex;
AbstractChannelStateHandle(
StreamStateHandle delegate,
List<Long> offsets,
int subtaskIndex,
Info info,
long size) {
this.subtaskIndex = subtaskIndex;
this.info = checkNotNull(info);
this.delegate = checkNotNull(delegate);
this.offsets = checkNotNull(offsets);
this.size = size;
}
public static Stream<StreamStateHandle> collectUniqueDelegates(
Stream<StateObjectCollection<? extends AbstractChannelStateHandle<?>>> collections) {
return collections
.flatMap(Collection::stream)
.map(AbstractChannelStateHandle::getDelegate)
.distinct();
}
@Override
public void discardState() throws Exception {
delegate.discardState();
}
@Override
public long getStateSize() {
return size; // can not rely on delegate.getStateSize because it can be shared
}
public List<Long> getOffsets() {
return offsets;
}
public StreamStateHandle getDelegate() {
return delegate;
}
public Info getInfo() {
return info;
}
public int getSubtaskIndex() {
return subtaskIndex;
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
final AbstractChannelStateHandle<?> that = (AbstractChannelStateHandle<?>) o;
return subtaskIndex == that.subtaskIndex
&& info.equals(that.info)
&& delegate.equals(that.delegate)
&& offsets.equals(that.offsets);
}
@Override
public int hashCode() {
return Objects.hash(subtaskIndex, info, delegate, offsets);
}
@Override
public String toString() {
return "AbstractChannelStateHandle{"
+ "info="
+ info
+ ", delegate="
+ delegate
+ ", offsets="
+ offsets
+ ", size="
+ size
+ '}';
}
/** Describes the underlying content. */
public static | AbstractChannelStateHandle |
java | bumptech__glide | library/src/main/java/com/bumptech/glide/load/model/LazyHeaders.java | {
"start": 3027,
"end": 4731
} | class ____ {
private static final String USER_AGENT_HEADER = "User-Agent";
private static final String DEFAULT_USER_AGENT = getSanitizedUserAgent();
private static final Map<String, List<LazyHeaderFactory>> DEFAULT_HEADERS;
// Set Accept-Encoding header to do our best to avoid gzip since it's both inefficient for
// images and also makes it more difficult for us to detect and prevent partial content
// rendering. See #440.
static {
Map<String, List<LazyHeaderFactory>> temp = new HashMap<>(2);
if (!TextUtils.isEmpty(DEFAULT_USER_AGENT)) {
temp.put(
USER_AGENT_HEADER,
Collections.<LazyHeaderFactory>singletonList(
new StringHeaderFactory(DEFAULT_USER_AGENT)));
}
DEFAULT_HEADERS = Collections.unmodifiableMap(temp);
}
private boolean copyOnModify = true;
private Map<String, List<LazyHeaderFactory>> headers = DEFAULT_HEADERS;
private boolean isUserAgentDefault = true;
/**
* Adds a value for the given header and returns this builder.
*
* <p>Use {@link #addHeader(String, LazyHeaderFactory)} if obtaining the value requires I/O
* (i.e. an OAuth token).
*
* @see #addHeader(String, LazyHeaderFactory)
*/
public Builder addHeader(@NonNull String key, @NonNull String value) {
return addHeader(key, new StringHeaderFactory(value));
}
/**
* Adds an {@link LazyHeaderFactory} that will be used to construct a value for the given key
* lazily on a background thread.
*
* <p>Headers may have multiple values whose order is defined by the order in which this method
* is called.
*
* <p>This | Builder |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/SocketInputWrapper.java | {
"start": 1541,
"end": 3219
} | class ____ extends FilterInputStream {
private final Socket socket;
private final boolean hasChannel;
SocketInputWrapper(Socket s, InputStream is) {
super(is);
this.socket = s;
this.hasChannel = s.getChannel() != null;
if (hasChannel) {
Preconditions.checkArgument(is instanceof SocketInputStream,
"Expected a SocketInputStream when there is a channel. " +
"Got: %s", is);
}
}
/**
* Set the timeout for reads from this stream.
*
* Note: the behavior here can differ subtly depending on whether the
* underlying socket has an associated Channel. In particular, if there is no
* channel, then this call will affect the socket timeout for <em>all</em>
* readers of this socket. If there is a channel, then this call will affect
* the timeout only for <em>this</em> stream. As such, it is recommended to
* only create one {@link SocketInputWrapper} instance per socket.
*
* @param timeoutMs
* the new timeout, 0 for no timeout
* @throws SocketException
* if the timeout cannot be set
*/
public void setTimeout(long timeoutMs) throws SocketException {
if (hasChannel) {
((SocketInputStream)in).setTimeout(timeoutMs);
} else {
socket.setSoTimeout((int)timeoutMs);
}
}
/**
* @return an underlying ReadableByteChannel implementation.
* @throws IllegalStateException if this socket does not have a channel
*/
public ReadableByteChannel getReadableByteChannel() {
Preconditions.checkState(hasChannel,
"Socket %s does not have a channel",
this.socket);
return (SocketInputStream)in;
}
} | SocketInputWrapper |
java | elastic__elasticsearch | x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlGetQueryRequest.java | {
"start": 621,
"end": 1345
} | class ____ extends ActionRequest {
private final AsyncExecutionId asyncExecutionId;
public EsqlGetQueryRequest(AsyncExecutionId asyncExecutionId) {
this.asyncExecutionId = asyncExecutionId;
}
public AsyncExecutionId id() {
return asyncExecutionId;
}
public EsqlGetQueryRequest(StreamInput streamInput) throws IOException {
super(streamInput);
asyncExecutionId = AsyncExecutionId.decode(streamInput.readString());
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeWriteable(asyncExecutionId);
}
@Override
public ActionRequestValidationException validate() {
return null;
}
}
| EsqlGetQueryRequest |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/runtime/asyncprocessing/declare/ContextVariable.java | {
"start": 1019,
"end": 1749
} | class ____<T> {
final DeclarationManager manager;
final int ordinal;
@Nullable final Supplier<T> initializer;
boolean initialized = false;
ContextVariable(DeclarationManager manager, int ordinal, Supplier<T> initializer) {
this.manager = manager;
this.ordinal = ordinal;
this.initializer = initializer;
}
public T get() {
if (!initialized && initializer != null) {
manager.setVariableValue(ordinal, initializer.get());
initialized = true;
}
return manager.getVariableValue(ordinal);
}
public void set(T newValue) {
initialized = true;
manager.setVariableValue(ordinal, newValue);
}
}
| ContextVariable |
java | apache__camel | test-infra/camel-test-infra-common/src/main/java/org/apache/camel/test/infra/common/services/AbstractService.java | {
"start": 912,
"end": 1594
} | class ____ implements InfrastructureService {
@Override
public void initialize() {
try {
setUp();
registerProperties();
} catch (Exception e) {
throw new IllegalArgumentException(e);
}
}
@Override
public void shutdown() {
try {
tearDown();
} catch (Exception e) {
throw new IllegalArgumentException(e);
}
}
protected abstract void registerProperties(BiConsumer<String, String> store);
@Deprecated
protected abstract void setUp() throws Exception;
@Deprecated
protected abstract void tearDown() throws Exception;
}
| AbstractService |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/TestRMWebServicesConfigurationMutation.java | {
"start": 6204,
"end": 48180
} | class ____ extends AbstractBinder {
@Override
protected void configure() {
try {
userName = UserGroupInformation.getCurrentUser().getShortUserName();
} catch (IOException ioe) {
throw new RuntimeException("Unable to get current user name "
+ ioe.getMessage(), ioe);
}
csConf = new CapacitySchedulerConfiguration(new Configuration(false),
false);
setupQueueConfiguration(csConf);
conf = new YarnConfiguration();
conf.setClass(YarnConfiguration.RM_SCHEDULER, CapacityScheduler.class,
ResourceScheduler.class);
conf.set(YarnConfiguration.SCHEDULER_CONFIGURATION_STORE_CLASS,
YarnConfiguration.MEMORY_CONFIGURATION_STORE);
conf.set(YarnConfiguration.YARN_ADMIN_ACL, userName);
try {
FileOutputStream out = new FileOutputStream(getCapacitySchedulerConfigFileInTarget());
csConf.writeXml(out);
out.close();
} catch (IOException e) {
throw new RuntimeException("Failed to write XML file", e);
}
rm = new MockRM(conf);
request = mock(HttpServletRequest.class);
when(request.getScheme()).thenReturn("http");
final HttpServletResponse response = mock(HttpServletResponse.class);
bind(rm).to(ResourceManager.class).named("rm");
bind(csConf).to(Configuration.class).named("conf");
Principal principal = () -> userName;
bind(request).to(HttpServletRequest.class);
when(request.getUserPrincipal()).thenReturn(principal);
bind(response).to(HttpServletResponse.class);
}
}
@BeforeAll
public static void beforeClass() {
backupSchedulerConfigFileInTarget();
}
@AfterAll
public static void afterClass() {
restoreSchedulerConfigFileInTarget();
}
@Override
@BeforeEach
public void setUp() throws Exception {
super.setUp();
}
private static void setupQueueConfiguration(
CapacitySchedulerConfiguration config) {
config.setQueues(ROOT, new String[]{"a", "b", "c", "mappedqueue"});
config.setCapacity(ROOT_A, 25f);
config.setMaximumCapacity(ROOT_A, 50f);
config.setQueues(ROOT_A, new String[]{"a1", "a2"});
config.setCapacity(ROOT_A_A1, 100f);
config.setCapacity(ROOT_A_A2, 0f);
config.setCapacity(ROOT_B, 75f);
config.setCapacity(ROOT_C, 0f);
config.setQueues(ROOT_C, new String[] {"c1"});
config.setCapacity(ROOT_C_C1, 0f);
config.setCapacity(ROOT_D, 0f);
config.set(CapacitySchedulerConfiguration.QUEUE_MAPPING,
"g:hadoop:mappedqueue");
}
public TestRMWebServicesConfigurationMutation() {
}
private CapacitySchedulerConfiguration getSchedulerConf()
throws JSONException {
WebTarget r = targetWithJsonObject();
Response response =
r.path("ws").path("v1").path("cluster")
.queryParam("user.name", userName).path("scheduler-conf")
.request(MediaType.APPLICATION_JSON)
.get(Response.class);
assertEquals(Status.OK.getStatusCode(), response.getStatus());
JSONObject json = response.readEntity(JSONObject.class).
getJSONObject("configuration");
JSONArray items = (JSONArray) json.get("property");
CapacitySchedulerConfiguration parsedConf =
new CapacitySchedulerConfiguration();
for (int i = 0; i < items.length(); i++) {
JSONObject obj = (JSONObject) items.get(i);
parsedConf.set(obj.get("name").toString(),
obj.get("value").toString());
}
return parsedConf;
}
@Test
public void testGetSchedulerConf() throws Exception {
CapacitySchedulerConfiguration orgConf = getSchedulerConf();
assertNotNull(orgConf);
assertEquals(4, orgConf.getQueues(ROOT).size());
}
@Test
public void testFormatSchedulerConf() throws Exception {
CapacitySchedulerConfiguration newConf = getSchedulerConf();
assertNotNull(newConf);
assertEquals(4, newConf.getQueues(ROOT).size());
SchedConfUpdateInfo updateInfo = new SchedConfUpdateInfo();
Map<String, String> nearEmptyCapacity = new HashMap<>();
nearEmptyCapacity.put(CapacitySchedulerConfiguration.CAPACITY, "1E-4");
QueueConfigInfo d = new QueueConfigInfo("root.formattest",
nearEmptyCapacity);
updateInfo.getAddQueueInfo().add(d);
Map<String, String> stoppedParam = new HashMap<>();
stoppedParam.put(CapacitySchedulerConfiguration.STATE,
QueueState.STOPPED.toString());
QueueConfigInfo stoppedInfo = new QueueConfigInfo("root.formattest",
stoppedParam);
updateInfo.getUpdateQueueInfo().add(stoppedInfo);
// Add a queue root.formattest to the existing three queues
WebTarget r = target().register(SchedConfUpdateInfoWriter.class);
Response response = r.path("ws").path("v1").path("cluster")
.path("scheduler-conf").queryParam("user.name", userName)
.request(MediaType.APPLICATION_JSON)
.put(Entity.entity(updateInfo, MediaType.APPLICATION_JSON), Response.class);
newConf = getSchedulerConf();
assertNotNull(newConf);
assertEquals(5, newConf.getQueues(ROOT).size());
// Format the scheduler config and validate root.formattest is not present
response = r.path("ws").path("v1").path("cluster")
.queryParam("user.name", userName)
.path(RMWSConsts.FORMAT_SCHEDULER_CONF)
.request(MediaType.APPLICATION_JSON).get(Response.class);
assertEquals(Status.OK.getStatusCode(), response.getStatus());
newConf = getSchedulerConf();
assertEquals(4, newConf.getQueues(ROOT).size());
}
private long getConfigVersion() throws Exception {
WebTarget r = targetWithJsonObject();
Response response = r.path("ws").path("v1").path("cluster")
.queryParam("user.name", userName)
.path(RMWSConsts.SCHEDULER_CONF_VERSION)
.request(MediaType.APPLICATION_JSON).get(Response.class);
assertEquals(Status.OK.getStatusCode(), response.getStatus());
JSONObject json = response.readEntity(JSONObject.class).
getJSONObject("configversion");
return Long.parseLong(json.get("versionID").toString());
}
@Test
public void testSchedulerConfigVersion() throws Exception {
assertEquals(1, getConfigVersion());
testAddNestedQueue();
assertEquals(2, getConfigVersion());
}
@Test
public void testAddNestedQueue() throws Exception {
CapacitySchedulerConfiguration orgConf = getSchedulerConf();
assertNotNull(orgConf);
assertEquals(4, orgConf.getQueues(ROOT).size());
WebTarget r = target().register(SchedConfUpdateInfoWriter.class);
Response response;
// Add parent queue root.d with two children d1 and d2.
SchedConfUpdateInfo updateInfo = new SchedConfUpdateInfo();
Map<String, String> d1Capacity = new HashMap<>();
d1Capacity.put(CapacitySchedulerConfiguration.CAPACITY, "25");
d1Capacity.put(CapacitySchedulerConfiguration.MAXIMUM_CAPACITY, "25");
Map<String, String> nearEmptyCapacity = new HashMap<>();
nearEmptyCapacity.put(CapacitySchedulerConfiguration.CAPACITY, "1E-4");
nearEmptyCapacity.put(CapacitySchedulerConfiguration.MAXIMUM_CAPACITY,
"1E-4");
Map<String, String> d2Capacity = new HashMap<>();
d2Capacity.put(CapacitySchedulerConfiguration.CAPACITY, "75");
d2Capacity.put(CapacitySchedulerConfiguration.MAXIMUM_CAPACITY, "75");
QueueConfigInfo d1 = new QueueConfigInfo("root.d.d1", d1Capacity);
QueueConfigInfo d2 = new QueueConfigInfo("root.d.d2", d2Capacity);
QueueConfigInfo d = new QueueConfigInfo("root.d", nearEmptyCapacity);
updateInfo.getAddQueueInfo().add(d1);
updateInfo.getAddQueueInfo().add(d2);
updateInfo.getAddQueueInfo().add(d);
response =
r.path("ws").path("v1").path("cluster")
.path("scheduler-conf").queryParam("user.name", userName)
.request(MediaType.APPLICATION_JSON)
.put(Entity.entity(updateInfo, MediaType.APPLICATION_JSON), Response.class);
assertEquals(Status.OK.getStatusCode(), response.getStatus());
CapacitySchedulerConfiguration newCSConf =
((CapacityScheduler) rm.getResourceScheduler()).getConfiguration();
assertEquals(5, newCSConf.getQueues(ROOT).size());
assertEquals(2, newCSConf.getQueues(ROOT_D).size());
assertEquals(25.0f, newCSConf.getNonLabeledQueueCapacity(new QueuePath("root.d.d1")),
0.01f);
assertEquals(75.0f, newCSConf.getNonLabeledQueueCapacity(new QueuePath("root.d.d2")),
0.01f);
CapacitySchedulerConfiguration newConf = getSchedulerConf();
assertNotNull(newConf);
assertEquals(5, newConf.getQueues(ROOT).size());
}
@Test
public void testAddWithUpdate() throws Exception {
WebTarget r = target().register(SchedConfUpdateInfoWriter.class);
Response response;
// Add root.d with capacity 25, reducing root.b capacity from 75 to 50.
SchedConfUpdateInfo updateInfo = new SchedConfUpdateInfo();
Map<String, String> dCapacity = new HashMap<>();
dCapacity.put(CapacitySchedulerConfiguration.CAPACITY, "25");
Map<String, String> bCapacity = new HashMap<>();
bCapacity.put(CapacitySchedulerConfiguration.CAPACITY, "50");
QueueConfigInfo d = new QueueConfigInfo("root.d", dCapacity);
QueueConfigInfo b = new QueueConfigInfo("root.b", bCapacity);
updateInfo.getAddQueueInfo().add(d);
updateInfo.getUpdateQueueInfo().add(b);
response =
r.path("ws").path("v1").path("cluster")
.path("scheduler-conf").queryParam("user.name", userName)
.request(MediaType.APPLICATION_JSON)
.put(Entity.entity(updateInfo, MediaType.APPLICATION_JSON), Response.class);
assertEquals(Status.OK.getStatusCode(), response.getStatus());
CapacitySchedulerConfiguration newCSConf =
((CapacityScheduler) rm.getResourceScheduler()).getConfiguration();
assertEquals(5, newCSConf.getQueues(ROOT).size());
assertEquals(25.0f, newCSConf.getNonLabeledQueueCapacity(new QueuePath("root.d")), 0.01f);
assertEquals(50.0f, newCSConf.getNonLabeledQueueCapacity(new QueuePath("root.b")), 0.01f);
}
@Test
public void testUnsetParentQueueOrderingPolicy() throws Exception {
WebTarget r = target().register(SchedConfUpdateInfoWriter.class);
Response response;
// Update ordering policy of Leaf Queue root.b to fair
SchedConfUpdateInfo updateInfo1 = new SchedConfUpdateInfo();
Map<String, String> updateParam = new HashMap<>();
updateParam.put(CapacitySchedulerConfiguration.ORDERING_POLICY,
"fair");
QueueConfigInfo aUpdateInfo = new QueueConfigInfo("root.b", updateParam);
updateInfo1.getUpdateQueueInfo().add(aUpdateInfo);
response = r.path("ws").path("v1").path("cluster")
.path("scheduler-conf").queryParam("user.name", userName)
.request(MediaType.APPLICATION_JSON)
.put(Entity.entity(updateInfo1, MediaType.APPLICATION_JSON), Response.class);
assertEquals(Status.OK.getStatusCode(), response.getStatus());
CapacitySchedulerConfiguration newCSConf =
((CapacityScheduler) rm.getResourceScheduler()).getConfiguration();
String bOrderingPolicy = CapacitySchedulerConfiguration.PREFIX
+ "root.b" + CapacitySchedulerConfiguration.DOT + ORDERING_POLICY;
assertEquals("fair", newCSConf.get(bOrderingPolicy));
stopQueue(ROOT_B);
// Add root.b.b1 which makes root.b a Parent Queue
SchedConfUpdateInfo updateInfo2 = new SchedConfUpdateInfo();
Map<String, String> capacity = new HashMap<>();
capacity.put(CapacitySchedulerConfiguration.CAPACITY, "100");
QueueConfigInfo b1 = new QueueConfigInfo("root.b.b1", capacity);
updateInfo2.getAddQueueInfo().add(b1);
response = r.path("ws").path("v1").path("cluster")
.path("scheduler-conf").queryParam("user.name", userName)
.request(MediaType.APPLICATION_JSON)
.put(Entity.entity(updateInfo2, MediaType.APPLICATION_JSON), Response.class);
// Validate unset ordering policy of root.b after converted to
// Parent Queue
assertEquals(Status.OK.getStatusCode(), response.getStatus());
newCSConf = ((CapacityScheduler) rm.getResourceScheduler())
.getConfiguration();
bOrderingPolicy = CapacitySchedulerConfiguration.PREFIX
+ "root.b" + CapacitySchedulerConfiguration.DOT + ORDERING_POLICY;
assertNull(newCSConf.get(bOrderingPolicy),
"Failed to unset Parent Queue OrderingPolicy");
}
@Test
public void testUnsetLeafQueueOrderingPolicy() throws Exception {
WebTarget r = target().register(SchedConfUpdateInfoWriter.class);
Response response;
// Update ordering policy of Parent Queue root.c to priority-utilization
SchedConfUpdateInfo updateInfo1 = new SchedConfUpdateInfo();
Map<String, String> updateParam = new HashMap<>();
updateParam.put(CapacitySchedulerConfiguration.ORDERING_POLICY,
"priority-utilization");
QueueConfigInfo aUpdateInfo = new QueueConfigInfo("root.c", updateParam);
updateInfo1.getUpdateQueueInfo().add(aUpdateInfo);
response = r.path("ws").path("v1").path("cluster")
.path("scheduler-conf").queryParam("user.name", userName)
.request(MediaType.APPLICATION_JSON)
.put(Entity.entity(updateInfo1, MediaType.APPLICATION_JSON), Response.class);
assertEquals(Status.OK.getStatusCode(), response.getStatus());
CapacitySchedulerConfiguration newCSConf =
((CapacityScheduler) rm.getResourceScheduler()).getConfiguration();
String cOrderingPolicy = CapacitySchedulerConfiguration.PREFIX
+ "root.c" + CapacitySchedulerConfiguration.DOT + ORDERING_POLICY;
assertEquals("priority-utilization", newCSConf.get(cOrderingPolicy));
stopQueue(ROOT_C_C1);
// Remove root.c.c1 which makes root.c a Leaf Queue
SchedConfUpdateInfo updateInfo2 = new SchedConfUpdateInfo();
updateInfo2.getRemoveQueueInfo().add("root.c.c1");
response = r.path("ws").path("v1").path("cluster")
.path("scheduler-conf").queryParam("user.name", userName)
.request(MediaType.APPLICATION_JSON)
.put(Entity.entity(updateInfo2, MediaType.APPLICATION_JSON), Response.class);
assertEquals(Status.OK.getStatusCode(), response.getStatus());
// Validate unset ordering policy of root.c after converted to
// Leaf Queue
newCSConf = ((CapacityScheduler) rm.getResourceScheduler())
.getConfiguration();
cOrderingPolicy = CapacitySchedulerConfiguration.PREFIX
+ "root.c" + CapacitySchedulerConfiguration.DOT + ORDERING_POLICY;
assertNull(newCSConf.get(cOrderingPolicy),
"Failed to unset Leaf Queue OrderingPolicy");
}
@Test
public void testRemoveQueue() throws Exception {
WebTarget r = target().register(SchedConfUpdateInfoWriter.class);
Response response;
stopQueue(ROOT_A_A2);
// Remove root.a.a2
SchedConfUpdateInfo updateInfo = new SchedConfUpdateInfo();
updateInfo.getRemoveQueueInfo().add("root.a.a2");
response =
r.path("ws").path("v1").path("cluster")
.path("scheduler-conf").queryParam("user.name", userName)
.request(MediaType.APPLICATION_JSON)
.put(Entity.entity(updateInfo, MediaType.APPLICATION_JSON),
Response.class);
assertEquals(Status.OK.getStatusCode(), response.getStatus());
CapacitySchedulerConfiguration newCSConf =
((CapacityScheduler) rm.getResourceScheduler()).getConfiguration();
assertEquals(1, newCSConf.getQueues(ROOT_A).size(),
"Failed to remove the queue");
assertEquals("a1", newCSConf.getQueues(ROOT_A).get(0),
"Failed to remove the right queue");
}
@Test
public void testStopWithRemoveQueue() throws Exception {
WebTarget r = target().register(SchedConfUpdateInfoWriter.class);
Response response;
// Set state of queues to STOPPED.
SchedConfUpdateInfo updateInfo = new SchedConfUpdateInfo();
Map<String, String> stoppedParam = new HashMap<>();
stoppedParam.put(CapacitySchedulerConfiguration.STATE,
QueueState.STOPPED.toString());
QueueConfigInfo stoppedInfo = new QueueConfigInfo("root.a.a2",
stoppedParam);
updateInfo.getUpdateQueueInfo().add(stoppedInfo);
updateInfo.getRemoveQueueInfo().add("root.a.a2");
response = r.path("ws").path("v1").path("cluster")
.path("scheduler-conf").queryParam("user.name", userName)
.request(MediaType.APPLICATION_JSON)
.put(Entity.entity(updateInfo, MediaType.APPLICATION_JSON),
Response.class);
assertEquals(Status.OK.getStatusCode(), response.getStatus());
CapacitySchedulerConfiguration newCSConf =
((CapacityScheduler) rm.getResourceScheduler()).getConfiguration();
assertEquals(1, newCSConf.getQueues(ROOT_A).size());
assertEquals("a1", newCSConf.getQueues(ROOT_A).get(0));
}
@Test
public void testRemoveQueueWhichHasQueueMapping() throws Exception {
WebTarget r = target().register(SchedConfUpdateInfoWriter.class);
Response response;
CapacityScheduler cs = (CapacityScheduler) rm.getResourceScheduler();
// Validate Queue 'mappedqueue' exists before deletion
assertNotNull(cs.getQueue("mappedqueue"),
"Failed to setup CapacityScheduler Configuration");
// Set state of queue 'mappedqueue' to STOPPED.
SchedConfUpdateInfo updateInfo = new SchedConfUpdateInfo();
Map<String, String> stoppedParam = new HashMap<>();
stoppedParam.put(CapacitySchedulerConfiguration.STATE, QueueState.STOPPED.toString());
QueueConfigInfo stoppedInfo = new QueueConfigInfo("root.mappedqueue", stoppedParam);
updateInfo.getUpdateQueueInfo().add(stoppedInfo);
// Remove queue 'mappedqueue' using update scheduler-conf
updateInfo.getRemoveQueueInfo().add("root.mappedqueue");
response = r.path("ws").path("v1").path("cluster").path("scheduler-conf")
.queryParam("user.name", userName).request(MediaType.APPLICATION_JSON)
.put(Entity.entity(updateInfo, MediaType.APPLICATION_JSON), Response.class);
String responseText = response.readEntity(String.class);
// Queue 'mappedqueue' deletion will fail as there is queue mapping present
assertEquals(Status.BAD_REQUEST.getStatusCode(), response.getStatus());
assertTrue(responseText.contains(
"Failed to re-init queues : " + "org.apache.hadoop.yarn.exceptions.YarnException:"
+ " Path root 'mappedqueue' does not exist. Path 'mappedqueue' is invalid"));
// Validate queue 'mappedqueue' exists after above failure
CapacitySchedulerConfiguration newCSConf =
((CapacityScheduler) rm.getResourceScheduler()).getConfiguration();
assertEquals(4, newCSConf.getQueues(ROOT).size());
assertNotNull(cs.getQueue("mappedqueue"),
"CapacityScheduler Configuration is corrupt");
}
@Test
public void testStopWithConvertLeafToParentQueue() throws Exception {
WebTarget r = target().register(SchedConfUpdateInfoWriter.class);
Response response;
// Set state of queues to STOPPED.
SchedConfUpdateInfo updateInfo = new SchedConfUpdateInfo();
Map<String, String> stoppedParam = new HashMap<>();
stoppedParam.put(CapacitySchedulerConfiguration.STATE,
QueueState.STOPPED.toString());
QueueConfigInfo stoppedInfo = new QueueConfigInfo("root.b",
stoppedParam);
updateInfo.getUpdateQueueInfo().add(stoppedInfo);
Map<String, String> b1Capacity = new HashMap<>();
b1Capacity.put(CapacitySchedulerConfiguration.CAPACITY, "100");
QueueConfigInfo b1 = new QueueConfigInfo("root.b.b1", b1Capacity);
updateInfo.getAddQueueInfo().add(b1);
response = r.path("ws").path("v1").path("cluster")
.path("scheduler-conf").queryParam("user.name", userName)
.request(MediaType.APPLICATION_JSON)
.put(Entity.entity(updateInfo, MediaType.APPLICATION_JSON), Response.class);
assertEquals(Status.OK.getStatusCode(), response.getStatus());
CapacitySchedulerConfiguration newCSConf =
((CapacityScheduler) rm.getResourceScheduler()).getConfiguration();
assertEquals(1, newCSConf.getQueues(ROOT_B).size());
assertEquals("b1", newCSConf.getQueues(ROOT_B).get(0));
}
@Test
public void testRemoveParentQueue() throws Exception {
WebTarget r = target().register(SchedConfUpdateInfoWriter.class);
Response response;
stopQueue(ROOT_C, ROOT_C_C1);
// Remove root.c (parent queue)
SchedConfUpdateInfo updateInfo = new SchedConfUpdateInfo();
updateInfo.getRemoveQueueInfo().add("root.c");
response =
r.path("ws").path("v1").path("cluster")
.path("scheduler-conf").queryParam("user.name", userName)
.request(MediaType.APPLICATION_JSON)
.put(Entity.entity(updateInfo, MediaType.APPLICATION_JSON), Response.class);
assertEquals(Status.OK.getStatusCode(), response.getStatus());
CapacitySchedulerConfiguration newCSConf =
((CapacityScheduler) rm.getResourceScheduler()).getConfiguration();
assertEquals(3, newCSConf.getQueues(ROOT).size());
assertEquals(0, newCSConf.getQueues(ROOT_C).size());
}
@Test
public void testRemoveParentQueueWithCapacity() throws Exception {
WebTarget r = target().register(SchedConfUpdateInfoWriter.class);
Response response;
stopQueue(ROOT_A, ROOT_A_A1, ROOT_A_A2);
// Remove root.a (parent queue) with capacity 25
SchedConfUpdateInfo updateInfo = new SchedConfUpdateInfo();
updateInfo.getRemoveQueueInfo().add("root.a");
// Set root.b capacity to 100
Map<String, String> bCapacity = new HashMap<>();
bCapacity.put(CapacitySchedulerConfiguration.CAPACITY, "100");
QueueConfigInfo b = new QueueConfigInfo("root.b", bCapacity);
updateInfo.getUpdateQueueInfo().add(b);
response =
r.path("ws").path("v1").path("cluster")
.path("scheduler-conf").queryParam("user.name", userName)
.request(MediaType.APPLICATION_JSON)
.put(Entity.entity(updateInfo, MediaType.APPLICATION_JSON), Response.class);
assertEquals(Status.OK.getStatusCode(), response.getStatus());
CapacitySchedulerConfiguration newCSConf =
((CapacityScheduler) rm.getResourceScheduler()).getConfiguration();
assertEquals(3, newCSConf.getQueues(ROOT).size());
assertEquals(100.0f, newCSConf.getNonLabeledQueueCapacity(new QueuePath("root.b")),
0.01f);
}
@Test
public void testRemoveMultipleQueues() throws Exception {
WebTarget r = target().register(SchedConfUpdateInfoWriter.class);
Response response;
stopQueue(ROOT_B, ROOT_C, ROOT_C_C1);
// Remove root.b and root.c
SchedConfUpdateInfo updateInfo = new SchedConfUpdateInfo();
updateInfo.getRemoveQueueInfo().add("root.b");
updateInfo.getRemoveQueueInfo().add("root.c");
Map<String, String> aCapacity = new HashMap<>();
aCapacity.put(CapacitySchedulerConfiguration.CAPACITY, "100");
aCapacity.put(CapacitySchedulerConfiguration.MAXIMUM_CAPACITY, "100");
QueueConfigInfo configInfo = new QueueConfigInfo("root.a", aCapacity);
updateInfo.getUpdateQueueInfo().add(configInfo);
response =
r.path("ws").path("v1").path("cluster")
.path("scheduler-conf").queryParam("user.name", userName)
.request(MediaType.APPLICATION_JSON)
.put(Entity.entity(updateInfo, MediaType.APPLICATION_JSON), Response.class);
assertEquals(Status.OK.getStatusCode(), response.getStatus());
CapacitySchedulerConfiguration newCSConf =
((CapacityScheduler) rm.getResourceScheduler()).getConfiguration();
assertEquals(2, newCSConf.getQueues(ROOT).size());
}
private void stopQueue(QueuePath... queuePaths) throws Exception {
WebTarget r = target().register(SchedConfUpdateInfoWriter.class);
Response response;
// Set state of queues to STOPPED.
SchedConfUpdateInfo updateInfo = new SchedConfUpdateInfo();
Map<String, String> stoppedParam = new HashMap<>();
stoppedParam.put(CapacitySchedulerConfiguration.STATE,
QueueState.STOPPED.toString());
for (QueuePath queue : queuePaths) {
QueueConfigInfo stoppedInfo = new QueueConfigInfo(queue.getFullPath(), stoppedParam);
updateInfo.getUpdateQueueInfo().add(stoppedInfo);
}
response =
r.path("ws").path("v1").path("cluster")
.path("scheduler-conf").queryParam("user.name", userName)
.request(MediaType.APPLICATION_JSON)
.put(Entity.entity(updateInfo, MediaType.APPLICATION_JSON), Response.class);
assertEquals(Status.OK.getStatusCode(), response.getStatus());
CapacitySchedulerConfiguration newCSConf =
((CapacityScheduler) rm.getResourceScheduler()).getConfiguration();
for (QueuePath queue : queuePaths) {
assertEquals(QueueState.STOPPED, newCSConf.getState(queue));
}
}
@Test
public void testUpdateQueue() throws Exception {
WebTarget r = target().register(SchedConfUpdateInfoWriter.class);
Response response;
// Update config value.
SchedConfUpdateInfo updateInfo = new SchedConfUpdateInfo();
Map<String, String> updateParam = new HashMap<>();
updateParam.put(CapacitySchedulerConfiguration.MAXIMUM_AM_RESOURCE_SUFFIX,
"0.2");
QueueConfigInfo aUpdateInfo = new QueueConfigInfo("root.a", updateParam);
updateInfo.getUpdateQueueInfo().add(aUpdateInfo);
CapacityScheduler cs = (CapacityScheduler) rm.getResourceScheduler();
assertEquals(CapacitySchedulerConfiguration
.DEFAULT_MAXIMUM_APPLICATIONMASTERS_RESOURCE_PERCENT,
cs.getConfiguration()
.getMaximumApplicationMasterResourcePerQueuePercent(ROOT_A),
0.001f);
response =
r.path("ws").path("v1").path("cluster")
.path("scheduler-conf").queryParam("user.name", userName)
.request(MediaType.APPLICATION_JSON)
.put(Entity.entity(updateInfo, MediaType.APPLICATION_JSON), Response.class);
LOG.debug("Response headers: {}.", response.getHeaders());
assertEquals(Status.OK.getStatusCode(), response.getStatus());
CapacitySchedulerConfiguration newCSConf = cs.getConfiguration();
assertEquals(0.2f, newCSConf
.getMaximumApplicationMasterResourcePerQueuePercent(ROOT_A), 0.001f);
// Remove config. Config value should be reverted to default.
updateParam.put(CapacitySchedulerConfiguration.MAXIMUM_AM_RESOURCE_SUFFIX,
null);
aUpdateInfo = new QueueConfigInfo("root.a", updateParam);
updateInfo.getUpdateQueueInfo().clear();
updateInfo.getUpdateQueueInfo().add(aUpdateInfo);
response =
r.path("ws").path("v1").path("cluster")
.path("scheduler-conf").queryParam("user.name", userName)
.request(MediaType.APPLICATION_JSON)
.put(Entity.entity(updateInfo, MediaType.APPLICATION_JSON),
Response.class);
assertEquals(Status.OK.getStatusCode(), response.getStatus());
newCSConf = cs.getConfiguration();
assertEquals(CapacitySchedulerConfiguration
.DEFAULT_MAXIMUM_APPLICATIONMASTERS_RESOURCE_PERCENT, newCSConf
.getMaximumApplicationMasterResourcePerQueuePercent(ROOT_A),
0.001f);
}
@Test
public void testUpdateQueueCapacity() throws Exception {
WebTarget r = target().register(SchedConfUpdateInfoWriter.class);
Response response;
// Update root.a and root.b capacity to 50.
SchedConfUpdateInfo updateInfo = new SchedConfUpdateInfo();
Map<String, String> updateParam = new HashMap<>();
updateParam.put(CapacitySchedulerConfiguration.CAPACITY, "50");
QueueConfigInfo aUpdateInfo = new QueueConfigInfo("root.a", updateParam);
QueueConfigInfo bUpdateInfo = new QueueConfigInfo("root.b", updateParam);
updateInfo.getUpdateQueueInfo().add(aUpdateInfo);
updateInfo.getUpdateQueueInfo().add(bUpdateInfo);
response =
r.path("ws").path("v1").path("cluster")
.path("scheduler-conf").queryParam("user.name", userName)
.request(MediaType.APPLICATION_JSON)
.put(Entity.entity(updateInfo, MediaType.APPLICATION_JSON), Response.class);
assertEquals(Status.OK.getStatusCode(), response.getStatus());
CapacitySchedulerConfiguration newCSConf =
((CapacityScheduler) rm.getResourceScheduler()).getConfiguration();
assertEquals(50.0f, newCSConf.getNonLabeledQueueCapacity(new QueuePath("root.a")), 0.01f);
assertEquals(50.0f, newCSConf.getNonLabeledQueueCapacity(new QueuePath("root.b")), 0.01f);
}
@Test
public void testGlobalConfChange() throws Exception {
WebTarget r = target().register(SchedConfUpdateInfoWriter.class);
Response response;
// Set maximum-applications to 30000.
SchedConfUpdateInfo updateInfo = new SchedConfUpdateInfo();
updateInfo.getGlobalParams().put(CapacitySchedulerConfiguration.PREFIX +
"maximum-applications", "30000");
response = r.path("ws").path("v1").path("cluster")
.path("scheduler-conf").queryParam("user.name", userName)
.request(MediaType.APPLICATION_JSON)
.put(Entity.entity(updateInfo, MediaType.APPLICATION_JSON), Response.class);
assertEquals(Status.OK.getStatusCode(), response.getStatus());
CapacitySchedulerConfiguration newCSConf =
((CapacityScheduler) rm.getResourceScheduler()).getConfiguration();
assertEquals(30000, newCSConf.getMaximumSystemApplications());
updateInfo.getGlobalParams().put(CapacitySchedulerConfiguration.PREFIX +
"maximum-applications", null);
// Unset maximum-applications. Should be set to default.
response =
r.path("ws").path("v1").path("cluster")
.path("scheduler-conf").queryParam("user.name", userName)
.request(MediaType.APPLICATION_JSON)
.put(Entity.entity(updateInfo, MediaType.APPLICATION_JSON),
Response.class);
assertEquals(Status.OK.getStatusCode(), response.getStatus());
newCSConf =
((CapacityScheduler) rm.getResourceScheduler()).getConfiguration();
assertEquals(CapacitySchedulerConfiguration
.DEFAULT_MAXIMUM_SYSTEM_APPLICATIIONS,
newCSConf.getMaximumSystemApplications());
}
@Test
public void testNodeLabelRemovalResidualConfigsAreCleared() throws Exception {
WebTarget r = target().register(NodeLabelsInfoReader.class);
Response response;
// 1. Create Node Label: label1
NodeLabelsInfo nodeLabelsInfo = new NodeLabelsInfo();
nodeLabelsInfo.getNodeLabelsInfo().add(new NodeLabelInfo(LABEL_1));
WebTarget addNodeLabelsResource = r.path("ws").path("v1").path("cluster")
.path("add-node-labels");
WebTarget getNodeLabelsResource = r.path("ws").path("v1").path("cluster")
.path("get-node-labels");
WebTarget removeNodeLabelsResource = r.path("ws").path("v1").path("cluster")
.path("remove-node-labels");
WebTarget schedulerConfResource = r.path("ws").path("v1").path("cluster")
.path(RMWSConsts.SCHEDULER_CONF);
response = addNodeLabelsResource.queryParam("user.name", userName)
.request(MediaType.APPLICATION_JSON)
.post(Entity.entity(logAndReturnJson(addNodeLabelsResource,
toJson(nodeLabelsInfo, NodeLabelsInfo.class)), MediaType.APPLICATION_JSON), Response.class);
// 2. Verify new Node Label
response = getNodeLabelsResource.queryParam("user.name", userName)
.request(MediaType.APPLICATION_JSON).get(Response.class);
assertEquals(MediaType.APPLICATION_JSON_TYPE + ";" + JettyUtils.UTF_8,
response.getMediaType().toString());
nodeLabelsInfo = response.readEntity(NodeLabelsInfo.class);
assertEquals(1, nodeLabelsInfo.getNodeLabels().size());
for (NodeLabelInfo nl : nodeLabelsInfo.getNodeLabelsInfo()) {
assertEquals(LABEL_1, nl.getName());
assertTrue(nl.getExclusivity());
}
// 3. Assign 'label1' to root.a
SchedConfUpdateInfo updateInfo = new SchedConfUpdateInfo();
Map<String, String> updateForRoot = new HashMap<>();
updateForRoot.put(CapacitySchedulerConfiguration.ACCESSIBLE_NODE_LABELS, "*");
QueueConfigInfo rootUpdateInfo = new QueueConfigInfo(ROOT.getFullPath(), updateForRoot);
Map<String, String> updateForRootA = new HashMap<>();
updateForRootA.put(CapacitySchedulerConfiguration.ACCESSIBLE_NODE_LABELS, LABEL_1);
QueueConfigInfo rootAUpdateInfo = new QueueConfigInfo(ROOT_A.getFullPath(), updateForRootA);
updateInfo.getUpdateQueueInfo().add(rootUpdateInfo);
updateInfo.getUpdateQueueInfo().add(rootAUpdateInfo);
response = schedulerConfResource
.queryParam("user.name", userName)
.request(MediaType.APPLICATION_JSON)
.put(Entity.entity(logAndReturnJson(schedulerConfResource, toJson(updateInfo,
SchedConfUpdateInfo.class)), MediaType.APPLICATION_JSON), Response.class);
assertEquals(Status.OK.getStatusCode(), response.getStatus());
CapacityScheduler cs = (CapacityScheduler) rm.getResourceScheduler();
assertEquals(Sets.newHashSet("*"),
cs.getConfiguration().getAccessibleNodeLabels(ROOT));
assertEquals(Sets.newHashSet(LABEL_1),
cs.getConfiguration().getAccessibleNodeLabels(ROOT_A));
// 4. Set partition capacities to queues as below
updateInfo = new SchedConfUpdateInfo();
updateForRoot = new HashMap<>();
updateForRoot.put(getAccessibleNodeLabelsCapacityPropertyName(LABEL_1), "100");
updateForRoot.put(getAccessibleNodeLabelsMaxCapacityPropertyName(LABEL_1), "100");
rootUpdateInfo = new QueueConfigInfo(ROOT.getFullPath(), updateForRoot);
updateForRootA = new HashMap<>();
updateForRootA.put(getAccessibleNodeLabelsCapacityPropertyName(LABEL_1), "100");
updateForRootA.put(getAccessibleNodeLabelsMaxCapacityPropertyName(LABEL_1), "100");
rootAUpdateInfo = new QueueConfigInfo(ROOT_A.getFullPath(), updateForRootA);
// Avoid the following exception by adding some capacities to root.a.a1 and root.a.a2 to label1
// Illegal capacity sum of 0.0 for children of queue a for label=label1.
// It is set to 0, but parent percent != 0, and doesn't allow children capacity to set to 0
Map<String, String> updateForRootA_A1 = new HashMap<>();
updateForRootA_A1.put(getAccessibleNodeLabelsCapacityPropertyName(LABEL_1), "20");
updateForRootA_A1.put(getAccessibleNodeLabelsMaxCapacityPropertyName(LABEL_1), "20");
QueueConfigInfo rootA_A1UpdateInfo = new QueueConfigInfo(ROOT_A_A1.getFullPath(),
updateForRootA_A1);
Map<String, String> updateForRootA_A2 = new HashMap<>();
updateForRootA_A2.put(getAccessibleNodeLabelsCapacityPropertyName(LABEL_1), "80");
updateForRootA_A2.put(getAccessibleNodeLabelsMaxCapacityPropertyName(LABEL_1), "80");
QueueConfigInfo rootA_A2UpdateInfo = new QueueConfigInfo(ROOT_A_A2.getFullPath(),
updateForRootA_A2);
updateInfo.getUpdateQueueInfo().add(rootUpdateInfo);
updateInfo.getUpdateQueueInfo().add(rootAUpdateInfo);
updateInfo.getUpdateQueueInfo().add(rootA_A1UpdateInfo);
updateInfo.getUpdateQueueInfo().add(rootA_A2UpdateInfo);
response = schedulerConfResource
.queryParam("user.name", userName)
.request(MediaType.APPLICATION_JSON)
.put(Entity.entity(logAndReturnJson(schedulerConfResource, toJson(updateInfo,
SchedConfUpdateInfo.class)), MediaType.APPLICATION_JSON),
Response.class);
assertEquals(Status.OK.getStatusCode(), response.getStatus());
assertEquals(100.0, cs.getConfiguration().getLabeledQueueCapacity(ROOT, LABEL_1), 0.001f);
assertEquals(100.0, cs.getConfiguration().getLabeledQueueMaximumCapacity(ROOT, LABEL_1),
0.001f);
assertEquals(100.0, cs.getConfiguration().getLabeledQueueCapacity(ROOT_A, LABEL_1), 0.001f);
assertEquals(100.0, cs.getConfiguration().getLabeledQueueMaximumCapacity(ROOT_A, LABEL_1),
0.001f);
assertEquals(20.0, cs.getConfiguration().getLabeledQueueCapacity(ROOT_A_A1, LABEL_1), 0.001f);
assertEquals(20.0, cs.getConfiguration().getLabeledQueueMaximumCapacity(ROOT_A_A1, LABEL_1),
0.001f);
assertEquals(80.0, cs.getConfiguration().getLabeledQueueCapacity(ROOT_A_A2, LABEL_1), 0.001f);
assertEquals(80.0, cs.getConfiguration().getLabeledQueueMaximumCapacity(ROOT_A_A2, LABEL_1),
0.001f);
//5. De-assign node label: "label1" + Remove residual properties
updateInfo = new SchedConfUpdateInfo();
updateForRoot = new HashMap<>();
updateForRoot.put(CapacitySchedulerConfiguration.ACCESSIBLE_NODE_LABELS, "*");
updateForRoot.put(getAccessibleNodeLabelsCapacityPropertyName(LABEL_1), "");
updateForRoot.put(getAccessibleNodeLabelsMaxCapacityPropertyName(LABEL_1), "");
rootUpdateInfo = new QueueConfigInfo(ROOT.getFullPath(), updateForRoot);
updateForRootA = new HashMap<>();
updateForRootA.put(CapacitySchedulerConfiguration.ACCESSIBLE_NODE_LABELS, "");
updateForRootA.put(getAccessibleNodeLabelsCapacityPropertyName(LABEL_1), "");
updateForRootA.put(getAccessibleNodeLabelsMaxCapacityPropertyName(LABEL_1), "");
rootAUpdateInfo = new QueueConfigInfo(ROOT_A.getFullPath(), updateForRootA);
updateForRootA_A1 = new HashMap<>();
updateForRootA_A1.put(CapacitySchedulerConfiguration.ACCESSIBLE_NODE_LABELS, "");
updateForRootA_A1.put(getAccessibleNodeLabelsCapacityPropertyName(LABEL_1), "");
updateForRootA_A1.put(getAccessibleNodeLabelsMaxCapacityPropertyName(LABEL_1), "");
rootA_A1UpdateInfo = new QueueConfigInfo(ROOT_A_A1.getFullPath(), updateForRootA_A1);
updateForRootA_A2 = new HashMap<>();
updateForRootA_A2.put(CapacitySchedulerConfiguration.ACCESSIBLE_NODE_LABELS, "");
updateForRootA_A2.put(getAccessibleNodeLabelsCapacityPropertyName(LABEL_1), "");
updateForRootA_A2.put(getAccessibleNodeLabelsMaxCapacityPropertyName(LABEL_1), "");
rootA_A2UpdateInfo = new QueueConfigInfo(ROOT_A_A2.getFullPath(), updateForRootA_A2);
updateInfo.getUpdateQueueInfo().add(rootUpdateInfo);
updateInfo.getUpdateQueueInfo().add(rootAUpdateInfo);
updateInfo.getUpdateQueueInfo().add(rootA_A1UpdateInfo);
updateInfo.getUpdateQueueInfo().add(rootA_A2UpdateInfo);
response = schedulerConfResource
.queryParam("user.name", userName)
.request(MediaType.APPLICATION_JSON)
.put(Entity.entity(logAndReturnJson(schedulerConfResource, toJson(updateInfo,
SchedConfUpdateInfo.class)), MediaType.APPLICATION_JSON), Response.class);
assertEquals(Status.OK.getStatusCode(), response.getStatus());
assertEquals(Sets.newHashSet("*"),
cs.getConfiguration().getAccessibleNodeLabels(ROOT));
assertNull(cs.getConfiguration().getAccessibleNodeLabels(ROOT_A));
//6. Remove node label 'label1'
response =
removeNodeLabelsResource
.queryParam("user.name", userName)
.queryParam("labels", LABEL_1)
.request(MediaType.APPLICATION_JSON)
.post(null, Response.class);
// Verify
response =
getNodeLabelsResource.queryParam("user.name", userName)
.request(MediaType.APPLICATION_JSON).get(Response.class);
assertEquals(MediaType.APPLICATION_JSON_TYPE + ";" + JettyUtils.UTF_8,
response.getMediaType().toString());
nodeLabelsInfo = response.readEntity(NodeLabelsInfo.class);
assertEquals(0, nodeLabelsInfo.getNodeLabels().size());
//6. Check residual configs
assertNull(getConfValueForQueueAndLabelAndType(cs, ROOT, LABEL_1, CAPACITY));
assertNull(getConfValueForQueueAndLabelAndType(cs, ROOT, LABEL_1, MAXIMUM_CAPACITY));
assertNull(getConfValueForQueueAndLabelAndType(cs, ROOT_A, LABEL_1, CAPACITY));
assertNull(getConfValueForQueueAndLabelAndType(cs, ROOT_A, LABEL_1, MAXIMUM_CAPACITY));
assertNull(getConfValueForQueueAndLabelAndType(cs, ROOT_A_A1, LABEL_1, CAPACITY));
assertNull(getConfValueForQueueAndLabelAndType(cs, ROOT_A_A1, LABEL_1, MAXIMUM_CAPACITY));
assertNull(getConfValueForQueueAndLabelAndType(cs, ROOT_A_A2, LABEL_1, CAPACITY));
assertNull(getConfValueForQueueAndLabelAndType(cs, ROOT_A_A2, LABEL_1, MAXIMUM_CAPACITY));
}
private String getConfValueForQueueAndLabelAndType(CapacityScheduler cs,
QueuePath queuePath, String label, String type) {
return cs.getConfiguration().get(
QueuePrefixes.getNodeLabelPrefix(
queuePath, label) + type);
}
private Object logAndReturnJson(WebTarget ws, String json) {
LOG.info("Sending to web resource: {}, json: {}", ws, json);
return json;
}
private String getAccessibleNodeLabelsCapacityPropertyName(String label) {
return String.format("%s.%s.%s", ACCESSIBLE_NODE_LABELS, label, CAPACITY);
}
private String getAccessibleNodeLabelsMaxCapacityPropertyName(String label) {
return String.format("%s.%s.%s", ACCESSIBLE_NODE_LABELS, label, MAXIMUM_CAPACITY);
}
@Test
public void testValidateWithClusterMaxAllocation() throws Exception {
WebTarget r = target().register(SchedConfUpdateInfoWriter.class);
int clusterMax = YarnConfiguration.
DEFAULT_RM_SCHEDULER_MAXIMUM_ALLOCATION_MB * 2;
conf.setInt(YarnConfiguration.RM_SCHEDULER_MAXIMUM_ALLOCATION_MB,
clusterMax);
SchedConfUpdateInfo updateInfo = new SchedConfUpdateInfo();
Map<String, String> updateParam = new HashMap<>();
updateParam.put(CapacitySchedulerConfiguration.MAXIMUM_APPLICATIONS_SUFFIX,
"100");
QueueConfigInfo aUpdateInfo = new QueueConfigInfo("root.a", updateParam);
updateInfo.getUpdateQueueInfo().add(aUpdateInfo);
Response response =
r.path("ws").path("v1").path("cluster")
.path(RMWSConsts.SCHEDULER_CONF_VALIDATE)
.queryParam("user.name", userName)
.request(MediaType.APPLICATION_JSON)
.post(Entity.entity(updateInfo, MediaType.APPLICATION_JSON),
Response.class);
assertEquals(Status.OK.getStatusCode(), response.getStatus());
}
@Override
@AfterEach
public void tearDown() throws Exception {
if (rm != null) {
rm.stop();
}
super.tearDown();
}
}
| JerseyBinder |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/runtime/jobmanager/OnCompletionActions.java | {
"start": 1063,
"end": 1436
} | interface ____ {
/**
* Job reached a globally terminal state.
*
* @param executionGraphInfo contains information about the terminated job
*/
void jobReachedGloballyTerminalState(ExecutionGraphInfo executionGraphInfo);
/** The {@link JobMaster} failed while executing the job. */
void jobMasterFailed(Throwable cause);
}
| OnCompletionActions |
java | apache__camel | tooling/maven/camel-api-component-maven-plugin/src/main/java/org/apache/camel/maven/JavaSourceParser.java | {
"start": 4377,
"end": 8129
} | class
____ = findInnerClass(rootClazz, innerClass);
if (clazz == null) {
errorMessage = "Cannot find inner class " + innerClass + " in class: " + rootClazz.getQualifiedName();
return;
}
}
LOG.debug("Parsing class: {}", clazz.getQualifiedName());
String rawClass = clazz.toUnformattedString();
String doc = getClassJavadocRaw(clazz, rawClass);
classDoc = sanitizeJavaDocValue(doc, true);
if (classDoc == null || classDoc.isEmpty()) {
rawClass = rootClazz.toUnformattedString();
doc = getClassJavadocRaw(rootClazz, rawClass);
classDoc = sanitizeJavaDocValue(doc, true);
}
if (classDoc != null && classDoc.indexOf('.') > 0) {
classDoc = StringHelper.before(classDoc, ".");
}
List<MethodSource> ml = clazz.getMethods();
for (MethodSource ms : ml) {
String methodName = ms.getName();
LOG.debug("Parsing method: {}", methodName);
// should not be constructor and must not be private
boolean isInterface = clazz instanceof JavaInterfaceSource;
boolean accept = isInterface || !ms.isConstructor() && ms.isPublic();
if (!accept) {
continue;
}
doc = getMethodJavadocRaw(ms, rawClass);
doc = sanitizeJavaDocValue(doc, true);
if (doc != null && doc.indexOf('.') > 0) {
doc = StringHelper.before(doc, ".");
}
if (doc != null && !doc.isEmpty()) {
methodDocs.put(ms.getName(), doc);
}
String result = resolveParameterizedType(rootClazz, clazz, ms, null, ms.getReturnType(), classLoader);
if (result.isEmpty()) {
result = "void";
}
LOG.trace("Parsed return type as: {}", result);
List<JavaDocTag> params = ms.getJavaDoc().getTags("@param");
Map<String, String> docs = new LinkedHashMap<>();
Map<String, String> args = new LinkedHashMap<>();
StringBuilder sb = new StringBuilder();
sb.append("public ").append(result).append(" ").append(ms.getName()).append("(");
List<ParameterSource> list = ms.getParameters();
for (int i = 0; i < list.size(); i++) {
ParameterSource ps = list.get(i);
String name = ps.getName();
String type = resolveParameterizedType(rootClazz, clazz, ms, ps, ps.getType(), classLoader);
LOG.trace("Parsing parameter #{} ({} {})", i, type, name);
sb.append(type);
sb.append(" ").append(name);
if (i < list.size() - 1) {
sb.append(", ");
}
// need documentation for this parameter
docs.put(name, getJavadocValue(params, name));
args.put(name, type);
}
sb.append(")");
Map<String, String> existing = parameterDocs.get(ms.getName());
if (existing != null) {
existing.putAll(docs);
} else {
parameterDocs.put(ms.getName(), docs);
}
String signature = sb.toString();
methodSignatures.add(signature);
parameterTypes.put(signature, args);
docs = new LinkedHashMap<>();
args = new LinkedHashMap<>();
if (includeSetters) {
// special for camel-google which has many API options as optional getter/setter pairs
// which we need special code to discover and parse due to complex nested | clazz |
java | elastic__elasticsearch | x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/LogsDbVersusReindexedLogsDbChallengeRestIT.java | {
"start": 638,
"end": 1511
} | class ____ extends ReindexChallengeRestIT {
public String getBaselineDataStreamName() {
return "logs-apache-baseline";
}
public String getContenderDataStreamName() {
return "logs-apache-reindexed-contender";
}
@Override
public void baselineSettings(Settings.Builder builder) {
dataGenerationHelper.logsDbSettings(builder);
}
@Override
public void contenderSettings(Settings.Builder builder) {
dataGenerationHelper.logsDbSettings(builder);
}
@Override
public void baselineMappings(XContentBuilder builder) throws IOException {
dataGenerationHelper.writeLogsDbMapping(builder);
}
@Override
public void contenderMappings(XContentBuilder builder) throws IOException {
dataGenerationHelper.writeLogsDbMapping(builder);
}
}
| LogsDbVersusReindexedLogsDbChallengeRestIT |
java | apache__camel | dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/MasterEndpointBuilderFactory.java | {
"start": 7608,
"end": 9616
} | interface ____ {
/**
* Master (camel-master)
* Have only a single consumer in a cluster consuming from a given
* endpoint; with automatic failover if the JVM dies.
*
* Category: clustering
* Since: 2.20
* Maven coordinates: org.apache.camel:camel-master
*
* Syntax: <code>master:namespace:delegateUri</code>
*
* Path parameter: namespace (required)
* The name of the cluster namespace to use
*
* Path parameter: delegateUri (required)
* The endpoint uri to use in master/slave mode
*
* @param path namespace:delegateUri
* @return the dsl builder
*/
default MasterEndpointBuilder master(String path) {
return MasterEndpointBuilderFactory.endpointBuilder("master", path);
}
/**
* Master (camel-master)
* Have only a single consumer in a cluster consuming from a given
* endpoint; with automatic failover if the JVM dies.
*
* Category: clustering
* Since: 2.20
* Maven coordinates: org.apache.camel:camel-master
*
* Syntax: <code>master:namespace:delegateUri</code>
*
* Path parameter: namespace (required)
* The name of the cluster namespace to use
*
* Path parameter: delegateUri (required)
* The endpoint uri to use in master/slave mode
*
* @param componentName to use a custom component name for the endpoint
* instead of the default name
* @param path namespace:delegateUri
* @return the dsl builder
*/
default MasterEndpointBuilder master(String componentName, String path) {
return MasterEndpointBuilderFactory.endpointBuilder(componentName, path);
}
}
static MasterEndpointBuilder endpointBuilder(String componentName, String path) {
| MasterBuilders |
java | apache__hadoop | hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/webapp/TestAppController.java | {
"start": 4972,
"end": 5969
} | class ____ rendering
*/
@Test
public void testGetJob() {
when(job.checkAccess(any(UserGroupInformation.class), any(JobACL.class)))
.thenReturn(false);
appController.job();
verify(appController.response()).setContentType(MimeType.TEXT);
assertEquals(
"Access denied: User user does not have permission to view job job_01_01",
appController.getData());
when(job.checkAccess(any(UserGroupInformation.class), any(JobACL.class)))
.thenReturn(true);
appController.getProperty().remove(AMParams.JOB_ID);
appController.job();
assertEquals(
"Access denied: User user does not have permission to view job job_01_01Bad Request: Missing job ID",
appController.getData());
appController.getProperty().put(AMParams.JOB_ID, "job_01_01");
appController.job();
assertEquals(JobPage.class, appController.getClazz());
}
/**
* Test method 'jobCounters'. Should print message about error or set CountersPage | for |
java | hibernate__hibernate-orm | hibernate-envers/src/test/java/org/hibernate/orm/test/envers/integration/query/ids/IdClassRelatedIdQueryTest.java | {
"start": 6218,
"end": 6854
} | class ____ implements Serializable {
@Id
@ManyToOne(optional = false)
private Document document;
@Id
@ManyToOne(optional = false)
private Person person;
PersonDocument() {
}
PersonDocument(Person person, Document document) {
this.document = document;
this.person = person;
}
public Document getDocument() {
return document;
}
public void setDocument(Document document) {
this.document = document;
}
public Person getPerson() {
return person;
}
public void setPerson(Person person) {
this.person = person;
}
}
@Audited
@Entity(name = "Document")
public static | PersonDocument |
java | quarkusio__quarkus | extensions/grpc/runtime/src/main/java/io/quarkus/grpc/auth/DefaultAuthExceptionHandlerProvider.java | {
"start": 281,
"end": 2054
} | class ____ implements AuthExceptionHandlerProvider {
private final boolean addStatusDescription;
public DefaultAuthExceptionHandlerProvider() {
this.addStatusDescription = LaunchMode.current().isDevOrTest();
}
@Override
public int getPriority() {
return DEFAULT_PRIORITY;
}
@Override
public <ReqT, RespT> AuthExceptionHandler<ReqT, RespT> createHandler(ServerCall.Listener<ReqT> listener,
ServerCall<ReqT, RespT> serverCall, Metadata metadata) {
return new AuthExceptionHandler<>(listener, serverCall, metadata, addStatusDescription);
}
@Override
public StatusException transformToStatusException(Throwable t) {
return new StatusException(transformToStatusException(addStatusDescription, t));
}
@Override
public boolean handlesException(Throwable failure) {
return failure instanceof AuthenticationException || failure instanceof SecurityException;
}
static Status transformToStatusException(boolean addExceptionMessage, Throwable exception) {
if (exception instanceof AuthenticationException) {
if (addExceptionMessage) {
return Status.UNAUTHENTICATED.withDescription(exception.getMessage());
} else {
return Status.UNAUTHENTICATED;
}
} else if (exception instanceof SecurityException) {
if (addExceptionMessage) {
return Status.PERMISSION_DENIED.withDescription(exception.getMessage());
} else {
return Status.PERMISSION_DENIED;
}
} else {
throw new IllegalStateException("Cannot transform exception " + exception, exception);
}
}
}
| DefaultAuthExceptionHandlerProvider |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/script/LeafReaderContextSupplier.java | {
"start": 688,
"end": 774
} | interface ____ {
LeafReaderContext getLeafReaderContext();
}
| LeafReaderContextSupplier |
java | apache__camel | dsl/camel-componentdsl/src/generated/java/org/apache/camel/builder/component/dsl/SshComponentBuilderFactory.java | {
"start": 18101,
"end": 22384
} | class ____
extends AbstractComponentBuilder<SshComponent>
implements SshComponentBuilder {
@Override
protected SshComponent buildConcreteComponent() {
return new SshComponent();
}
private org.apache.camel.component.ssh.SshConfiguration getOrCreateConfiguration(SshComponent component) {
if (component.getConfiguration() == null) {
component.setConfiguration(new org.apache.camel.component.ssh.SshConfiguration());
}
return component.getConfiguration();
}
@Override
protected boolean setPropertyOnComponent(
Component component,
String name,
Object value) {
switch (name) {
case "failOnUnknownHost": getOrCreateConfiguration((SshComponent) component).setFailOnUnknownHost((boolean) value); return true;
case "knownHostsResource": getOrCreateConfiguration((SshComponent) component).setKnownHostsResource((java.lang.String) value); return true;
case "timeout": getOrCreateConfiguration((SshComponent) component).setTimeout((long) value); return true;
case "bridgeErrorHandler": ((SshComponent) component).setBridgeErrorHandler((boolean) value); return true;
case "pollCommand": getOrCreateConfiguration((SshComponent) component).setPollCommand((java.lang.String) value); return true;
case "lazyStartProducer": ((SshComponent) component).setLazyStartProducer((boolean) value); return true;
case "autowiredEnabled": ((SshComponent) component).setAutowiredEnabled((boolean) value); return true;
case "channelType": getOrCreateConfiguration((SshComponent) component).setChannelType((java.lang.String) value); return true;
case "clientBuilder": getOrCreateConfiguration((SshComponent) component).setClientBuilder((org.apache.sshd.client.ClientBuilder) value); return true;
case "compressions": getOrCreateConfiguration((SshComponent) component).setCompressions((java.lang.String) value); return true;
case "configuration": ((SshComponent) component).setConfiguration((org.apache.camel.component.ssh.SshConfiguration) value); return true;
case "shellPrompt": getOrCreateConfiguration((SshComponent) component).setShellPrompt((java.lang.String) value); return true;
case "sleepForShellPrompt": getOrCreateConfiguration((SshComponent) component).setSleepForShellPrompt((long) value); return true;
case "healthCheckConsumerEnabled": ((SshComponent) component).setHealthCheckConsumerEnabled((boolean) value); return true;
case "healthCheckProducerEnabled": ((SshComponent) component).setHealthCheckProducerEnabled((boolean) value); return true;
case "certResource": getOrCreateConfiguration((SshComponent) component).setCertResource((java.lang.String) value); return true;
case "certResourcePassword": getOrCreateConfiguration((SshComponent) component).setCertResourcePassword((java.lang.String) value); return true;
case "ciphers": getOrCreateConfiguration((SshComponent) component).setCiphers((java.lang.String) value); return true;
case "kex": getOrCreateConfiguration((SshComponent) component).setKex((java.lang.String) value); return true;
case "keyPairProvider": getOrCreateConfiguration((SshComponent) component).setKeyPairProvider((org.apache.sshd.common.keyprovider.KeyPairProvider) value); return true;
case "keyType": getOrCreateConfiguration((SshComponent) component).setKeyType((java.lang.String) value); return true;
case "macs": getOrCreateConfiguration((SshComponent) component).setMacs((java.lang.String) value); return true;
case "password": getOrCreateConfiguration((SshComponent) component).setPassword((java.lang.String) value); return true;
case "signatures": getOrCreateConfiguration((SshComponent) component).setSignatures((java.lang.String) value); return true;
case "username": getOrCreateConfiguration((SshComponent) component).setUsername((java.lang.String) value); return true;
default: return false;
}
}
}
} | SshComponentBuilderImpl |
java | apache__kafka | clients/src/main/java/org/apache/kafka/clients/consumer/ConsumerGroupMetadata.java | {
"start": 1110,
"end": 1399
} | class ____ {
private final String groupId;
private final int generationId;
private final String memberId;
private final Optional<String> groupInstanceId;
/**
* @deprecated Since 4.2, please use {@link KafkaConsumer#groupMetadata()} instead. This | ConsumerGroupMetadata |
java | elastic__elasticsearch | x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/ConfigurableClusterPrivileges.java | {
"start": 18347,
"end": 32696
} | class ____ implements ConfigurableClusterPrivilege {
public static final String WRITEABLE_NAME = "manage-roles-privilege";
private final List<ManageRolesIndexPermissionGroup> indexPermissionGroups;
private final Function<RestrictedIndices, Predicate<TransportRequest>> requestPredicateSupplier;
private static final Set<String> EXPECTED_INDEX_GROUP_FIELDS = Set.of(
Fields.NAMES.getPreferredName(),
Fields.PRIVILEGES.getPreferredName()
);
public ManageRolesPrivilege(List<ManageRolesIndexPermissionGroup> manageRolesIndexPermissionGroups) {
this.indexPermissionGroups = manageRolesIndexPermissionGroups;
this.requestPredicateSupplier = (restrictedIndices) -> {
IndicesPermission.Builder indicesPermissionBuilder = new IndicesPermission.Builder(restrictedIndices);
for (ManageRolesIndexPermissionGroup indexPatternPrivilege : manageRolesIndexPermissionGroups) {
Set<IndexPrivilege> privileges = IndexPrivilege.resolveBySelectorAccess(Set.of(indexPatternPrivilege.privileges()));
assert privileges.stream().allMatch(p -> p.getSelectorPredicate() != IndexComponentSelectorPredicate.FAILURES)
: "not support for failures store access yet";
for (IndexPrivilege indexPrivilege : privileges) {
indicesPermissionBuilder.addGroup(
indexPrivilege,
FieldPermissions.DEFAULT,
null,
false,
indexPatternPrivilege.indexPatterns()
);
}
}
final IndicesPermission indicesPermission = indicesPermissionBuilder.build();
return (TransportRequest request) -> {
if (request instanceof final PutRoleRequest putRoleRequest) {
return hasNonIndexPrivileges(putRoleRequest.roleDescriptor()) == false
&& Arrays.stream(putRoleRequest.indices())
.noneMatch(
indexPrivilege -> requestIndexPatternsAllowed(
indicesPermission,
indexPrivilege.getIndices(),
indexPrivilege.getPrivileges()
) == false
);
} else if (request instanceof final BulkPutRolesRequest bulkPutRoleRequest) {
return bulkPutRoleRequest.getRoles().stream().noneMatch(ManageRolesPrivilege::hasNonIndexPrivileges)
&& bulkPutRoleRequest.getRoles()
.stream()
.allMatch(
roleDescriptor -> Arrays.stream(roleDescriptor.getIndicesPrivileges())
.noneMatch(
indexPrivilege -> requestIndexPatternsAllowed(
indicesPermission,
indexPrivilege.getIndices(),
indexPrivilege.getPrivileges()
) == false
)
);
} else if (request instanceof final DeleteRoleRequest deleteRoleRequest) {
return requestIndexPatternsAllowed(
indicesPermission,
new String[] { deleteRoleRequest.name() },
DELETE_INDEX.name().toArray(String[]::new)
);
} else if (request instanceof final BulkDeleteRolesRequest bulkDeleteRoleRequest) {
return requestIndexPatternsAllowed(
indicesPermission,
bulkDeleteRoleRequest.getRoleNames().toArray(String[]::new),
DELETE_INDEX.name().toArray(String[]::new)
);
}
throw new IllegalArgumentException("Unsupported request type [" + request.getClass() + "]");
};
};
}
@Override
public Category getCategory() {
return Category.ROLE;
}
@Override
public String getWriteableName() {
return WRITEABLE_NAME;
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeCollection(indexPermissionGroups);
}
public static ManageRolesPrivilege createFrom(StreamInput in) throws IOException {
final List<ManageRolesIndexPermissionGroup> indexPatternPrivileges = in.readCollectionAsList(
ManageRolesIndexPermissionGroup::createFrom
);
return new ManageRolesPrivilege(indexPatternPrivileges);
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
return builder.field(
Fields.MANAGE.getPreferredName(),
Map.of(Fields.INDICES.getPreferredName(), indexPermissionGroups.stream().map(indexPatternPrivilege -> {
Map<String, String[]> sortedMap = new TreeMap<>();
sortedMap.put(Fields.NAMES.getPreferredName(), indexPatternPrivilege.indexPatterns());
sortedMap.put(Fields.PRIVILEGES.getPreferredName(), indexPatternPrivilege.privileges());
return sortedMap;
}).toList())
);
}
private static void expectedIndexGroupFields(String fieldName, XContentParser parser) {
if (EXPECTED_INDEX_GROUP_FIELDS.contains(fieldName) == false) {
throw new XContentParseException(
parser.getTokenLocation(),
"failed to parse privilege. expected one of "
+ Arrays.toString(EXPECTED_INDEX_GROUP_FIELDS.toArray(String[]::new))
+ " but found ["
+ fieldName
+ "] instead"
);
}
}
public static ManageRolesPrivilege parse(XContentParser parser) throws IOException {
expectedToken(parser.currentToken(), parser, XContentParser.Token.FIELD_NAME);
expectFieldName(parser, Fields.MANAGE);
expectedToken(parser.nextToken(), parser, XContentParser.Token.START_OBJECT);
expectedToken(parser.nextToken(), parser, XContentParser.Token.FIELD_NAME);
expectFieldName(parser, Fields.INDICES);
expectedToken(parser.nextToken(), parser, XContentParser.Token.START_ARRAY);
List<ManageRolesIndexPermissionGroup> indexPrivileges = new ArrayList<>();
Map<String, String[]> parsedArraysByFieldName = new HashMap<>();
XContentParser.Token token;
while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
expectedToken(token, parser, XContentParser.Token.START_OBJECT);
expectedToken(parser.nextToken(), parser, XContentParser.Token.FIELD_NAME);
String currentFieldName = parser.currentName();
expectedIndexGroupFields(currentFieldName, parser);
expectedToken(parser.nextToken(), parser, XContentParser.Token.START_ARRAY);
parsedArraysByFieldName.put(currentFieldName, XContentUtils.readStringArray(parser, false));
expectedToken(parser.nextToken(), parser, XContentParser.Token.FIELD_NAME);
currentFieldName = parser.currentName();
expectedIndexGroupFields(currentFieldName, parser);
expectedToken(parser.nextToken(), parser, XContentParser.Token.START_ARRAY);
parsedArraysByFieldName.put(currentFieldName, XContentUtils.readStringArray(parser, false));
expectedToken(parser.nextToken(), parser, XContentParser.Token.END_OBJECT);
indexPrivileges.add(
new ManageRolesIndexPermissionGroup(
parsedArraysByFieldName.get(Fields.NAMES.getPreferredName()),
parsedArraysByFieldName.get(Fields.PRIVILEGES.getPreferredName())
)
);
}
expectedToken(parser.nextToken(), parser, XContentParser.Token.END_OBJECT);
for (var indexPrivilege : indexPrivileges) {
if (indexPrivilege.indexPatterns == null || indexPrivilege.indexPatterns.length == 0) {
throw new IllegalArgumentException("Indices privileges must refer to at least one index name or index name pattern");
}
if (indexPrivilege.privileges == null || indexPrivilege.privileges.length == 0) {
throw new IllegalArgumentException("Indices privileges must define at least one privilege");
}
for (String privilege : indexPrivilege.privileges) {
IndexPrivilege namedPrivilege = IndexPrivilege.getNamedOrNull(privilege);
// Use resolveBySelectorAccess to determine whether the passed privilege is valid.
// IllegalArgumentException is thrown here when an invalid permission is encountered.
IndexPrivilege.resolveBySelectorAccess(Set.of(privilege));
if (namedPrivilege != null && namedPrivilege.getSelectorPredicate() == IndexComponentSelectorPredicate.FAILURES) {
throw new IllegalArgumentException(
"Failure store related privileges are not supported as targets of manage roles but found [" + privilege + "]"
);
}
}
}
return new ManageRolesPrivilege(indexPrivileges);
}
public record ManageRolesIndexPermissionGroup(String[] indexPatterns, String[] privileges) implements Writeable {
public static ManageRolesIndexPermissionGroup createFrom(StreamInput in) throws IOException {
return new ManageRolesIndexPermissionGroup(in.readStringArray(), in.readStringArray());
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeStringArray(indexPatterns);
out.writeStringArray(privileges);
}
@Override
public String toString() {
return "{"
+ Fields.NAMES
+ ":"
+ Arrays.toString(indexPatterns())
+ ":"
+ Fields.PRIVILEGES
+ ":"
+ Arrays.toString(privileges())
+ "}";
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
ManageRolesIndexPermissionGroup that = (ManageRolesIndexPermissionGroup) o;
return Arrays.equals(indexPatterns, that.indexPatterns) && Arrays.equals(privileges, that.privileges);
}
@Override
public int hashCode() {
return Objects.hash(Arrays.hashCode(indexPatterns), Arrays.hashCode(privileges));
}
}
@Override
public String toString() {
return "{"
+ getCategory()
+ ":"
+ Fields.MANAGE.getPreferredName()
+ ":"
+ Fields.INDICES.getPreferredName()
+ "=["
+ Strings.collectionToDelimitedString(indexPermissionGroups, ",")
+ "]}";
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
final ManageRolesPrivilege that = (ManageRolesPrivilege) o;
if (this.indexPermissionGroups.size() != that.indexPermissionGroups.size()) {
return false;
}
for (int i = 0; i < this.indexPermissionGroups.size(); i++) {
if (Objects.equals(this.indexPermissionGroups.get(i), that.indexPermissionGroups.get(i)) == false) {
return false;
}
}
return true;
}
@Override
public int hashCode() {
return Objects.hash(indexPermissionGroups.hashCode());
}
@Override
public ClusterPermission.Builder buildPermission(final ClusterPermission.Builder builder) {
return builder.addWithPredicateSupplier(
this,
Set.of(PutRoleAction.NAME, ActionTypes.BULK_PUT_ROLES.name(), ActionTypes.BULK_DELETE_ROLES.name(), DeleteRoleAction.NAME),
requestPredicateSupplier
);
}
private static boolean requestIndexPatternsAllowed(
IndicesPermission indicesPermission,
String[] requestIndexPatterns,
String[] privileges
) {
return indicesPermission.checkResourcePrivileges(Set.of(requestIndexPatterns), false, Set.of(privileges), true, null);
}
private static boolean hasNonIndexPrivileges(RoleDescriptor roleDescriptor) {
return roleDescriptor.hasApplicationPrivileges()
|| roleDescriptor.hasClusterPrivileges()
|| roleDescriptor.hasConfigurableClusterPrivileges()
|| roleDescriptor.hasRemoteIndicesPrivileges()
|| roleDescriptor.hasRemoteClusterPermissions()
|| roleDescriptor.hasRunAs()
|| roleDescriptor.hasWorkflowsRestriction();
}
private | ManageRolesPrivilege |
java | apache__camel | core/camel-core/src/test/java/org/apache/camel/processor/SimulatorTest.java | {
"start": 1225,
"end": 2742
} | class ____ extends ContextTestSupport {
@Override
protected Registry createCamelRegistry() throws Exception {
Registry answer = super.createCamelRegistry();
answer.bind("foo", new MyBean("foo"));
answer.bind("bar", new MyBean("bar"));
return answer;
}
@Test
public void testReceivesFooResponse() throws Exception {
assertRespondsWith("foo", "Bye said foo");
}
@Test
public void testReceivesBarResponse() throws Exception {
assertRespondsWith("bar", "Bye said bar");
}
protected void assertRespondsWith(final String value, String containedText) throws InvalidPayloadException {
Exchange response = template.request("direct:a", new Processor() {
public void process(Exchange exchange) {
Message in = exchange.getIn();
in.setBody("answer");
in.setHeader("cheese", value);
}
});
assertNotNull(response, "Should receive a response!");
String text = response.getMessage().getMandatoryBody(String.class);
assertStringContains(text, containedText);
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
public void configure() {
// START SNIPPET: example
from("direct:a").recipientList(simple("bean:${in.header.cheese}"));
// END SNIPPET: example
}
};
}
public static | SimulatorTest |
java | apache__avro | lang/java/tools/src/test/java/org/apache/avro/tool/TestSpecificCompilerTool.java | {
"start": 1279,
"end": 13825
} | class ____ {
// where test input/expected output comes from
private static final File TEST_DIR = new File(System.getProperty("test.compile.schema.dir", "src/test/compiler"));
// where test input comes from
private static final File TEST_INPUT_DIR = new File(TEST_DIR, "input");
// where test expected output comes from
private static final File TEST_EXPECTED_OUTPUT_DIR = new File(TEST_DIR, "output");
private static final File TEST_EXPECTED_POSITION = new File(TEST_EXPECTED_OUTPUT_DIR, "Position.java");
private static final File TEST_EXPECTED_PLAYER = new File(TEST_EXPECTED_OUTPUT_DIR, "Player.java");
private static final File TEST_EXPECTED_NO_SETTERS = new File(TEST_EXPECTED_OUTPUT_DIR, "NoSettersTest.java");
private static final File TEST_EXPECTED_OPTIONAL_GETTERS_FOR_NULLABLE_FIELDS = new File(TEST_EXPECTED_OUTPUT_DIR,
"OptionalGettersNullableFieldsTest.java");
private static final File TEST_EXPECTED_OPTIONAL_GETTERS_FOR_ALL_FIELDS = new File(TEST_EXPECTED_OUTPUT_DIR,
"OptionalGettersAllFieldsTest.java");
private static final File TEST_EXPECTED_ADD_EXTRA_OPTIONAL_GETTERS = new File(TEST_EXPECTED_OUTPUT_DIR,
"AddExtraOptionalGettersTest.java");
private static final File TEST_EXPECTED_STRING_OUTPUT_DIR = new File(TEST_DIR, "output-string");
private static final File TEST_EXPECTED_STRING_POSITION = new File(TEST_EXPECTED_STRING_OUTPUT_DIR,
"avro/examples/baseball/Position.java");
private static final File TEST_EXPECTED_STRING_PLAYER = new File(TEST_EXPECTED_STRING_OUTPUT_DIR,
"avro/examples/baseball/Player.java");
private static final File TEST_EXPECTED_STRING_JET_BRAINS_NULL_SAFE_ANNOTATIONS_TEST = new File(
TEST_EXPECTED_STRING_OUTPUT_DIR, "avro/examples/baseball/JetBrainsNullSafeAnnotationsFieldsTest.java");
private static final File TEST_EXPECTED_STRING_JSPECIFY_NULL_SAFE_ANNOTATIONS_TEST = new File(
TEST_EXPECTED_STRING_OUTPUT_DIR, "avro/examples/baseball/JSpecifyNullSafeAnnotationsFieldsTest.java");
private static final File TEST_EXPECTED_STRING_FIELDTEST = new File(TEST_EXPECTED_STRING_OUTPUT_DIR,
"avro/examples/baseball/FieldTest.java");
private static final File TEST_EXPECTED_STRING_PROTO = new File(TEST_EXPECTED_STRING_OUTPUT_DIR,
"avro/examples/baseball/Proto.java");
// where test output goes
private static final File TEST_OUTPUT_DIR = new File("target/compiler/output");
private static final File TEST_OUTPUT_PLAYER = new File(TEST_OUTPUT_DIR, "avro/examples/baseball/Player.java");
private static final File TEST_OUTPUT_POSITION = new File(TEST_OUTPUT_DIR, "avro/examples/baseball/Position.java");
private static final File TEST_OUTPUT_NO_SETTERS = new File(TEST_OUTPUT_DIR,
"avro/examples/baseball/NoSettersTest.java");
private static final File TEST_OUTPUT_OPTIONAL_GETTERS_NULLABLE_FIELDS = new File(TEST_OUTPUT_DIR,
"avro/examples/baseball/OptionalGettersNullableFieldsTest.java");
private static final File TEST_OUTPUT_OPTIONAL_GETTERS_ALL_FIELDS = new File(TEST_OUTPUT_DIR,
"avro/examples/baseball/OptionalGettersAllFieldsTest.java");
private static final File TEST_OUTPUT_ADD_EXTRA_OPTIONAL_GETTERS = new File(TEST_OUTPUT_DIR,
"avro/examples/baseball/AddExtraOptionalGettersTest.java");
private static final File TEST_OUTPUT_STRING_DIR = new File("target/compiler/output-string");
private static final File TEST_OUTPUT_STRING_PLAYER = new File(TEST_OUTPUT_STRING_DIR,
"avro/examples/baseball/Player.java");
private static final File TEST_OUTPUT_STRING_POSITION = new File(TEST_OUTPUT_STRING_DIR,
"avro/examples/baseball/Position.java");
private static final File TEST_OUTPUT_STRING_FIELDTEST = new File(TEST_OUTPUT_STRING_DIR,
"avro/examples/baseball/FieldTest.java");
private static final File TEST_OUTPUT_STRING_JET_BRAINS_NULL_SAFE_ANNOTATIONS_TEST = new File(TEST_OUTPUT_STRING_DIR,
"avro/examples/baseball/JetBrainsNullSafeAnnotationsFieldsTest.java");
private static final File TEST_OUTPUT_STRING_JSPECIFY_NULL_SAFE_ANNOTATIONS_TEST = new File(TEST_OUTPUT_STRING_DIR,
"avro/examples/baseball/JSpecifyNullSafeAnnotationsFieldsTest.java");
private static final File TEST_OUTPUT_STRING_PROTO = new File(TEST_OUTPUT_STRING_DIR,
"avro/examples/baseball/Proto.java");
@BeforeEach
public void setUp() {
TEST_OUTPUT_DIR.delete();
}
@Test
void compileSchemaWithExcludedSetters() throws Exception {
TEST_OUTPUT_NO_SETTERS.delete();
doCompile(new String[] { "-encoding", "UTF-8", "-noSetters", "schema",
TEST_INPUT_DIR.toString() + "/nosetterstest.avsc", TEST_OUTPUT_DIR.getPath() });
assertFileMatch(TEST_EXPECTED_NO_SETTERS, TEST_OUTPUT_NO_SETTERS);
}
@Test
void compileSchemaWithOptionalGettersForNullableFieldsOnly() throws Exception {
TEST_OUTPUT_OPTIONAL_GETTERS_NULLABLE_FIELDS.delete();
doCompile(new String[] { "-encoding", "UTF-8", "-optionalGetters", "only_nullable_fields", "schema",
TEST_INPUT_DIR.toString() + "/optionalgettersnullablefieldstest.avsc", TEST_OUTPUT_DIR.getPath() });
assertFileMatch(TEST_EXPECTED_OPTIONAL_GETTERS_FOR_NULLABLE_FIELDS, TEST_OUTPUT_OPTIONAL_GETTERS_NULLABLE_FIELDS);
}
@Test
void compileSchemaWithJetBrainsNullSafeAnnotationsFields() throws Exception {
TEST_OUTPUT_STRING_JET_BRAINS_NULL_SAFE_ANNOTATIONS_TEST.delete();
doCompile(new String[] { "-encoding", "UTF-8", "-nullSafeAnnotations", "-string", "schema",
TEST_INPUT_DIR.toString() + "/jetbrainsnullsafeannotationsfieldstest.avsc", TEST_OUTPUT_STRING_DIR.getPath() });
assertFileMatch(TEST_EXPECTED_STRING_JET_BRAINS_NULL_SAFE_ANNOTATIONS_TEST,
TEST_OUTPUT_STRING_JET_BRAINS_NULL_SAFE_ANNOTATIONS_TEST);
}
@Test
void compileSchemaWithJSpecifyNullSafeAnnotationsFields() throws Exception {
TEST_OUTPUT_STRING_JSPECIFY_NULL_SAFE_ANNOTATIONS_TEST.delete();
doCompile(new String[] { "-encoding", "UTF-8", "-nullSafeAnnotations", "-nullSafeAnnotationNullable",
"org.jspecify.annotations.Nullable", "-nullSafeAnnotationNotNull", "org.jspecify.annotations.NonNull",
"-string", "schema", TEST_INPUT_DIR.toString() + "/jspecifynullsafeannotationsfieldstest.avsc",
TEST_OUTPUT_STRING_DIR.getPath() });
assertFileMatch(TEST_EXPECTED_STRING_JSPECIFY_NULL_SAFE_ANNOTATIONS_TEST,
TEST_OUTPUT_STRING_JSPECIFY_NULL_SAFE_ANNOTATIONS_TEST);
}
@Test
void compileSchemaWithOptionalGettersForAllFields() throws Exception {
TEST_OUTPUT_OPTIONAL_GETTERS_ALL_FIELDS.delete();
doCompile(new String[] { "-encoding", "UTF-8", "-optionalGetters", "all_fields", "schema",
TEST_INPUT_DIR.toString() + "/optionalgettersallfieldstest.avsc", TEST_OUTPUT_DIR.getPath() });
assertFileMatch(TEST_EXPECTED_OPTIONAL_GETTERS_FOR_ALL_FIELDS, TEST_OUTPUT_OPTIONAL_GETTERS_ALL_FIELDS);
}
@Test
void compileSchemaWithAddExtraOptionalGetters() throws Exception {
TEST_OUTPUT_ADD_EXTRA_OPTIONAL_GETTERS.delete();
doCompile(new String[] { "-encoding", "UTF-8", "-addExtraOptionalGetters", "schema",
TEST_INPUT_DIR.toString() + "/addextraoptionalgetterstest.avsc", TEST_OUTPUT_DIR.getPath() });
assertFileMatch(TEST_EXPECTED_ADD_EXTRA_OPTIONAL_GETTERS, TEST_OUTPUT_ADD_EXTRA_OPTIONAL_GETTERS);
}
@Test
void compileSchemaSingleFile() throws Exception {
doCompile(new String[] { "-encoding", "UTF-8", "schema", TEST_INPUT_DIR.toString() + "/position.avsc",
TEST_OUTPUT_DIR.getPath() });
assertFileMatch(TEST_EXPECTED_POSITION, TEST_OUTPUT_POSITION);
}
@Test
void compileSchemaTwoFiles() throws Exception {
doCompile(new String[] { "-encoding", "UTF-8", "schema", TEST_INPUT_DIR.toString() + "/position.avsc",
TEST_INPUT_DIR.toString() + "/player.avsc", TEST_OUTPUT_DIR.getPath() });
assertFileMatch(TEST_EXPECTED_POSITION, TEST_OUTPUT_POSITION);
assertFileMatch(TEST_EXPECTED_PLAYER, TEST_OUTPUT_PLAYER);
}
@Test
void compileSchemaFileAndDirectory() throws Exception {
doCompile(new String[] { "-encoding", "UTF-8", "schema", TEST_INPUT_DIR.toString() + "/position.avsc",
TEST_INPUT_DIR.toString(), TEST_OUTPUT_DIR.getPath() });
assertFileMatch(TEST_EXPECTED_POSITION, TEST_OUTPUT_POSITION);
assertFileMatch(TEST_EXPECTED_PLAYER, TEST_OUTPUT_PLAYER);
}
@Test
void compileSchemasUsingString() throws Exception {
doCompile(new String[] { "-encoding", "UTF-8", "-string", "schema", TEST_INPUT_DIR.toString() + "/position.avsc",
TEST_INPUT_DIR.toString() + "/player.avsc", TEST_OUTPUT_STRING_DIR.getPath() });
assertFileMatch(TEST_EXPECTED_STRING_POSITION, TEST_OUTPUT_STRING_POSITION);
assertFileMatch(TEST_EXPECTED_STRING_PLAYER, TEST_OUTPUT_STRING_PLAYER);
}
@Test
void compileSchemasWithVariousFieldTypes() throws Exception {
doCompile(new String[] { "-encoding", "UTF-8", "-string", "schema", TEST_INPUT_DIR.toString() + "/fieldtest.avsc",
TEST_INPUT_DIR.toString() + "/fieldtest.avsc", TEST_OUTPUT_STRING_DIR.getPath() });
assertFileMatch(TEST_EXPECTED_STRING_FIELDTEST, TEST_OUTPUT_STRING_FIELDTEST);
}
@Test
void orderingOfFlags() throws Exception {
// Order of Flags as per initial implementation
doCompile(new String[] { "-encoding", "UTF-8", "-string", "-bigDecimal", "schema",
TEST_INPUT_DIR.toString() + "/fieldtest.avsc", TEST_INPUT_DIR.toString() + "/fieldtest.avsc",
TEST_OUTPUT_STRING_DIR.getPath() });
assertFileMatch(TEST_EXPECTED_STRING_FIELDTEST, TEST_OUTPUT_STRING_FIELDTEST);
// Change order of encoding and string
doCompile(new String[] { "-string", "-encoding", "UTF-8", "-bigDecimal", "schema",
TEST_INPUT_DIR.toString() + "/fieldtest.avsc", TEST_INPUT_DIR.toString() + "/fieldtest.avsc",
TEST_OUTPUT_STRING_DIR.getPath() });
assertFileMatch(TEST_EXPECTED_STRING_FIELDTEST, TEST_OUTPUT_STRING_FIELDTEST);
// Change order of -string and -bigDecimal
doCompile(new String[] { "-bigDecimal", "-encoding", "UTF-8", "-string", "schema",
TEST_INPUT_DIR.toString() + "/fieldtest.avsc", TEST_INPUT_DIR.toString() + "/fieldtest.avsc",
TEST_OUTPUT_STRING_DIR.getPath() });
assertFileMatch(TEST_EXPECTED_STRING_FIELDTEST, TEST_OUTPUT_STRING_FIELDTEST);
// Keep encoding at the end
doCompile(new String[] { "-bigDecimal", "-string", "-encoding", "UTF-8", "schema",
TEST_INPUT_DIR.toString() + "/fieldtest.avsc", TEST_INPUT_DIR.toString() + "/fieldtest.avsc",
TEST_OUTPUT_STRING_DIR.getPath() });
assertFileMatch(TEST_EXPECTED_STRING_FIELDTEST, TEST_OUTPUT_STRING_FIELDTEST);
}
@Test
void compileProtocol() throws Exception {
doCompile(new String[] { "-encoding", "UTF-8", "protocol", TEST_INPUT_DIR.toString() + "/proto.avpr",
TEST_OUTPUT_STRING_DIR.getPath() });
assertFileMatch(TEST_EXPECTED_STRING_PROTO, TEST_OUTPUT_STRING_PROTO);
}
// Runs the actual compiler tool with the given input args
private void doCompile(String[] args) throws Exception {
SpecificCompilerTool tool = new SpecificCompilerTool();
tool.run(null, null, null, Arrays.asList((args)));
}
/**
* Verify that the generated Java files match the expected. This approach has
* room for improvement, since we're currently just verify that the text
* matches, which can be brittle if the code generation formatting or method
* ordering changes for example. A better approach would be to compile the
* sources and do a deeper comparison.
*
* See
* https://download.oracle.com/javase/6/docs/api/javax/tools/JavaCompiler.html
*/
private static void assertFileMatch(File expected, File found) throws IOException {
assertEquals(readFile(expected), readFile(found),
"Found file: " + found + " does not match expected file: " + expected);
}
/**
* Not the best implementation, but does the job. Building full strings of the
* file content and comparing provides nice diffs via JUnit when failures occur.
*/
private static String readFile(File file) throws IOException {
BufferedReader reader = new BufferedReader(
new InputStreamReader(new FileInputStream(file), StandardCharsets.UTF_8));
StringBuilder sb = new StringBuilder();
String line = null;
boolean first = true;
while ((line = reader.readLine()) != null) {
if (!first) {
sb.append("\n");
first = false;
}
sb.append(line);
}
reader.close();
return sb.toString();
}
}
| TestSpecificCompilerTool |
java | elastic__elasticsearch | x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/RequestManager.java | {
"start": 744,
"end": 1336
} | interface ____ extends RateLimitable {
void execute(
InferenceInputs inferenceInputs,
RequestSender requestSender,
Supplier<Boolean> hasRequestCompletedFunction,
ActionListener<InferenceServiceResults> listener
);
// TODO For batching we'll add 2 new method: prepare(query, input, ...) which will allow the individual
// managers to implement their own batching
// executePreparedRequest() which will execute all prepared requests aka sends the batch
String inferenceEntityId();
// TODO: add service() and taskType()
}
| RequestManager |
java | mockito__mockito | mockito-integration-tests/inline-mocks-tests/src/test/java/org/mockitoinline/StaticRuleTest.java | {
"start": 421,
"end": 726
} | class ____ {
@Rule public MockitoRule mockitoRule = MockitoJUnit.rule();
@Mock private MockedStatic<UUID> mock;
@Test
public void runs() {
mock.when(UUID::randomUUID).thenReturn(new UUID(123, 456));
assertEquals(UUID.randomUUID(), new UUID(123, 456));
}
}
| StaticRuleTest |
java | apache__kafka | clients/src/test/java/org/apache/kafka/common/requests/AddPartitionsToTxnResponseTest.java | {
"start": 2104,
"end": 7565
} | class ____ {
protected final int throttleTimeMs = 10;
protected final String topicOne = "topic1";
protected final int partitionOne = 1;
protected final Errors errorOne = Errors.COORDINATOR_NOT_AVAILABLE;
protected final Errors errorTwo = Errors.NOT_COORDINATOR;
protected final String topicTwo = "topic2";
protected final int partitionTwo = 2;
protected final TopicPartition tp1 = new TopicPartition(topicOne, partitionOne);
protected final TopicPartition tp2 = new TopicPartition(topicTwo, partitionTwo);
protected Map<Errors, Integer> expectedErrorCounts;
protected Map<TopicPartition, Errors> errorsMap;
@BeforeEach
public void setUp() {
expectedErrorCounts = new EnumMap<>(Errors.class);
expectedErrorCounts.put(errorOne, 1);
expectedErrorCounts.put(errorTwo, 1);
errorsMap = new HashMap<>();
errorsMap.put(tp1, errorOne);
errorsMap.put(tp2, errorTwo);
}
@ParameterizedTest
@ApiKeyVersionsSource(apiKey = ApiKeys.ADD_PARTITIONS_TO_TXN)
public void testParse(short version) {
AddPartitionsToTxnTopicResultCollection topicCollection = new AddPartitionsToTxnTopicResultCollection();
AddPartitionsToTxnTopicResult topicResult = new AddPartitionsToTxnTopicResult();
topicResult.setName(topicOne);
topicResult.resultsByPartition().add(new AddPartitionsToTxnPartitionResult()
.setPartitionErrorCode(errorOne.code())
.setPartitionIndex(partitionOne));
topicResult.resultsByPartition().add(new AddPartitionsToTxnPartitionResult()
.setPartitionErrorCode(errorTwo.code())
.setPartitionIndex(partitionTwo));
topicCollection.add(topicResult);
if (version < 4) {
AddPartitionsToTxnResponseData data = new AddPartitionsToTxnResponseData()
.setResultsByTopicV3AndBelow(topicCollection)
.setThrottleTimeMs(throttleTimeMs);
AddPartitionsToTxnResponse response = new AddPartitionsToTxnResponse(data);
AddPartitionsToTxnResponse parsedResponse = AddPartitionsToTxnResponse.parse(response.serialize(version), version);
assertEquals(expectedErrorCounts, parsedResponse.errorCounts());
assertEquals(throttleTimeMs, parsedResponse.throttleTimeMs());
assertEquals(version >= 1, parsedResponse.shouldClientThrottle(version));
} else {
AddPartitionsToTxnResultCollection results = new AddPartitionsToTxnResultCollection();
results.add(new AddPartitionsToTxnResult().setTransactionalId("txn1").setTopicResults(topicCollection));
// Create another transaction with new name and errorOne for a single partition.
Map<TopicPartition, Errors> txnTwoExpectedErrors = Collections.singletonMap(tp2, errorOne);
results.add(AddPartitionsToTxnResponse.resultForTransaction("txn2", txnTwoExpectedErrors));
AddPartitionsToTxnResponseData data = new AddPartitionsToTxnResponseData()
.setResultsByTransaction(results)
.setThrottleTimeMs(throttleTimeMs);
AddPartitionsToTxnResponse response = new AddPartitionsToTxnResponse(data);
Map<Errors, Integer> newExpectedErrorCounts = new EnumMap<>(Errors.class);
newExpectedErrorCounts.put(Errors.NONE, 1); // top level error
newExpectedErrorCounts.put(errorOne, 2);
newExpectedErrorCounts.put(errorTwo, 1);
AddPartitionsToTxnResponse parsedResponse = AddPartitionsToTxnResponse.parse(response.serialize(version), version);
assertEquals(txnTwoExpectedErrors, errorsForTransaction(response.getTransactionTopicResults("txn2")));
assertEquals(newExpectedErrorCounts, parsedResponse.errorCounts());
assertEquals(throttleTimeMs, parsedResponse.throttleTimeMs());
assertTrue(parsedResponse.shouldClientThrottle(version));
}
}
@Test
public void testBatchedErrors() {
Map<TopicPartition, Errors> txn1Errors = Collections.singletonMap(tp1, errorOne);
Map<TopicPartition, Errors> txn2Errors = Collections.singletonMap(tp1, errorOne);
AddPartitionsToTxnResult transaction1 = AddPartitionsToTxnResponse.resultForTransaction("txn1", txn1Errors);
AddPartitionsToTxnResult transaction2 = AddPartitionsToTxnResponse.resultForTransaction("txn2", txn2Errors);
AddPartitionsToTxnResultCollection results = new AddPartitionsToTxnResultCollection();
results.add(transaction1);
results.add(transaction2);
AddPartitionsToTxnResponse response = new AddPartitionsToTxnResponse(new AddPartitionsToTxnResponseData().setResultsByTransaction(results));
assertEquals(txn1Errors, errorsForTransaction(response.getTransactionTopicResults("txn1")));
assertEquals(txn2Errors, errorsForTransaction(response.getTransactionTopicResults("txn2")));
Map<String, Map<TopicPartition, Errors>> expectedErrors = new HashMap<>();
expectedErrors.put("txn1", txn1Errors);
expectedErrors.put("txn2", txn2Errors);
assertEquals(expectedErrors, response.errors());
}
}
| AddPartitionsToTxnResponseTest |
java | hibernate__hibernate-orm | hibernate-envers/src/main/java/org/hibernate/envers/boot/model/Bindable.java | {
"start": 347,
"end": 397
} | class ____, never {@code null}
*/
T build();
}
| type |
java | apache__flink | flink-connectors/flink-connector-files/src/main/java/org/apache/flink/connector/file/src/enumerate/FileEnumerator.java | {
"start": 2093,
"end": 2177
} | interface ____ extends Serializable {
FileEnumerator create();
}
}
| Provider |
java | apache__flink | flink-table/flink-table-api-scala/src/main/java/org/apache/flink/table/api/typeutils/Tuple2CaseClassSerializerSnapshot.java | {
"start": 1562,
"end": 2135
} | class ____<T1, T2>
extends CompositeTypeSerializerSnapshot<
Tuple2<T1, T2>, ScalaCaseClassSerializer<Tuple2<T1, T2>>> {
private static final int VERSION = 2;
private Class<Tuple2<T1, T2>> type;
@SuppressWarnings("unused")
public Tuple2CaseClassSerializerSnapshot() {}
public Tuple2CaseClassSerializerSnapshot(
ScalaCaseClassSerializer<Tuple2<T1, T2>> serializerInstance) {
super(serializerInstance);
this.type = checkNotNull(serializerInstance.getTupleClass(), "tuple | Tuple2CaseClassSerializerSnapshot |
java | spring-projects__spring-security | config/src/test/java/org/springframework/security/config/annotation/web/configurers/AuthorizeHttpRequestsConfigurerTests.java | {
"start": 56507,
"end": 57032
} | class ____ {
@Bean
SecurityFilterChain filterChain(HttpSecurity http) throws Exception {
// @formatter:off
return http
.httpBasic(withDefaults())
.authorizeHttpRequests((authorize) -> authorize
.anyRequest().authenticated()
)
.build();
// @formatter:on
}
@Bean
UserDetailsService users() {
return new InMemoryUserDetailsManager(
User.withUsername("user").password("{noop}password").roles("USER").build());
}
}
@Configuration
@EnableWebSecurity
static | AuthenticatedConfig |
java | spring-projects__spring-security | webauthn/src/main/java/org/springframework/security/web/webauthn/api/CredentialPropertiesOutput.java | {
"start": 1755,
"end": 2399
} | class ____ implements Serializable {
@Serial
private static final long serialVersionUID = 4557406414847424019L;
private final boolean rk;
private ExtensionOutput(boolean rk) {
this.rk = rk;
}
/**
* This OPTIONAL property, known abstractly as the resident key credential
* property (i.e., client-side discoverable credential property), is a Boolean
* value indicating whether the PublicKeyCredential returned as a result of a
* registration ceremony is a client-side discoverable credential.
* @return is resident key credential property
*/
public boolean isRk() {
return this.rk;
}
}
}
| ExtensionOutput |
java | apache__dubbo | dubbo-remoting/dubbo-remoting-websocket/src/main/java/org/apache/dubbo/remoting/websocket/WebSocketHeaderNames.java | {
"start": 855,
"end": 1092
} | enum ____ {
WEBSOCKET_MESSAGE("websocket-message");
private final String name;
WebSocketHeaderNames(String name) {
this.name = name;
}
public String getName() {
return name;
}
}
| WebSocketHeaderNames |
java | elastic__elasticsearch | x-pack/plugin/security/cli/src/test/java/org/elasticsearch/xpack/security/cli/CertificateToolTests.java | {
"start": 4960,
"end": 57547
} | class ____ extends ESTestCase {
private FileSystem jimfs;
private static final String CN_OID = "2.5.4.3";
private Path initTempDir() throws Exception {
Configuration conf = Configuration.unix().toBuilder().setAttributeViews("posix").build();
jimfs = Jimfs.newFileSystem(conf);
Path tempDir = jimfs.getPath("temp");
IOUtils.rm(tempDir);
Files.createDirectories(tempDir);
return tempDir;
}
@BeforeClass
public static void checkFipsJvm() {
assumeFalse("Can't run in a FIPS JVM, depends on Non FIPS BouncyCastle", inFipsJvm());
}
@After
public void tearDown() throws Exception {
IOUtils.close(jimfs);
super.tearDown();
}
public void testOutputDirectory() throws Exception {
Path outputDir = createTempDir();
Path outputFile = outputDir.resolve("certs.zip");
MockTerminal terminal = MockTerminal.create();
// test with a user provided file
Path resolvedOutputFile = CertificateCommand.resolveOutputPath(terminal, outputFile.toString(), "something");
assertEquals(outputFile, resolvedOutputFile);
assertTrue(terminal.getOutput().isEmpty());
// test without a user provided file, with user input (prompted)
Path userPromptedOutputFile = outputDir.resolve("csr");
assertFalse(Files.exists(userPromptedOutputFile));
terminal.addTextInput(userPromptedOutputFile.toString());
resolvedOutputFile = CertificateCommand.resolveOutputPath(terminal, (String) null, "default.zip");
assertEquals(userPromptedOutputFile, resolvedOutputFile);
assertTrue(terminal.getOutput().isEmpty());
// test with empty user input
String defaultFilename = randomAlphaOfLengthBetween(1, 10);
Path expectedDefaultPath = resolvePath(defaultFilename);
terminal.addTextInput("");
resolvedOutputFile = CertificateCommand.resolveOutputPath(terminal, (String) null, defaultFilename);
assertEquals(expectedDefaultPath, resolvedOutputFile);
assertTrue(terminal.getOutput().isEmpty());
}
public void testPromptingForInstanceInformation() throws Exception {
final int numberOfInstances = scaledRandomIntBetween(1, 12);
Map<String, Map<String, String>> instanceInput = Maps.newMapWithExpectedSize(numberOfInstances);
for (int i = 0; i < numberOfInstances; i++) {
final String name;
while (true) {
String randomName = getValidRandomInstanceName();
if (instanceInput.containsKey(randomName) == false) {
name = randomName;
break;
}
}
Map<String, String> instanceInfo = new HashMap<>();
instanceInput.put(name, instanceInfo);
instanceInfo.put("ip", randomFrom("127.0.0.1", "::1", "192.168.1.1,::1", ""));
instanceInfo.put("dns", randomFrom("localhost", "localhost.localdomain", "localhost,myhost", ""));
logger.info("instance [{}] name [{}] [{}]", i, name, instanceInfo);
}
int count = 0;
MockTerminal terminal = MockTerminal.create();
for (Entry<String, Map<String, String>> entry : instanceInput.entrySet()) {
terminal.addTextInput(entry.getKey());
terminal.addTextInput("");
terminal.addTextInput(entry.getValue().get("ip"));
terminal.addTextInput(entry.getValue().get("dns"));
count++;
if (count == numberOfInstances) {
terminal.addTextInput("n");
} else {
terminal.addTextInput("y");
}
}
Collection<CertificateInformation> certInfos = CertificateCommand.readMultipleCertificateInformation(terminal);
logger.info("certificate tool output:\n{}", terminal.getOutput());
assertEquals(numberOfInstances, certInfos.size());
for (CertificateInformation certInfo : certInfos) {
String name = certInfo.name.originalName;
Map<String, String> instanceInfo = instanceInput.get(name);
assertNotNull("did not find map for " + name, instanceInfo);
List<String> expectedIps = Arrays.asList(Strings.commaDelimitedListToStringArray(instanceInfo.get("ip")));
List<String> expectedDns = Arrays.asList(Strings.commaDelimitedListToStringArray(instanceInfo.get("dns")));
assertEquals(expectedIps, certInfo.ipAddresses);
assertEquals(expectedDns, certInfo.dnsNames);
instanceInput.remove(name);
}
assertEquals(0, instanceInput.size());
final String output = terminal.getOutput();
assertTrue("Output: " + output, output.isEmpty());
}
public void testParsingFile() throws Exception {
Path tempDir = initTempDir();
Path instanceFile = writeInstancesTo(tempDir.resolve("instances.yml"));
Collection<CertificateInformation> certInfos = CertificateTool.parseFile(instanceFile);
assertEquals(4, certInfos.size());
Map<String, CertificateInformation> certInfosMap = certInfos.stream()
.collect(Collectors.toMap((c) -> c.name.originalName, Function.identity()));
CertificateInformation certInfo = certInfosMap.get("node1");
assertEquals(Collections.singletonList("127.0.0.1"), certInfo.ipAddresses);
assertEquals(Collections.singletonList("localhost"), certInfo.dnsNames);
assertEquals(Collections.emptyList(), certInfo.commonNames);
assertEquals("node1", certInfo.name.filename);
certInfo = certInfosMap.get("node2");
assertEquals(Collections.singletonList("::1"), certInfo.ipAddresses);
assertEquals(Collections.emptyList(), certInfo.dnsNames);
assertEquals(Collections.singletonList("node2.elasticsearch"), certInfo.commonNames);
assertEquals("node2", certInfo.name.filename);
certInfo = certInfosMap.get("node3");
assertEquals(Collections.emptyList(), certInfo.ipAddresses);
assertEquals(Collections.emptyList(), certInfo.dnsNames);
assertEquals(Collections.emptyList(), certInfo.commonNames);
assertEquals("node3", certInfo.name.filename);
certInfo = certInfosMap.get("CN=different value");
assertEquals(Collections.emptyList(), certInfo.ipAddresses);
assertEquals(Collections.singletonList("node4.mydomain.com"), certInfo.dnsNames);
assertEquals(Collections.emptyList(), certInfo.commonNames);
assertEquals("different file", certInfo.name.filename);
}
public void testParsingFileWithInvalidDetails() throws Exception {
Path tempDir = initTempDir();
Path instanceFile = writeInvalidInstanceInformation(tempDir.resolve("instances-invalid.yml"));
final MockTerminal terminal = MockTerminal.create();
final UserException exception = expectThrows(
UserException.class,
() -> CertificateTool.parseAndValidateFile(terminal, instanceFile)
);
assertThat(exception.getMessage(), containsString("invalid configuration"));
assertThat(exception.getMessage(), containsString(instanceFile.toString()));
assertThat(terminal.getErrorOutput(), containsString("THIS=not a,valid DN"));
assertThat(terminal.getErrorOutput(), containsString("could not be converted to a valid DN"));
}
    /**
     * Generates CSRs for all instances listed in a YAML instances file and verifies the resulting
     * zip: owner-only file permissions, one CSR and private key per instance, the expected subject,
     * a subject alternative name extension when IP/DNS names were configured, and (optionally)
     * password protection of the private keys.
     */
    public void testGeneratingCsrFromInstancesFile() throws Exception {
        Path tempDir = initTempDir();
        Path outputFile = tempDir.resolve("out.zip");
        MockTerminal terminal = MockTerminal.create();
        final List<String> args = new ArrayList<>();
        Path instanceFile = writeInstancesTo(tempDir.resolve("instances.yml"));
        Collection<CertificateInformation> certInfos = CertificateTool.parseFile(instanceFile);
        assertEquals(4, certInfos.size());
        assertFalse(Files.exists(outputFile));
        int keySize = randomFrom(1024, 2048);
        // Randomly exercise both encrypted and unencrypted keys, and both ways of
        // supplying the password (command line argument vs terminal prompt).
        final boolean encrypt = randomBoolean();
        final String password = encrypt ? randomAlphaOfLengthBetween(8, 12) : null;
        if (encrypt) {
            args.add("--pass");
            if (randomBoolean()) {
                args.add(password);
            } else {
                // Queue one secret input per instance for the prompted-password path.
                for (var ignore : certInfos) {
                    terminal.addSecretInput(password);
                }
            }
        }
        final CertificateTool.SigningRequestCommand command = new CertificateTool.SigningRequestCommand();
        final OptionSet options = command.getParser().parse(Strings.toStringArray(args));
        command.generateAndWriteCsrs(terminal, options, outputFile, keySize, certInfos);
        assertTrue(Files.exists(outputFile));
        // The output zip must only be readable/writable by the owner.
        Set<PosixFilePermission> perms = Files.getPosixFilePermissions(outputFile);
        assertTrue(perms.toString(), perms.contains(PosixFilePermission.OWNER_READ));
        assertTrue(perms.toString(), perms.contains(PosixFilePermission.OWNER_WRITE));
        assertEquals(perms.toString(), 2, perms.size());
        final Path zipRoot = getRootPathOfZip(outputFile);
        // CSR-only mode must not generate a CA directory in the zip.
        assertFalse(Files.exists(zipRoot.resolve("ca")));
        for (CertificateInformation certInfo : certInfos) {
            String filename = certInfo.name.filename;
            assertTrue(Files.exists(zipRoot.resolve(filename)));
            final Path csr = zipRoot.resolve(filename + "/" + filename + ".csr");
            assertTrue(Files.exists(csr));
            PKCS10CertificationRequest request = readCertificateRequest(csr);
            assertEquals(certInfo.name.x500Principal.getName(), request.getSubject().toString());
            Attribute[] extensionsReq = request.getAttributes(PKCSObjectIdentifiers.pkcs_9_at_extensionRequest);
            if (certInfo.ipAddresses.size() > 0 || certInfo.dnsNames.size() > 0) {
                // IP/DNS names are requested via a single subjectAlternativeName extension.
                assertEquals(1, extensionsReq.length);
                Extensions extensions = Extensions.getInstance(extensionsReq[0].getAttributeValues()[0]);
                GeneralNames subjAltNames = GeneralNames.fromExtensions(extensions, Extension.subjectAlternativeName);
                assertSubjAltNames(subjAltNames, certInfo);
            } else {
                assertEquals(0, extensionsReq.length);
            }
            final Path keyPath = zipRoot.resolve(filename + "/" + filename + ".key");
            assertTrue(Files.exists(keyPath));
            // readPrivateKey also asserts the expected encryption state based on the password.
            PEMKeyPair key = readPrivateKey(keyPath, password);
            assertNotNull(key);
        }
    }
    /**
     * Generates a CSR for a single instance described entirely by command line options (name, dns,
     * ip, keysize, optional password) and verifies the resulting zip: owner-only permissions, the
     * expected subject, the SAN extension when dns/ip were given, and the private key.
     */
    public void testGeneratingCsrFromCommandLineParameters() throws Exception {
        Path tempDir = initTempDir();
        Path outputFile = tempDir.resolve("out.zip");
        MockTerminal terminal = MockTerminal.create();
        final List<String> args = new ArrayList<>();
        final int keySize = randomFrom(1024, 2048);
        args.add("--keysize");
        args.add(String.valueOf(keySize));
        final String name = randomAlphaOfLengthBetween(4, 16);
        args.add("--name");
        args.add(name);
        // Random (possibly empty) sets of DNS and IP subject alternative names.
        final List<String> dns = randomList(0, 4, () -> randomAlphaOfLengthBetween(4, 8) + "." + randomAlphaOfLengthBetween(2, 5));
        dns.stream().map(s -> "--dns=" + s).forEach(args::add);
        final List<String> ip = randomList(
            0,
            2,
            () -> Stream.generate(() -> randomIntBetween(10, 250)).limit(4).map(String::valueOf).collect(Collectors.joining("."))
        );
        ip.stream().map(s -> "--ip=" + s).forEach(args::add);
        // Randomly exercise key encryption, supplying the password on the command line or via prompt.
        final boolean encrypt = randomBoolean();
        final String password = encrypt ? randomAlphaOfLengthBetween(8, 12) : null;
        if (encrypt) {
            args.add("--pass");
            if (randomBoolean()) {
                args.add(password);
            } else {
                terminal.addSecretInput(password);
            }
        }
        final CertificateTool.SigningRequestCommand command = new CertificateTool.SigningRequestCommand();
        final OptionSet options = command.getParser().parse(Strings.toStringArray(args));
        command.generateAndWriteCsrs(terminal, options, outputFile);
        assertTrue(Files.exists(outputFile));
        // The output zip must only be readable/writable by the owner.
        Set<PosixFilePermission> perms = Files.getPosixFilePermissions(outputFile);
        assertTrue(perms.toString(), perms.contains(PosixFilePermission.OWNER_READ));
        assertTrue(perms.toString(), perms.contains(PosixFilePermission.OWNER_WRITE));
        assertEquals(perms.toString(), 2, perms.size());
        final Path zipRoot = getRootPathOfZip(outputFile);
        // CSR-only mode must not generate a CA directory in the zip.
        assertFalse(Files.exists(zipRoot.resolve("ca")));
        assertTrue(Files.exists(zipRoot.resolve(name)));
        final Path csr = zipRoot.resolve(name + "/" + name + ".csr");
        assertTrue(Files.exists(csr));
        PKCS10CertificationRequest request = readCertificateRequest(csr);
        assertEquals("CN=" + name, request.getSubject().toString());
        Attribute[] extensionsReq = request.getAttributes(PKCSObjectIdentifiers.pkcs_9_at_extensionRequest);
        if (dns.size() > 0 || ip.size() > 0) {
            // SANs are requested via a single subjectAlternativeName extension.
            assertEquals(1, extensionsReq.length);
            Extensions extensions = Extensions.getInstance(extensionsReq[0].getAttributeValues()[0]);
            GeneralNames subjAltNames = GeneralNames.fromExtensions(extensions, Extension.subjectAlternativeName);
            assertSubjAltNames(subjAltNames, ip, dns);
        } else {
            assertEquals(0, extensionsReq.length);
        }
        final Path keyPath = zipRoot.resolve(name + "/" + name + ".key");
        assertTrue(Files.exists(keyPath));
        // readPrivateKey also asserts the expected encryption state based on the password.
        PEMKeyPair key = readPrivateKey(keyPath, password);
        assertNotNull(key);
    }
    /**
     * Generates signed PEM certificates for all instances in a YAML instances file — either signed
     * by a test CA built here or self-signed — and verifies the zip layout, private-key encryption,
     * subjects, issuers and subject alternative names of the generated certificates.
     */
    public void testGeneratingSignedPemCertificates() throws Exception {
        Path tempDir = initTempDir();
        Path outputFile = tempDir.resolve("out.zip");
        Path instanceFile = writeInstancesTo(tempDir.resolve("instances.yml"));
        Collection<CertificateInformation> certInfos = CertificateTool.parseFile(instanceFile);
        assertEquals(4, certInfos.size());
        int keySize = randomFrom(1024, 2048);
        int days = randomIntBetween(1, 1024);
        KeyPair keyPair = CertGenUtils.generateKeyPair(keySize);
        List<String> caKeyUsage = randomBoolean() ? null : CertificateTool.DEFAULT_CA_KEY_USAGE;
        X509Certificate caCert = CertGenUtils.generateCACertificate(
            new X500Principal("CN=test ca"),
            keyPair,
            days,
            CertGenUtils.buildKeyUsage(caKeyUsage)
        );
        // Either sign with the CA built above (caInfo != null), or let the command
        // produce self-signed certificates (caInfo == null).
        final boolean selfSigned = randomBoolean();
        final String keyPassword = randomBoolean() ? SecuritySettingsSourceField.TEST_PASSWORD : null;
        assertFalse(Files.exists(outputFile));
        CAInfo caInfo = selfSigned
            ? null
            : new CAInfo(caCert, keyPair.getPrivate(), false, keyPassword == null ? null : keyPassword.toCharArray());
        final GenerateCertificateCommand command = new GenerateCertificateCommand();
        List<String> args = CollectionUtils.arrayAsArrayList("-keysize", String.valueOf(keySize), "-days", String.valueOf(days), "-pem");
        if (keyPassword != null) {
            args.add("-pass");
            args.add(keyPassword);
        }
        final OptionSet options = command.getParser().parse(Strings.toStringArray(args));
        command.generateAndWriteSignedCertificates(outputFile, true, options, certInfos, caInfo, null);
        assertTrue(Files.exists(outputFile));
        // The output zip must only be readable/writable by the owner.
        Set<PosixFilePermission> perms = Files.getPosixFilePermissions(outputFile);
        assertTrue(perms.toString(), perms.contains(PosixFilePermission.OWNER_READ));
        assertTrue(perms.toString(), perms.contains(PosixFilePermission.OWNER_WRITE));
        assertEquals(perms.toString(), 2, perms.size());
        final Path zipRoot = getRootPathOfZip(outputFile);
        // The CA itself must not be written into the output zip.
        assertFalse(Files.exists(zipRoot.resolve("ca")));
        for (CertificateInformation certInfo : certInfos) {
            String filename = certInfo.name.filename;
            assertTrue(Files.exists(zipRoot.resolve(filename)));
            final Path cert = zipRoot.resolve(filename + "/" + filename + ".crt");
            assertTrue(Files.exists(cert));
            Path keyFile = zipRoot.resolve(filename + "/" + filename + ".key");
            assertTrue(Files.exists(keyFile));
            // Keys must be AES-128-CBC encrypted if, and only if, a password was given.
            if (keyPassword != null) {
                assertTrue(Files.readString(keyFile).contains("DEK-Info: AES-128-CBC"));
            } else {
                assertFalse(Files.readString(keyFile).contains("DEK-Info:"));
            }
            final Path p12 = zipRoot.resolve(filename + "/" + filename + ".p12");
            try (InputStream input = Files.newInputStream(cert)) {
                X509Certificate certificate = readX509Certificate(input);
                assertEquals(certInfo.name.x500Principal.toString(), certificate.getSubjectX500Principal().getName());
                if (selfSigned) {
                    // A self-signed leaf is its own issuer and carries no CA basic constraint.
                    assertEquals(certificate.getSubjectX500Principal(), certificate.getIssuerX500Principal());
                    assertEquals(-1, certificate.getBasicConstraints());
                }
                final int sanCount = certInfo.ipAddresses.size() + certInfo.dnsNames.size() + certInfo.commonNames.size();
                if (sanCount == 0) {
                    assertNull(certificate.getSubjectAlternativeNames());
                } else {
                    X509CertificateHolder x509CertHolder = new X509CertificateHolder(certificate.getEncoded());
                    GeneralNames subjAltNames = GeneralNames.fromExtensions(
                        x509CertHolder.getExtensions(),
                        Extension.subjectAlternativeName
                    );
                    assertSubjAltNames(subjAltNames, certInfo);
                }
                // PEM mode must not additionally produce a PKCS#12 file.
                assertThat(p12, not(pathExists()));
            }
        }
    }
@SuppressWarnings("unchecked")
public void testErrorMessageOnInvalidKeepCaOption() {
final CertificateTool certificateTool = new CertificateTool();
final OptionSet optionSet = mock(OptionSet.class);
final ProcessInfo processInfo = new ProcessInfo(Map.of(), Map.of(), createTempDir());
when(optionSet.valuesOf(any(OptionSpec.class))).thenAnswer(invocation -> {
if (invocation.getArguments()[0] instanceof NonOptionArgumentSpec) {
return List.of("cert", "--keep-ca-key");
} else {
return List.of();
}
});
final UserException e = expectThrows(
UserException.class,
() -> certificateTool.execute(MockTerminal.create(), optionSet, processInfo)
);
assertThat(e.getMessage(), containsString("Generating certificates without providing a CA is no longer supported"));
}
    /**
     * Verifies the handling of passwords longer than 50 characters: the tool's output mentions the
     * 50 character limit and OpenSSL 1.1.0, the user can decline at the prompt and retry, and the
     * CA keystore and generated certificate/key remain readable with the long password.
     */
    public void testHandleLongPasswords() throws Exception {
        final Path tempDir = initTempDir();
        final MockTerminal terminal = MockTerminal.create();
        Environment env = TestEnvironment.newEnvironment(Settings.builder().put("path.home", tempDir).build());
        final Path caFile = tempDir.resolve("ca.p12");
        final Path pemZipFile = tempDir.resolve("cert.zip").toAbsolutePath();
        final String longPassword = randomAlphaOfLengthBetween(51, 256);
        // Randomly supply the password via prompt ("-pass") or inline ("-pass=<value>").
        boolean expectPrompt = randomBoolean();
        final CertificateAuthorityCommand caCommand = new PathAwareCertificateAuthorityCommand(caFile);
        final OptionSet gen1Options = caCommand.getParser()
            .parse("-ca-dn", "CN=Test-Ca", (expectPrompt ? "-pass" : "-pass=" + longPassword), "-out", caFile.toString());
        final ProcessInfo processInfo = new ProcessInfo(Map.of(), Map.of(), createTempDir());
        if (expectPrompt) {
            terminal.addSecretInput(longPassword);
            terminal.addTextInput("y"); // Yes, really use it
        }
        caCommand.execute(terminal, gen1Options, env, processInfo);
        // The output warns about the 50 character limit and references OpenSSL 1.1.0.
        assertThat(terminal.getOutput(), containsString("50 characters"));
        assertThat(terminal.getOutput(), containsString("OpenSSL"));
        assertThat(terminal.getOutput(), containsString("1.1.0"));
        terminal.reset();
        final GenerateCertificateCommand genCommand = new PathAwareGenerateCertificateCommand(caFile, pemZipFile);
        final OptionSet gen2Options = genCommand.getParser()
            .parse(
                "-ca",
                "<ca>",
                "-ca-pass",
                longPassword,
                (expectPrompt ? "-pass" : "-pass=" + longPassword),
                "-out",
                "<node2>",
                "-name",
                "cert",
                "-pem"
            );
        if (expectPrompt) {
            // Declining the confirmation re-prompts for a password; accept on the second attempt.
            terminal.addSecretInput(longPassword);
            terminal.addTextInput("n"); // No, don't really use it
            terminal.addSecretInput(longPassword);
            terminal.addTextInput("y"); // This time, yes we will use it
        }
        genCommand.execute(terminal, gen2Options, env, processInfo);
        assertThat(terminal.getOutput(), containsString("50 characters"));
        assertThat(terminal.getOutput(), containsString("OpenSSL"));
        assertThat(terminal.getOutput(), containsString("1.1.0"));
        assertThat(pemZipFile, pathExists());
        // The long password must still unlock both the CA keystore and the generated key.
        final KeyStore caKeyStore = KeyStoreUtil.readKeyStore(caFile, "PKCS12", longPassword.toCharArray());
        Certificate caCert = caKeyStore.getCertificate("ca");
        assertThat(caCert, notNullValue());
        final Path zipRoot = getRootPathOfZip(pemZipFile);
        final Path keyPath = zipRoot.resolve("cert/cert.key");
        final PrivateKey key = PemUtils.readPrivateKey(keyPath, () -> longPassword.toCharArray());
        assertThat(key, notNullValue());
        final Path certPath = zipRoot.resolve("cert/cert.crt");
        final List<Certificate> certificates = PemUtils.readCertificates(List.of(certPath));
        assertThat(certificates, hasSize(1));
        // The generated certificate must be issued by the CA created above.
        assertThat(
            ((X509Certificate) certificates.get(0)).getIssuerX500Principal(),
            equalTo(((X509Certificate) caCert).getSubjectX500Principal())
        );
    }
    /**
     * Exercises {@code GenerateCertificateCommand#getCAInfo} in three modes: loading an existing CA
     * cert/key pair from disk, failing when no CA source is specified, and returning null for the
     * self-signed mode.
     */
    public void testGetCAInfo() throws Exception {
        Environment env = TestEnvironment.newEnvironment(Settings.builder().put("path.home", createTempDir()).build());
        Path testNodeCertPath = getDataPath("/org/elasticsearch/xpack/security/cli/testnode.crt");
        Path testNodeKeyPath = getDataPath("/org/elasticsearch/xpack/security/cli/testnode.pem");
        final boolean passwordPrompt = randomBoolean();
        MockTerminal terminal = MockTerminal.create();
        if (passwordPrompt) {
            terminal.addSecretInput("testnode");
        }
        final int keySize = randomFrom(1024, 2048);
        final int days = randomIntBetween(1, 1024);
        // caPassword == null means the password is read from the terminal prompt instead.
        String caPassword = passwordPrompt ? null : "testnode";
        List<String> args = CollectionUtils.arrayAsArrayList(
            "-keysize",
            String.valueOf(keySize),
            "-days",
            String.valueOf(days),
            "-pem",
            "-ca-cert",
            testNodeCertPath.toString(),
            "-ca-key",
            testNodeKeyPath.toString()
        );
        args.add("-ca-pass");
        if (caPassword != null) {
            args.add(caPassword);
        }
        final GenerateCertificateCommand command = new GenerateCertificateCommand();
        OptionSet options = command.getParser().parse(Strings.toStringArray(args));
        CAInfo caInfo = command.getCAInfo(terminal, options, env);
        assertTrue(terminal.getOutput().isEmpty());
        // The loaded CA must match the fixed test fixture: subject, 2048-bit RSA key, 1460-day validity.
        CertificateTool.CertificateAndKey caCK = caInfo.certAndKey;
        assertEquals(caCK.cert.getSubjectX500Principal().getName(), "CN=Elasticsearch Test Node,OU=elasticsearch,O=org");
        assertThat(caCK.key.getAlgorithm(), containsString("RSA"));
        assertEquals(2048, ((RSAKey) caCK.key).getModulus().bitLength());
        assertFalse(caInfo.generated);
        long daysBetween = getDurationInDays(caCK.cert);
        assertEquals(1460L, daysBetween);
        // test generation
        args = CollectionUtils.arrayAsArrayList(
            "-keysize",
            String.valueOf(keySize),
            "-days",
            String.valueOf(days),
            "-pem",
            "-ca-dn",
            "CN=foo bar"
        );
        final boolean passwordProtected = randomBoolean();
        if (passwordProtected) {
            args.add("-ca-pass");
            if (passwordPrompt) {
                terminal.addSecretInput("testnode");
            } else {
                args.add(caPassword);
            }
        }
        // Without --ca, --ca-cert/--ca-key or --self-signed, resolving the CA info must fail.
        final OptionSet options2 = command.getParser().parse(Strings.toStringArray(args));
        final UserException e = expectThrows(UserException.class, () -> command.getCAInfo(terminal, options2, env));
        assertThat(e.getMessage(), containsString("Must specify either --ca or --ca-cert/--ca-key or --self-signed"));
        // test self-signed
        args = CollectionUtils.arrayAsArrayList("-self-signed");
        options = command.getParser().parse(Strings.toStringArray(args));
        assertNull(command.getCAInfo(terminal, options, env));
    }
    /**
     * Verifies {@code Name#fromUserProvidedName}: a valid name yields a CN principal and a filename,
     * while null, empty, over-long, or character-wise invalid names record an error and leave the
     * unusable parts (principal and/or filename) null.
     */
    public void testNameValues() throws Exception {
        // good name
        Name name = Name.fromUserProvidedName("my instance", "my instance");
        assertEquals("my instance", name.originalName);
        assertNull(name.error);
        assertEquals("CN=my instance", name.x500Principal.getName());
        assertEquals("my instance", name.filename);
        // null
        name = Name.fromUserProvidedName(null, "");
        assertEquals("", name.originalName);
        assertThat(name.error, containsString("null"));
        assertNull(name.x500Principal);
        assertNull(name.filename);
        // too long (exceeds MAX_FILENAME_LENGTH, so unusable as a filename)
        String userProvidedName = randomAlphaOfLength(CertificateTool.MAX_FILENAME_LENGTH + 1);
        name = Name.fromUserProvidedName(userProvidedName, userProvidedName);
        assertEquals(userProvidedName, name.originalName);
        assertThat(name.error, containsString("valid filename"));
        // too short
        name = Name.fromUserProvidedName("", "");
        assertEquals("", name.originalName);
        assertThat(name.error, containsString("valid filename"));
        assertEquals("CN=", String.valueOf(name.x500Principal));
        assertNull(name.filename);
        // invalid characters only
        userProvidedName = "<>|<>*|?\"\\";
        name = Name.fromUserProvidedName(userProvidedName, userProvidedName);
        assertEquals(userProvidedName, name.originalName);
        assertThat(name.error, containsString("valid DN"));
        assertNull(name.x500Principal);
        assertNull(name.filename);
        // invalid for file but DN ok
        userProvidedName = "*";
        name = Name.fromUserProvidedName(userProvidedName, userProvidedName);
        assertEquals(userProvidedName, name.originalName);
        assertThat(name.error, containsString("valid filename"));
        assertEquals("CN=" + userProvidedName, name.x500Principal.getName());
        assertNull(name.filename);
        // invalid with valid chars for filename
        userProvidedName = "*.mydomain.com";
        name = Name.fromUserProvidedName(userProvidedName, userProvidedName);
        assertEquals(userProvidedName, name.originalName);
        assertThat(name.error, containsString("valid filename"));
        assertEquals("CN=" + userProvidedName, name.x500Principal.getName());
        // valid but could create hidden file/dir so it is not allowed
        userProvidedName = ".mydomain.com";
        name = Name.fromUserProvidedName(userProvidedName, userProvidedName);
        assertEquals(userProvidedName, name.originalName);
        assertThat(name.error, containsString("valid filename"));
        assertEquals("CN=" + userProvidedName, name.x500Principal.getName());
    }
    /**
     * A multi-stage test that:
     * - Create a new CA
     * - Uses that CA to create 2 node certificates
     * - Creates a 3rd node certificate as a self-signed cert
     * - Checks that the first 2 node certificates trust one another
     * - Checks that the 3rd node certificate is _not_ trusted
     * - Checks that all 3 certificates have the right values based on the command line options provided during generation
     */
    public void testCreateCaAndMultipleInstances() throws Exception {
        final Path tempDir = initTempDir();
        final MockTerminal terminal = MockTerminal.create();
        Environment env = TestEnvironment.newEnvironment(Settings.builder().put("path.home", tempDir).build());
        final Path caFile = tempDir.resolve("ca.p12");
        final Path node1File = tempDir.resolve("node1.p12").toAbsolutePath();
        final Path node2File = tempDir.resolve("node2.p12").toAbsolutePath();
        final Path node3File = tempDir.resolve("node3.p12").toAbsolutePath();
        // Each node gets its own randomized key size, password and IP so the assertions at the end
        // can verify that the per-node command line options were honoured.
        final int node1KeySize = randomIntBetween(2, 6) * 512;
        final int node2KeySize = randomIntBetween(2, 6) * 512;
        final int node3KeySize = randomIntBetween(1, 4) * 512;
        final int days = randomIntBetween(7, 1500);
        final String node1Password = randomFrom("", randomAlphaOfLengthBetween(4, 16));
        final String node2Password = randomFrom("", randomAlphaOfLengthBetween(4, 16));
        final String node3Password = randomFrom("", randomAlphaOfLengthBetween(4, 16));
        final String node1Ip = "200.181." + randomIntBetween(1, 250) + "." + randomIntBetween(1, 250);
        final String node2Ip = "200.182." + randomIntBetween(1, 250) + "." + randomIntBetween(1, 250);
        final String node3Ip = "200.183." + randomIntBetween(1, 250) + "." + randomIntBetween(1, 250);
        final String caPassword = generateCA(caFile, terminal, env, false);
        final GenerateCertificateCommand gen1Command = new PathAwareGenerateCertificateCommand(caFile, node1File);
        final OptionSet gen1Options = gen1Command.getParser()
            .parse(
                "-ca",
                "<ca>",
                "-ca-pass",
                caPassword,
                "-pass",
                node1Password,
                "-out",
                "<node1>",
                "-keysize",
                String.valueOf(node1KeySize),
                "-days",
                String.valueOf(days),
                "-dns",
                "node01.cluster1.es.internal.corp.net",
                "-ip",
                node1Ip,
                "-name",
                "node01"
            );
        final ProcessInfo processInfo = new ProcessInfo(Map.of(), Map.of(), createTempDir());
        gen1Command.execute(terminal, gen1Options, env, processInfo);
        assertThat(node1File, pathExists());
        final GenerateCertificateCommand gen2Command = new PathAwareGenerateCertificateCommand(caFile, node2File);
        final OptionSet gen2Options = gen2Command.getParser()
            .parse(
                "-ca",
                "<ca>",
                "-ca-pass",
                caPassword,
                "-pass",
                node2Password,
                "-out",
                "<node2>",
                "-keysize",
                String.valueOf(node2KeySize),
                "-days",
                String.valueOf(days),
                "-dns",
                "node02.cluster1.es.internal.corp.net",
                "-ip",
                node2Ip,
                "-name",
                "node02"
            );
        gen2Command.execute(terminal, gen2Options, env, processInfo);
        assertThat(node2File, pathExists());
        // Node 3 uses an auto generated CA or a self-signed cert, and therefore should not be trusted by the other nodes.
        final List<String> gen3Args = CollectionUtils.arrayAsArrayList(
            "-pass",
            node3Password,
            "-out",
            "<node3>",
            "-keysize",
            String.valueOf(node3KeySize),
            "-days",
            String.valueOf(days),
            "-dns",
            "node03.cluster2.es.internal.corp.net",
            "-ip",
            node3Ip
        );
        gen3Args.add("-self-signed");
        final GenerateCertificateCommand gen3Command = new PathAwareGenerateCertificateCommand(Map.of(), node3File);
        final OptionSet gen3Options = gen3Command.getParser().parse(Strings.toStringArray(gen3Args));
        gen3Command.execute(terminal, gen3Options, env, processInfo);
        assertThat(node3File, pathExists());
        final KeyStore node1KeyStore = KeyStoreUtil.readKeyStore(node1File, "PKCS12", node1Password.toCharArray());
        final KeyStore node2KeyStore = KeyStoreUtil.readKeyStore(node2File, "PKCS12", node2Password.toCharArray());
        final KeyStore node3KeyStore = KeyStoreUtil.readKeyStore(node3File, "PKCS12", node3Password.toCharArray());
        // node1 and node2 share a CA and must trust each other; self-signed node3 must not be trusted.
        checkTrust(node1KeyStore, node1Password.toCharArray(), node1KeyStore, true);
        checkTrust(node1KeyStore, node1Password.toCharArray(), node2KeyStore, true);
        checkTrust(node2KeyStore, node2Password.toCharArray(), node2KeyStore, true);
        checkTrust(node2KeyStore, node2Password.toCharArray(), node1KeyStore, true);
        checkTrust(node1KeyStore, node1Password.toCharArray(), node3KeyStore, false);
        checkTrust(node3KeyStore, node3Password.toCharArray(), node2KeyStore, false);
        checkTrust(node3KeyStore, node3Password.toCharArray(), node3KeyStore, true);
        // Verify SANs, validity period and key size for each node certificate.
        final Certificate node1Cert = node1KeyStore.getCertificate("node01");
        assertThat(node1Cert, instanceOf(X509Certificate.class));
        assertSubjAltNames(node1Cert, node1Ip, "node01.cluster1.es.internal.corp.net");
        assertThat(getDurationInDays((X509Certificate) node1Cert), equalTo(days));
        final Key node1Key = node1KeyStore.getKey("node01", node1Password.toCharArray());
        assertThat(getKeySize(node1Key), equalTo(node1KeySize));
        final Certificate node2Cert = node2KeyStore.getCertificate("node02");
        assertThat(node2Cert, instanceOf(X509Certificate.class));
        assertSubjAltNames(node2Cert, node2Ip, "node02.cluster1.es.internal.corp.net");
        assertThat(getDurationInDays((X509Certificate) node2Cert), equalTo(days));
        final Key node2Key = node2KeyStore.getKey("node02", node2Password.toCharArray());
        assertThat(getKeySize(node2Key), equalTo(node2KeySize));
        final Certificate node3Cert = node3KeyStore.getCertificate(CertificateTool.DEFAULT_CERT_NAME);
        assertThat(node3Cert, instanceOf(X509Certificate.class));
        assertSubjAltNames(node3Cert, node3Ip, "node03.cluster2.es.internal.corp.net");
        assertThat(getDurationInDays((X509Certificate) node3Cert), equalTo(days));
        final Key node3Key = node3KeyStore.getKey(CertificateTool.DEFAULT_CERT_NAME, node3Password.toCharArray());
        assertThat(getKeySize(node3Key), equalTo(node3KeySize));
        // The self-signed cert must have a single-element chain and be its own issuer.
        final Certificate[] certificateChain = node3KeyStore.getCertificateChain(CertificateTool.DEFAULT_CERT_NAME);
        final X509Certificate node3x509Certificate = (X509Certificate) certificateChain[0];
        assertEquals(1, certificateChain.length);
        assertEquals(node3x509Certificate.getSubjectX500Principal(), node3x509Certificate.getIssuerX500Principal());
    }
    /**
     * A multi-stage test that:
     * - Creates a ZIP of a PKCS12 cert, with an auto-generated CA
     * - Uses the generate CA to create a PEM certificate
     * - Checks that the PKCS12 certificate and the PEM certificate trust one another
     */
    public void testTrustBetweenPEMandPKCS12() throws Exception {
        final Path tempDir = initTempDir();
        final MockTerminal terminal = MockTerminal.create();
        Environment env = TestEnvironment.newEnvironment(Settings.builder().put("path.home", tempDir).build());
        final Path caFile = tempDir.resolve("ca.p12");
        final String caPassword = generateCA(caFile, terminal, env, false);
        final Path node1Pkcs12 = tempDir.resolve("node1.p12");
        final Path pemZip = tempDir.resolve("pem.zip");
        final int keySize = randomIntBetween(4, 8) * 512;
        final int days = randomIntBetween(500, 1500);
        final String node1Password = randomAlphaOfLengthBetween(4, 16);
        final GenerateCertificateCommand gen1Command = new PathAwareGenerateCertificateCommand(caFile, node1Pkcs12);
        final OptionSet gen1Options = gen1Command.getParser()
            .parse(
                "-ca",
                "<ca>",
                "-ca-pass",
                caPassword,
                "-out",
                "<zip>",
                "-keysize",
                String.valueOf(keySize),
                "-days",
                String.valueOf(days),
                "-dns",
                "node01.cluster1.es.internal.corp.net",
                "-name",
                "node01"
            );
        final ProcessInfo processInfo = new ProcessInfo(Map.of(), Map.of(), createTempDir());
        // No -pass was given, so the command prompts for the PKCS#12 password.
        terminal.addSecretInput(node1Password);
        gen1Command.execute(terminal, gen1Options, env, processInfo);
        assertThat(node1Pkcs12, pathExists());
        final GenerateCertificateCommand gen2Command = new PathAwareGenerateCertificateCommand(caFile, pemZip);
        final OptionSet gen2Options = gen2Command.getParser()
            .parse(
                "-ca",
                "<ca>",
                "-out",
                "<zip>",
                "-keysize",
                String.valueOf(keySize),
                "-days",
                String.valueOf(days),
                "-dns",
                "node02.cluster1.es.internal.corp.net",
                "-name",
                "node02",
                "-pem"
            );
        // No -ca-pass was given, so the command prompts for the CA password.
        terminal.addSecretInput(caPassword);
        gen2Command.execute(terminal, gen2Options, env, processInfo);
        assertThat(pemZip, pathExists());
        final Path zip2Root = getRootPathOfZip(pemZip);
        // The CA must not be copied into the PEM output zip.
        final Path ca2 = zip2Root.resolve("ca/ca.p12");
        assertThat(ca2, not(pathExists()));
        final Path node2Cert = zip2Root.resolve("node02/node02.crt");
        assertThat(node2Cert, pathExists());
        final Path node2Key = zip2Root.resolve("node02/node02.key");
        assertThat(node2Key, pathExists());
        // Both certificates were signed by the same CA, so each must trust the other.
        final KeyStore node1KeyStore = KeyStoreUtil.readKeyStore(node1Pkcs12, "PKCS12", node1Password.toCharArray());
        final KeyStore node1TrustStore = node1KeyStore;
        final KeyStore node2KeyStore = CertParsingUtils.getKeyStoreFromPEM(node2Cert, node2Key, new char[0]);
        final KeyStore node2TrustStore = KeyStoreUtil.readKeyStore(caFile, "PKCS12", caPassword.toCharArray());
        checkTrust(node1KeyStore, node1Password.toCharArray(), node2TrustStore, true);
        checkTrust(node2KeyStore, new char[0], node1TrustStore, true);
    }
    /**
     * Verifies that any of the options -pem, -multiple or -in makes the generate command request
     * zip output. Certificate generation itself is stubbed out so only the zip flag is observed.
     */
    public void testZipOutputFromCommandLineOptions() throws Exception {
        final Path tempDir = initTempDir();
        final MockTerminal terminal = MockTerminal.create();
        Environment env = TestEnvironment.newEnvironment(Settings.builder().put("path.home", tempDir).build());
        final Path zip = tempDir.resolve("pem.zip");
        final AtomicBoolean isZip = new AtomicBoolean(false);
        final GenerateCertificateCommand genCommand = new PathAwareGenerateCertificateCommand(Map.of(), zip) {
            @Override
            void generateAndWriteSignedCertificates(
                Path output,
                boolean writeZipFile,
                OptionSet options,
                Collection<CertificateInformation> certs,
                CAInfo caInfo,
                Terminal terminal
            ) throws Exception {
                isZip.set(writeZipFile);
                // do nothing, all we care about is the "zip" flag
            }
            @Override
            Collection<CertificateInformation> getCertificateInformationList(Terminal terminal, OptionSet options) throws Exception {
                // Regardless of the commandline options, just work with a single cert
                return Collections.singleton(
                    new CertificateInformation("node", "node", Collections.emptyList(), Collections.emptyList(), Collections.emptyList())
                );
            }
        };
        // Each of these options must independently trigger zip output.
        final String optionThatTriggersZip = randomFrom("-pem", "-multiple", "-in=input.yml");
        final OptionSet genOptions = genCommand.getParser().parse("--self-signed", "-out", "<zip>", optionThatTriggersZip);
        final ProcessInfo processInfo = new ProcessInfo(Map.of(), Map.of(), createTempDir());
        genCommand.execute(terminal, genOptions, env, processInfo);
        assertThat("For command line option " + optionThatTriggersZip, isZip.get(), equalTo(true));
    }
    /**
     * Generating a certificate with a CA certificate from one CA but the private key of a different
     * CA must fail with a config-level error and produce no output file; the matching cert/key pair
     * from the same CA must then succeed.
     */
    public void testErrorIfSigningCertificateAndKeyDontMatch() throws Exception {
        final Path tempDir = initTempDir();
        final var terminal = MockTerminal.create();
        final var env = TestEnvironment.newEnvironment(Settings.builder().put("path.home", tempDir).build());
        final var processInfo = new ProcessInfo(Map.of(), Map.of(), createTempDir());
        // Build two independent CAs so we can mix a cert from one with a key from the other.
        final Path ca1zip = tempDir.resolve("ca1.zip");
        final String ca1Password = generateCA(ca1zip, terminal, env, true);
        terminal.reset();
        final Path ca2zip = tempDir.resolve("ca2.zip");
        final String ca2Password = generateCA(ca2zip, terminal, env, true);
        var ca1Root = getRootPathOfZip(ca1zip);
        var ca1Cert = ca1Root.resolve("ca/ca.crt");
        var ca1Key = ca1Root.resolve("ca/ca.key");
        var ca2Root = getRootPathOfZip(ca2zip);
        var ca2Key = ca2Root.resolve("ca/ca.key");
        var p12Out = tempDir.resolve("certs.p12");
        var p12Password = randomAlphaOfLength(8);
        // Mismatched pair: ca1's certificate with ca2's key must be rejected before any output is written.
        final var gen1Command = new PathAwareGenerateCertificateCommand(Map.of("ca-cert", ca1Cert, "ca-key", ca2Key), p12Out);
        final var gen1Options = gen1Command.getParser()
            .parse("--ca-cert", "<ca_crt>", "--ca-key", "<ca_key>", "--ca-pass", ca2Password, "--out", "<p12>", "--pass", p12Password);
        final UserException e = expectThrows(UserException.class, () -> gen1Command.execute(terminal, gen1Options, env, processInfo));
        assertThat(e.exitCode, is(ExitCodes.CONFIG));
        assertThat(e.getMessage(), containsString("Certificate verification failed"));
        assertThat(p12Out, not(pathExists()));
        // Matching pair: the same CA's certificate and key must succeed.
        final var gen2Command = new PathAwareGenerateCertificateCommand(Map.of("ca-cert", ca1Cert, "ca-key", ca1Key), p12Out);
        final var gen2Options = gen2Command.getParser()
            .parse("--ca-cert", "<ca_crt>", "--ca-key", "<ca_key>", "--ca-pass", ca1Password, "--out", "<p12>", "--pass", p12Password);
        gen2Command.execute(terminal, gen2Options, env, processInfo);
        assertThat(p12Out, pathExists());
    }
private int getKeySize(Key node1Key) {
assertThat(node1Key, instanceOf(RSAKey.class));
return ((RSAKey) node1Key).getModulus().bitLength();
}
private int getDurationInDays(X509Certificate cert) {
return (int) ChronoUnit.DAYS.between(cert.getNotBefore().toInstant(), cert.getNotAfter().toInstant());
}
/**
* Checks whether there are keys in {@code keyStore} that are trusted by {@code trustStore}.
*/
private void checkTrust(KeyStore keyStore, char[] keyPassword, KeyStore trustStore, boolean trust) throws Exception {
final X509ExtendedKeyManager keyManager = createKeyManager(keyStore, keyPassword, KeyManagerFactory.getDefaultAlgorithm());
final X509ExtendedTrustManager trustManager = createTrustManager(trustStore, TrustManagerFactory.getDefaultAlgorithm());
final X509Certificate[] node1CertificateIssuers = trustManager.getAcceptedIssuers();
final Principal[] trustedPrincipals = new Principal[node1CertificateIssuers.length];
for (int i = 0; i < node1CertificateIssuers.length; i++) {
trustedPrincipals[i] = node1CertificateIssuers[i].getIssuerX500Principal();
}
final String[] keyAliases = keyManager.getClientAliases("RSA", trustedPrincipals);
if (trust) {
assertThat(keyAliases, arrayWithSize(1));
trustManager.checkClientTrusted(keyManager.getCertificateChain(keyAliases[0]), "RSA");
} else {
assertThat(keyAliases, nullValue());
}
}
private PKCS10CertificationRequest readCertificateRequest(Path path) throws Exception {
try (Reader reader = Files.newBufferedReader(path); PEMParser pemParser = new PEMParser(reader)) {
Object object = pemParser.readObject();
assertThat(object, instanceOf(PKCS10CertificationRequest.class));
return (PKCS10CertificationRequest) object;
}
}
private PEMKeyPair readPrivateKey(Path path, String password) throws Exception {
try (Reader reader = Files.newBufferedReader(path); PEMParser pemParser = new PEMParser(reader)) {
Object object = pemParser.readObject();
if (password == null) {
assertThat(object, instanceOf(PEMKeyPair.class));
return (PEMKeyPair) object;
} else {
assertThat(object, instanceOf(PEMEncryptedKeyPair.class));
final PEMEncryptedKeyPair encryptedKeyPair = (PEMEncryptedKeyPair) object;
assertThat(encryptedKeyPair.getDekAlgName(), is("AES-128-CBC"));
return encryptedKeyPair.decryptKeyPair(new BcPEMDecryptorProvider(password.toCharArray()));
}
}
}
private X509Certificate readX509Certificate(InputStream input) throws Exception {
List<Certificate> list = CertParsingUtils.readCertificates(input);
assertEquals(1, list.size());
assertThat(list.get(0), instanceOf(X509Certificate.class));
return (X509Certificate) list.get(0);
}
private void assertSubjAltNames(Certificate certificate, String ip, String dns) throws Exception {
final X509CertificateHolder holder = new X509CertificateHolder(certificate.getEncoded());
final GeneralNames names = GeneralNames.fromExtensions(holder.getExtensions(), Extension.subjectAlternativeName);
assertSubjAltNames(names, Collections.singletonList(ip), Collections.singletonList(dns));
}
private void assertSubjAltNames(GeneralNames generalNames, List<String> ip, List<String> dns) throws Exception {
final CertificateInformation certInfo = new CertificateInformation("n", "n", ip, dns, Collections.emptyList());
assertSubjAltNames(generalNames, certInfo);
}
private void assertSubjAltNames(GeneralNames subjAltNames, CertificateInformation certInfo) throws Exception {
final int expectedCount = certInfo.ipAddresses.size() + certInfo.dnsNames.size() + certInfo.commonNames.size();
assertEquals(expectedCount, subjAltNames.getNames().length);
Collections.sort(certInfo.dnsNames);
Collections.sort(certInfo.ipAddresses);
for (GeneralName generalName : subjAltNames.getNames()) {
if (generalName.getTagNo() == GeneralName.dNSName) {
String dns = ((ASN1String) generalName.getName()).getString();
assertTrue(certInfo.dnsNames.stream().anyMatch(dns::equals));
} else if (generalName.getTagNo() == GeneralName.iPAddress) {
byte[] ipBytes = DEROctetString.getInstance(generalName.getName()).getOctets();
String ip = NetworkAddress.format(InetAddress.getByAddress(ipBytes));
assertTrue(certInfo.ipAddresses.stream().anyMatch(ip::equals));
} else if (generalName.getTagNo() == GeneralName.otherName) {
ASN1Sequence seq = ASN1Sequence.getInstance(generalName.getName());
assertThat(seq.size(), equalTo(2));
assertThat(seq.getObjectAt(0), instanceOf(ASN1ObjectIdentifier.class));
assertThat(seq.getObjectAt(0).toString(), equalTo(CN_OID));
assertThat(seq.getObjectAt(1), instanceOf(ASN1TaggedObject.class));
ASN1TaggedObject tagged = (ASN1TaggedObject) seq.getObjectAt(1);
assertThat(tagged.getBaseObject(), instanceOf(ASN1String.class));
assertThat(tagged.getBaseObject().toString(), is(in(certInfo.commonNames)));
} else {
fail("unknown general name with tag " + generalName.getTagNo());
}
}
}
/**
* Gets a random name that is valid for certificate generation. There are some cases where the random value could match one of the
* reserved names like ca, so this method allows us to avoid these issues.
*/
private String getValidRandomInstanceName() {
String name;
boolean valid;
do {
name = randomAlphaOfLengthBetween(1, 32);
valid = Name.fromUserProvidedName(name, name).error == null;
} while (valid == false);
return name;
}
/**
* Writes the description of instances to a given {@link Path}
*/
private Path writeInstancesTo(Path path) throws IOException {
String instances = """
instances:
- name: "node1"
ip:
- "127.0.0.1"
dns: "localhost"
- name: "node2"
filename: "node2"
ip: "::1"
cn:
- "node2.elasticsearch"
- name: "node3"
filename: "node3"
- name: "CN=different value"
filename: "different file"
dns:
- "node4.mydomain.com"
""";
return Files.writeString(path, instances);
}
/**
* Writes the description of instances to a given {@link Path}
*/
private Path writeInvalidInstanceInformation(Path path) throws IOException {
String instances = """
instances:
- name: "THIS=not a,valid DN"
ip: "127.0.0.1"
""";
return Files.writeString(path, instances);
}
@SuppressForbidden(reason = "resolve paths against CWD for a CLI tool")
private static Path resolvePath(String path) {
return PathUtils.get(path).toAbsolutePath();
}
private static Path getRootPathOfZip(Path pemZip) throws IOException, URISyntaxException {
FileSystem zipFS = FileSystems.newFileSystem(new URI("jar:" + pemZip.toUri()), Collections.emptyMap());
return zipFS.getPath("/");
}
private String generateCA(Path caFile, MockTerminal terminal, Environment env, boolean pem) throws Exception {
final int caKeySize = randomIntBetween(4, 8) * 512;
final int days = randomIntBetween(7, 1500);
final String caPassword = randomFrom("", randomAlphaOfLengthBetween(4, 80));
final String caKeyUsage = randomFrom("", Strings.collectionToCommaDelimitedString(CertificateTool.DEFAULT_CA_KEY_USAGE));
final CertificateAuthorityCommand caCommand = new PathAwareCertificateAuthorityCommand(caFile);
String[] args = {
"-ca-dn",
"CN=My ElasticSearch Cluster",
"-pass",
caPassword,
"-out",
caFile.toString(),
"-keysize",
String.valueOf(caKeySize),
"-days",
String.valueOf(days),
"-keyusage",
caKeyUsage };
if (pem) {
args = ArrayUtils.append(args, "--pem");
}
final OptionSet caOptions = caCommand.getParser().parse(args);
final ProcessInfo processInfo = new ProcessInfo(Map.of(), Map.of(), createTempDir());
caCommand.execute(terminal, caOptions, env, processInfo);
// Check output for OpenSSL compatibility version
if (caPassword.length() > 50) {
assertThat(terminal.getOutput(), containsString("OpenSSL"));
} else {
assertThat(terminal.getOutput(), not(containsString("OpenSSL")));
}
assertThat(caFile, pathExists());
return caPassword;
}
/**
* Converting jimfs Paths into strings and back to paths doesn't work with the security manager.
* This | CertificateToolTests |
java | google__error-prone | check_api/src/main/java/com/google/errorprone/util/ASTHelpers.java | {
"start": 46251,
"end": 56590
} | enum ____, in declaration order.
*/
public static LinkedHashSet<String> enumValues(TypeSymbol enumType) {
if (enumType.getKind() != ElementKind.ENUM) {
throw new IllegalStateException();
}
Scope scope = enumType.members();
Deque<String> values = new ArrayDeque<>();
for (Symbol sym : scope.getSymbols()) {
if (sym instanceof VarSymbol var) {
if ((var.flags() & Flags.ENUM) != 0) {
/*
* Javac gives us the members backwards, apparently. It's worth making an effort to
* preserve declaration order because it's useful for diagnostics (e.g. in {@link
* MissingCasesInEnumSwitch}).
*/
values.push(sym.name.toString());
}
}
}
return new LinkedHashSet<>(values);
}
/** Returns true if the given tree is a generated constructor. */
public static boolean isGeneratedConstructor(MethodTree tree) {
if (!(tree instanceof JCMethodDecl jCMethodDecl)) {
return false;
}
return (jCMethodDecl.mods.flags & Flags.GENERATEDCONSTR) == Flags.GENERATEDCONSTR;
}
/** Returns the list of all constructors defined in the class (including generated ones). */
public static List<MethodTree> getConstructors(ClassTree classTree) {
List<MethodTree> constructors = new ArrayList<>();
for (Tree member : classTree.getMembers()) {
if (member instanceof MethodTree methodTree) {
if (getSymbol(methodTree).isConstructor()) {
constructors.add(methodTree);
}
}
}
return constructors;
}
/**
* A wrapper for {@link Symbol#getEnclosedElements} to avoid binary compatibility issues for
* covariant overrides in subtypes of {@link Symbol}.
*/
public static List<Symbol> getEnclosedElements(Symbol symbol) {
return symbol.getEnclosedElements();
}
/** Returns the list of all constructors defined in the class. */
public static ImmutableList<MethodSymbol> getConstructors(ClassSymbol classSymbol) {
return getEnclosedElements(classSymbol).stream()
.filter(Symbol::isConstructor)
.map(e -> (MethodSymbol) e)
.collect(toImmutableList());
}
/**
* Returns the {@code Type} of the given tree, or {@code null} if the type could not be
* determined.
*/
public static @Nullable Type getType(@Nullable Tree tree) {
return tree instanceof JCTree jCTree ? jCTree.type : null;
}
/**
* Returns the {@code ClassType} for the given type {@code ClassTree} or {@code null} if the type
* could not be determined.
*/
public static @Nullable ClassType getType(@Nullable ClassTree tree) {
Type type = getType((Tree) tree);
return type instanceof ClassType classType ? classType : null;
}
public static @Nullable String getAnnotationName(AnnotationTree tree) {
Symbol sym = getSymbol(tree);
return sym == null ? null : sym.name.toString();
}
/** Returns the erasure of the given type tree, i.e. {@code List} for {@code List<Foo>}. */
public static Tree getErasedTypeTree(Tree tree) {
return tree.accept(
new SimpleTreeVisitor<Tree, Void>() {
@Override
public Tree visitIdentifier(IdentifierTree tree, Void unused) {
return tree;
}
@Override
public Tree visitParameterizedType(ParameterizedTypeTree tree, Void unused) {
return tree.getType();
}
},
null);
}
/** Return the enclosing {@code ClassSymbol} of the given symbol, or {@code null}. */
public static @Nullable ClassSymbol enclosingClass(Symbol sym) {
// sym.owner is null in the case of module symbols.
return sym.owner == null ? null : sym.owner.enclClass();
}
/**
* Return the enclosing {@code PackageSymbol} of the given symbol, or {@code null}.
*
* <p>Prefer this to {@link Symbol#packge}, which throws a {@link NullPointerException} for
* symbols that are not contained by a package: https://bugs.openjdk.java.net/browse/JDK-8231911
*/
public static @Nullable PackageSymbol enclosingPackage(Symbol sym) {
Symbol curr = sym;
while (curr != null) {
if (curr.getKind().equals(ElementKind.PACKAGE)) {
return (PackageSymbol) curr;
}
curr = curr.owner;
}
return null;
}
/** Return true if the given symbol is defined in the current package. */
public static boolean inSamePackage(Symbol targetSymbol, VisitorState state) {
JCCompilationUnit compilationUnit = (JCCompilationUnit) state.getPath().getCompilationUnit();
PackageSymbol usePackage = compilationUnit.packge;
PackageSymbol targetPackage = enclosingPackage(targetSymbol);
return targetPackage != null
&& usePackage != null
&& targetPackage.getQualifiedName().equals(usePackage.getQualifiedName());
}
/**
* Returns the {@link Nullness} for an expression as determined by the nullness dataflow analysis.
*/
public static Nullness getNullnessValue(
ExpressionTree expr, VisitorState state, NullnessAnalysis nullnessAnalysis) {
TreePath pathToExpr = new TreePath(state.getPath(), expr);
return nullnessAnalysis.getNullness(pathToExpr, state.context);
}
/** Returns the compile-time constant value of a tree if it has one, or {@code null}. */
public static @Nullable Object constValue(Tree tree) {
if (tree == null) {
return null;
}
tree = stripParentheses(tree);
Type type = ASTHelpers.getType(tree);
Object value;
if (tree instanceof JCLiteral jCLiteral) {
value = jCLiteral.value;
} else if (type != null) {
value = type.constValue();
} else {
return null;
}
if (type.hasTag(TypeTag.BOOLEAN) && value instanceof Integer integer) {
return integer == 1;
}
if (type.hasTag(TypeTag.CHAR) && value instanceof Integer) {
return (char) (int) value;
}
return value;
}
/** Returns the compile-time constant value of a tree if it is of type clazz, or {@code null}. */
public static <T> @Nullable T constValue(Tree tree, Class<? extends T> clazz) {
Object value = constValue(tree);
return clazz.isInstance(value) ? clazz.cast(value) : null;
}
/** Return true if the given type is 'void' or 'Void'. */
public static boolean isVoidType(Type type, VisitorState state) {
if (type == null) {
return false;
}
return type.getKind() == TypeKind.VOID
|| state.getTypes().isSameType(Suppliers.JAVA_LANG_VOID_TYPE.get(state), type);
}
private static final ImmutableSet<TypeTag> SUBTYPE_UNDEFINED =
Sets.immutableEnumSet(TypeTag.METHOD, TypeTag.PACKAGE, TypeTag.ERROR, TypeTag.FORALL);
/** Returns true if {@code erasure(s) <: erasure(t)}. */
public static boolean isSubtype(Type s, Type t, VisitorState state) {
if (s == null || t == null) {
return false;
}
if (SUBTYPE_UNDEFINED.contains(s.getTag())) {
return false;
}
if (t == state.getSymtab().unknownType) {
return false;
}
Types types = state.getTypes();
return types.isSubtype(types.erasure(s), types.erasure(t));
}
/**
* Returns true if {@code t} is a subtype of Throwable but not a subtype of RuntimeException or
* Error.
*/
public static boolean isCheckedExceptionType(Type t, VisitorState state) {
Symtab symtab = state.getSymtab();
return isSubtype(t, symtab.throwableType, state)
&& !isSubtype(t, symtab.runtimeExceptionType, state)
&& !isSubtype(t, symtab.errorType, state);
}
/** Returns true if {@code erasure(s)} is castable to {@code erasure(t)}. */
public static boolean isCastable(Type s, Type t, VisitorState state) {
if (s == null || t == null) {
return false;
}
Types types = state.getTypes();
return types.isCastable(types.erasure(s), types.erasure(t));
}
/** Returns true if {@code erasure(s) == erasure(t)}. */
public static boolean isSameType(Type s, Type t, VisitorState state) {
if (s == null || t == null) {
return false;
}
Types types = state.getTypes();
return types.isSameType(types.erasure(s), types.erasure(t));
}
/** Returns the modifiers tree of the given class, method, or variable declaration. */
public static @Nullable ModifiersTree getModifiers(Tree tree) {
if (tree instanceof ClassTree classTree) {
return classTree.getModifiers();
}
if (tree instanceof MethodTree methodTree) {
return methodTree.getModifiers();
}
if (tree instanceof VariableTree variableTree) {
return variableTree.getModifiers();
}
if (tree instanceof ModifiersTree modifiersTree) {
return modifiersTree;
}
return null;
}
/** Returns the annotations of the given tree, or an empty list. */
public static List<? extends AnnotationTree> getAnnotations(Tree tree) {
if (tree instanceof TypeParameterTree typeParameterTree) {
return typeParameterTree.getAnnotations();
}
if (tree instanceof ModuleTree moduleTree) {
return moduleTree.getAnnotations();
}
if (tree instanceof PackageTree packageTree) {
return packageTree.getAnnotations();
}
if (tree instanceof NewArrayTree newArrayTree) {
return newArrayTree.getAnnotations();
}
if (tree instanceof AnnotatedTypeTree annotatedTypeTree) {
return annotatedTypeTree.getAnnotations();
}
if (tree instanceof ModifiersTree modifiersTree) {
return modifiersTree.getAnnotations();
}
ModifiersTree modifiersTree = getModifiers(tree);
return modifiersTree == null ? ImmutableList.of() : modifiersTree.getAnnotations();
}
/**
* Returns the upper bound of a type if it has one, or the type itself if not. Correctly handles
* wildcards and capture variables.
*/
public static Type getUpperBound(Type type, Types types) {
if (type.hasTag(TypeTag.WILDCARD)) {
return types.wildUpperBound(type);
}
if (type.hasTag(TypeTag.TYPEVAR) && ((TypeVar) type).isCaptured()) {
return types.cvarUpperBound(type);
}
if (type.getUpperBound() != null) {
return type.getUpperBound();
}
// concrete type, e.g. java.lang.String, or a case we haven't considered
return type;
}
/**
* Returns true if the leaf node in the {@link TreePath} from {@code state} sits somewhere
* underneath a | type |
java | apache__logging-log4j2 | log4j-perf-test/src/main/java/org/apache/logging/log4j/perf/jmh/StackWalkBenchmark.java | {
"start": 3731,
"end": 4773
} | class ____ implements Function<Stream<StackWalker.StackFrame>, StackWalker.StackFrame> {
@Override
public StackWalker.StackFrame apply(final Stream<StackWalker.StackFrame> stackFrameStream) {
final String fqcn = FQCN.get();
boolean foundFqcn = false;
final Object[] frames = stackFrameStream.toArray();
for (int i = 0; i < frames.length; ++i) {
final String className = ((StackWalker.StackFrame) frames[i]).getClassName();
if (!foundFqcn) {
// Skip frames until we find the FQCN
foundFqcn = className.equals(fqcn);
} else if (!className.equals(fqcn)) {
// The frame is no longer equal to the FQCN so it is the one we want.
return (StackWalker.StackFrame) frames[i];
} // Otherwise it is equal to the FQCN so we need to skip it.
}
// Should never happen
return null;
}
}
}
| FqcnCallerLocator |
java | quarkusio__quarkus | integration-tests/oidc-token-propagation-reactive/src/test/java/io/quarkus/it/keycloak/TestSecurityLazyAuthTest.java | {
"start": 2030,
"end": 2306
} | interface ____ {
}
@Retention(RetentionPolicy.RUNTIME)
@Target({ ElementType.METHOD })
@TestSecurity(user = "userJwt", roles = "viewer")
@OidcSecurity(claims = {
@Claim(key = "email", value = "user@gmail.com")
})
public @ | TestAsUser1Tester |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/RenewDelegationTokenResponse.java | {
"start": 1164,
"end": 1633
} | class ____ {
@Private
@Unstable
public static RenewDelegationTokenResponse newInstance(long expTime) {
RenewDelegationTokenResponse response =
Records.newRecord(RenewDelegationTokenResponse.class);
response.setNextExpirationTime(expTime);
return response;
}
@Private
@Unstable
public abstract long getNextExpirationTime();
@Private
@Unstable
public abstract void setNextExpirationTime(long expTime);
}
| RenewDelegationTokenResponse |
java | quarkusio__quarkus | independent-projects/arc/tests/src/test/java/io/quarkus/arc/test/decorators/delegate/DelegateSubtypeTest.java | {
"start": 1238,
"end": 1508
} | class ____ implements A {
private final A a;
// The delegate type is B!
@Inject
public D(@Delegate B b) {
this.a = b;
}
@Override
public int ping() {
return a.ping() + 1;
}
}
}
| D |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/inlineme/SuggesterTest.java | {
"start": 29579,
"end": 30269
} | class ____ {
public static final Supplier<Integer> MAGIC = () -> 42;
@InlineMe(replacement = "Client.after(MAGIC.get())", imports = "com.google.frobber.Client")
@Deprecated
public static int before() {
return after(MAGIC.get());
}
public static int after(int value) {
return value;
}
}
""")
.doTest();
}
@Test
public void apisLikelyUsedReflectively() {
refactoringTestHelper
.addInputLines(
"Test.java",
"""
import com.google.errorprone.annotations.Keep;
import com.google.inject.Provides;
import java.time.Duration;
import java.util.Optional;
public | Client |
java | spring-projects__spring-framework | spring-test/src/test/java/org/springframework/test/context/env/AbstractExplicitPropertiesFileTests.java | {
"start": 1130,
"end": 1549
} | class ____ {
@Autowired
Environment env;
@Test
@DisplayName("verify properties are available in the Environment")
void verifyPropertiesAreAvailableInEnvironment() {
String userHomeKey = "user.home";
assertThat(env.getProperty(userHomeKey)).isEqualTo(System.getProperty(userHomeKey));
assertThat(env.getProperty("explicit")).isEqualTo("enigma");
}
@Configuration
static | AbstractExplicitPropertiesFileTests |
java | apache__maven | its/core-it-support/core-it-plugins/maven-it-plugin-configuration/src/main/java/org/apache/maven/plugin/coreit/Bean.java | {
"start": 940,
"end": 1452
} | class ____ {
String fieldParam;
String setterParam;
boolean setterCalled;
public void set(String value) {
fieldParam = value;
setterCalled = true;
}
public void setSetterParam(String value) {
setterParam = value;
setterCalled = true;
}
@Override
public String toString() {
return getClass().getName() + "[fieldParam=" + fieldParam + ", setterParam=" + setterParam + ", setterCalled="
+ setterCalled + "]";
}
}
| Bean |
java | ReactiveX__RxJava | src/test/java/io/reactivex/rxjava3/internal/operators/flowable/BlockingFlowableToIteratorTest.java | {
"start": 1237,
"end": 3287
} | class ____ extends RxJavaTest {
@Test
public void toIterator() {
Flowable<String> obs = Flowable.just("one", "two", "three");
Iterator<String> it = obs.blockingIterable().iterator();
assertTrue(it.hasNext());
assertEquals("one", it.next());
assertTrue(it.hasNext());
assertEquals("two", it.next());
assertTrue(it.hasNext());
assertEquals("three", it.next());
assertFalse(it.hasNext());
}
@Test(expected = TestException.class)
public void toIteratorWithException() {
Flowable<String> obs = Flowable.unsafeCreate(new Publisher<String>() {
@Override
public void subscribe(Subscriber<? super String> subscriber) {
subscriber.onSubscribe(new BooleanSubscription());
subscriber.onNext("one");
subscriber.onError(new TestException());
}
});
Iterator<String> it = obs.blockingIterable().iterator();
assertTrue(it.hasNext());
assertEquals("one", it.next());
assertTrue(it.hasNext());
it.next();
}
@Test
public void iteratorExertBackpressure() {
final Counter src = new Counter();
Flowable<Integer> obs = Flowable.fromIterable(new Iterable<Integer>() {
@Override
public Iterator<Integer> iterator() {
return src;
}
});
Iterator<Integer> it = obs.blockingIterable().iterator();
while (it.hasNext()) {
// Correct backpressure should cause this interleaved behavior.
// We first request RxRingBuffer.SIZE. Then in increments of
// SubscriberIterator.LIMIT.
int i = it.next();
int expected = i - (i % (Flowable.bufferSize() - (Flowable.bufferSize() >> 2))) + Flowable.bufferSize();
expected = Math.min(expected, Counter.MAX);
assertEquals(expected, src.count);
}
}
public static final | BlockingFlowableToIteratorTest |
java | elastic__elasticsearch | x-pack/plugin/rank-rrf/src/test/java/org/elasticsearch/xpack/rank/rrf/RRFRetrieverBuilderTests.java | {
"start": 1559,
"end": 27474
} | class ____ extends AbstractRetrieverBuilderTests<RRFRetrieverBuilder> {
/** Tests extraction errors related to compound retrievers. These tests require a compound retriever which is why they are here. */
public void testRetrieverExtractionErrors() throws IOException {
try (
XContentParser parser = createParser(
JsonXContent.jsonXContent,
"{\"retriever\":{\"rrf_nl\":{\"retrievers\":"
+ "[{\"standard\":{\"search_after\":[1]}},{\"standard\":{\"search_after\":[2]}}]}}}"
)
) {
SearchSourceBuilder ssb = new SearchSourceBuilder();
IllegalArgumentException iae = expectThrows(
IllegalArgumentException.class,
() -> ssb.parseXContent(parser, true, nf -> true)
.rewrite(
new QueryRewriteContext(
parserConfig(),
null,
null,
null,
null,
null,
new PointInTimeBuilder(new BytesArray("pitid")),
null,
null
)
)
);
assertEquals("[search_after] cannot be used in children of compound retrievers", iae.getMessage());
}
try (
XContentParser parser = createParser(
JsonXContent.jsonXContent,
"{\"retriever\":{\"rrf_nl\":{\"retrievers\":"
+ "[{\"standard\":{\"terminate_after\":1}},{\"standard\":{\"terminate_after\":2}}]}}}"
)
) {
SearchSourceBuilder ssb = new SearchSourceBuilder();
IllegalArgumentException iae = expectThrows(
IllegalArgumentException.class,
() -> ssb.parseXContent(parser, true, nf -> true)
.rewrite(
new QueryRewriteContext(
parserConfig(),
null,
null,
null,
null,
null,
new PointInTimeBuilder(new BytesArray("pitid")),
null,
null
)
)
);
assertEquals("[terminate_after] cannot be used in children of compound retrievers", iae.getMessage());
}
}
public void testRRFRetrieverParsingSyntax() throws IOException {
BiConsumer<String, float[]> testCase = (json, expectedWeights) -> {
try (XContentParser parser = createParser(JsonXContent.jsonXContent, json)) {
SearchSourceBuilder ssb = new SearchSourceBuilder().parseXContent(parser, true, nf -> true);
assertThat(ssb.retriever(), instanceOf(RRFRetrieverBuilder.class));
RRFRetrieverBuilder rrf = (RRFRetrieverBuilder) ssb.retriever();
assertArrayEquals(expectedWeights, rrf.weights(), 0.001f);
} catch (IOException e) {
throw new RuntimeException(e);
}
};
String legacyJson = """
{
"retriever": {
"rrf_nl": {
"retrievers": [
{ "standard": { "query": { "match_all": {} } } },
{ "standard": { "query": { "match_all": {} } } }
]
}
}
}
""";
testCase.accept(legacyJson, new float[] { 1.0f, 1.0f });
String weightedJson = """
{
"retriever": {
"rrf_nl": {
"retrievers": [
{ "retriever": { "standard": { "query": { "match_all": {} } } }, "weight": 2.5 },
{ "retriever": { "standard": { "query": { "match_all": {} } } }, "weight": 0.5 }
]
}
}
}
""";
testCase.accept(weightedJson, new float[] { 2.5f, 0.5f });
String mixedJson = """
{
"retriever": {
"rrf_nl": {
"retrievers": [
{ "standard": { "query": { "match_all": {} } } },
{ "retriever": { "standard": { "query": { "match_all": {} } } }, "weight": 0.6 }
]
}
}
}
""";
testCase.accept(mixedJson, new float[] { 1.0f, 0.6f });
}
public void testMultiFieldsParamsRewrite() {
final String indexName = "test-index";
final List<String> testInferenceFields = List.of("semantic_field_1", "semantic_field_2");
final ResolvedIndices resolvedIndices = createMockResolvedIndices(Map.of(indexName, testInferenceFields), null, Map.of());
final QueryRewriteContext queryRewriteContext = new QueryRewriteContext(
parserConfig(),
null,
null,
TransportVersion.current(),
RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY,
resolvedIndices,
new PointInTimeBuilder(new BytesArray("pitid")),
null,
null
);
// No wildcards, no per-field boosting
RRFRetrieverBuilder rrfRetrieverBuilder = new RRFRetrieverBuilder(
null,
List.of("field_1", "field_2", "semantic_field_1", "semantic_field_2"),
"foo",
DEFAULT_RANK_WINDOW_SIZE,
RRFRetrieverBuilder.DEFAULT_RANK_CONSTANT,
new float[0]
);
assertMultiFieldsParamsRewrite(
rrfRetrieverBuilder,
queryRewriteContext,
Map.of("field_1", 1.0f, "field_2", 1.0f),
Map.of("semantic_field_1", 1.0f, "semantic_field_2", 1.0f),
"foo"
);
// Non-default rank window size and rank constant
rrfRetrieverBuilder = new RRFRetrieverBuilder(
null,
List.of("field_1", "field_2", "semantic_field_1", "semantic_field_2"),
"foo2",
DEFAULT_RANK_WINDOW_SIZE * 2,
RRFRetrieverBuilder.DEFAULT_RANK_CONSTANT / 2,
new float[0]
);
assertMultiFieldsParamsRewrite(
rrfRetrieverBuilder,
queryRewriteContext,
Map.of("field_1", 1.0f, "field_2", 1.0f),
Map.of("semantic_field_1", 1.0f, "semantic_field_2", 1.0f),
"foo2"
);
// No wildcards, per-field boosting
rrfRetrieverBuilder = new RRFRetrieverBuilder(
null,
List.of("field_1", "field_2^1.5", "semantic_field_1", "semantic_field_2^2"),
"bar",
DEFAULT_RANK_WINDOW_SIZE,
RRFRetrieverBuilder.DEFAULT_RANK_CONSTANT,
new float[0]
);
assertMultiFieldsParamsRewrite(
rrfRetrieverBuilder,
queryRewriteContext,
Map.of("field_1", 1.0f, "field_2", 1.5f),
Map.of("semantic_field_1", 1.0f, "semantic_field_2", 2.0f),
"bar"
);
// Zero weights
rrfRetrieverBuilder = new RRFRetrieverBuilder(
null,
List.of("field_1^0", "field_2^1.0"),
"zero_test",
DEFAULT_RANK_WINDOW_SIZE,
RRFRetrieverBuilder.DEFAULT_RANK_CONSTANT,
new float[0]
);
assertMultiFieldsParamsRewrite(
rrfRetrieverBuilder,
queryRewriteContext,
Map.of("field_1", 0.0f, "field_2", 1.0f),
Map.of(),
"zero_test"
);
// Glob matching on inference and non-inference fields with per-field boosting
rrfRetrieverBuilder = new RRFRetrieverBuilder(
null,
List.of("field_*^1.5", "*_field_1^2.5"),
"baz",
DEFAULT_RANK_WINDOW_SIZE,
RRFRetrieverBuilder.DEFAULT_RANK_CONSTANT,
new float[0]
);
assertMultiFieldsParamsRewrite(
rrfRetrieverBuilder,
queryRewriteContext,
Map.of("field_*", 1.5f, "*_field_1", 2.5f),
Map.of("semantic_field_1", 2.5f),
"baz"
);
// Multiple boosts defined on the same field
rrfRetrieverBuilder = new RRFRetrieverBuilder(
null,
List.of("field_*^1.5", "field_1^3.0", "*_field_1^2.5", "semantic_*^1.5"),
"baz2",
DEFAULT_RANK_WINDOW_SIZE,
RRFRetrieverBuilder.DEFAULT_RANK_CONSTANT,
new float[0]
);
assertMultiFieldsParamsRewrite(
rrfRetrieverBuilder,
queryRewriteContext,
Map.of("field_*", 1.5f, "field_1", 3.0f, "*_field_1", 2.5f, "semantic_*", 1.5f),
Map.of("semantic_field_1", 3.75f, "semantic_field_2", 1.5f),
"baz2"
);
// All-fields wildcard
rrfRetrieverBuilder = new RRFRetrieverBuilder(
null,
List.of("*"),
"qux",
DEFAULT_RANK_WINDOW_SIZE,
RRFRetrieverBuilder.DEFAULT_RANK_CONSTANT,
new float[0]
);
assertMultiFieldsParamsRewrite(
rrfRetrieverBuilder,
queryRewriteContext,
Map.of("*", 1.0f),
Map.of("semantic_field_1", 1.0f, "semantic_field_2", 1.0f),
"qux"
);
}
public void testMultiIndexMultiFieldsParamsRewrite() {
String indexName = "test-index";
String anotherIndexName = "test-another-index";
final ResolvedIndices resolvedIndices = createMockResolvedIndices(
Map.of(
indexName,
List.of("semantic_field_1", "semantic_field_2"),
anotherIndexName,
List.of("semantic_field_2", "semantic_field_3")
),
null,
Map.of() // use random and different inference IDs for semantic_text fields
);
final QueryRewriteContext queryRewriteContext = new QueryRewriteContext(
parserConfig(),
null,
null,
TransportVersion.current(),
RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY,
resolvedIndices,
new PointInTimeBuilder(new BytesArray("pitid")),
null,
null
);
// No wildcards, no per-field boosting
RRFRetrieverBuilder retriever = new RRFRetrieverBuilder(
null,
List.of("field_1", "field_2", "semantic_field_1", "semantic_field_2"),
"foo",
DEFAULT_RANK_WINDOW_SIZE,
RRFRetrieverBuilder.DEFAULT_RANK_CONSTANT,
new float[0]
);
assertMultiIndexMultiFieldsParamsRewrite(
retriever,
queryRewriteContext,
Map.of(
Map.of("field_1", 1.0f, "field_2", 1.0f),
List.of(indexName),
Map.of("field_1", 1.0f, "field_2", 1.0f, "semantic_field_1", 1.0f),
List.of(anotherIndexName)
),
Map.of(
new Tuple<>("semantic_field_1", List.of(indexName)),
1.0f,
new Tuple<>("semantic_field_2", List.of(indexName)), // field with different inference IDs, we filter on index name
1.0f,
new Tuple<>("semantic_field_2", List.of(anotherIndexName)),
1.0f
),
"foo",
null
);
// Non-default rank window size and non-default rank_constant
retriever = new RRFRetrieverBuilder(
null,
List.of("field_1", "field_2", "semantic_field_1", "semantic_field_2"),
"foo2",
DEFAULT_RANK_WINDOW_SIZE * 2,
RRFRetrieverBuilder.DEFAULT_RANK_CONSTANT * 2,
new float[0]
);
assertMultiIndexMultiFieldsParamsRewrite(
retriever,
queryRewriteContext,
Map.of(
Map.of("field_1", 1.0f, "field_2", 1.0f),
List.of(indexName),
Map.of("field_1", 1.0f, "field_2", 1.0f, "semantic_field_1", 1.0f),
List.of(anotherIndexName)
),
Map.of(
new Tuple<>("semantic_field_1", List.of(indexName)),
1.0f,
new Tuple<>("semantic_field_2", List.of(indexName)),
1.0f,
new Tuple<>("semantic_field_2", List.of(anotherIndexName)),
1.0f
),
"foo2",
null
);
// No wildcards, per-field boosting
retriever = new RRFRetrieverBuilder(
null,
List.of("field_1", "field_2^1.5", "semantic_field_1", "semantic_field_2^2"),
"bar",
DEFAULT_RANK_WINDOW_SIZE,
RRFRetrieverBuilder.DEFAULT_RANK_CONSTANT,
new float[0]
);
assertMultiIndexMultiFieldsParamsRewrite(
retriever,
queryRewriteContext,
Map.of(
Map.of("field_1", 1.0f, "field_2", 1.5f),
List.of(indexName),
Map.of("field_1", 1.0f, "field_2", 1.5f, "semantic_field_1", 1.0f),
List.of(anotherIndexName)
),
Map.of(
new Tuple<>("semantic_field_1", List.of(indexName)),
1.0f,
new Tuple<>("semantic_field_2", List.of(indexName)),
2.0f,
new Tuple<>("semantic_field_2", List.of(anotherIndexName)),
2.0f
),
"bar",
null
);
// Glob matching on inference and non-inference fields with per-field boosting
retriever = new RRFRetrieverBuilder(
null,
List.of("field_*^1.5", "*_field_1^2.5"),
"baz",
DEFAULT_RANK_WINDOW_SIZE,
RRFRetrieverBuilder.DEFAULT_RANK_CONSTANT,
new float[0]
);
assertMultiIndexMultiFieldsParamsRewrite(
retriever,
queryRewriteContext,
Map.of(Map.of("field_*", 1.5f, "*_field_1", 2.5f), List.of()),
Map.of(new Tuple<>("semantic_field_1", List.of(indexName)), 2.5f),
"baz",
null
);
// Multiple boosts defined on the same field
retriever = new RRFRetrieverBuilder(
null,
List.of("field_*^1.5", "field_1^3.0", "*_field_1^2.5", "semantic_*^1.5"),
"baz2",
DEFAULT_RANK_WINDOW_SIZE,
RRFRetrieverBuilder.DEFAULT_RANK_CONSTANT,
new float[0]
);
assertMultiIndexMultiFieldsParamsRewrite(
retriever,
queryRewriteContext,
Map.of(Map.of("field_*", 1.5f, "field_1", 3.0f, "*_field_1", 2.5f, "semantic_*", 1.5f), List.of()),
Map.of(
new Tuple<>("semantic_field_1", List.of(indexName)),
3.75f,
new Tuple<>("semantic_field_2", List.of(indexName)),
1.5f,
new Tuple<>("semantic_field_2", List.of(anotherIndexName)),
1.5f,
new Tuple<>("semantic_field_3", List.of(anotherIndexName)),
1.5f
),
"baz2",
null
);
// All-fields wildcard
retriever = new RRFRetrieverBuilder(
null,
List.of("*"),
"qux",
DEFAULT_RANK_WINDOW_SIZE,
RRFRetrieverBuilder.DEFAULT_RANK_CONSTANT,
new float[0]
);
assertMultiIndexMultiFieldsParamsRewrite(
retriever,
queryRewriteContext,
Map.of(Map.of("*", 1.0f), List.of()), // no index filter for the lexical retriever
Map.of(
new Tuple<>("semantic_field_1", List.of(indexName)),
1.0f,
new Tuple<>("semantic_field_2", List.of(indexName)),
1.0f,
new Tuple<>("semantic_field_2", List.of(anotherIndexName)),
1.0f,
new Tuple<>("semantic_field_3", List.of(anotherIndexName)),
1.0f
),
"qux",
null
);
}
public void testMultiIndexMultiFieldsParamsRewriteWithSameInferenceIds() {
String indexName = "test-index";
String anotherIndexName = "test-another-index";
final ResolvedIndices resolvedIndices = createMockResolvedIndices(
Map.of(
indexName,
List.of("semantic_field_1", "semantic_field_2"),
anotherIndexName,
List.of("semantic_field_2", "semantic_field_3")
),
null,
Map.of("semantic_field_2", "common_inference_id") // use the same inference ID for semantic_field_2
);
final QueryRewriteContext queryRewriteContext = new QueryRewriteContext(
parserConfig(),
null,
null,
TransportVersion.current(),
RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY,
resolvedIndices,
new PointInTimeBuilder(new BytesArray("pitid")),
null,
null
);
// No wildcards, no per-field boosting
RRFRetrieverBuilder retriever = new RRFRetrieverBuilder(
null,
List.of("field_1", "field_2", "semantic_field_1", "semantic_field_2"),
"foo",
DEFAULT_RANK_WINDOW_SIZE,
RRFRetrieverBuilder.DEFAULT_RANK_CONSTANT,
new float[0]
);
assertMultiIndexMultiFieldsParamsRewrite(
retriever,
queryRewriteContext,
Map.of(
Map.of("field_1", 1.0f, "field_2", 1.0f),
List.of(indexName),
Map.of("field_1", 1.0f, "field_2", 1.0f, "semantic_field_1", 1.0f),
List.of(anotherIndexName)
),
Map.of(new Tuple<>("semantic_field_1", List.of(indexName)), 1.0f, new Tuple<>("semantic_field_2", List.of()), 1.0f),
"foo",
null
);
// Non-default rank window size and rank constant
retriever = new RRFRetrieverBuilder(
null,
List.of("field_1", "field_2", "semantic_field_1", "semantic_field_2"),
"foo2",
DEFAULT_RANK_WINDOW_SIZE * 2,
RRFRetrieverBuilder.DEFAULT_RANK_CONSTANT * 2,
new float[0]
);
assertMultiIndexMultiFieldsParamsRewrite(
retriever,
queryRewriteContext,
Map.of(
Map.of("field_1", 1.0f, "field_2", 1.0f),
List.of(indexName),
Map.of("field_1", 1.0f, "field_2", 1.0f, "semantic_field_1", 1.0f),
List.of(anotherIndexName)
),
Map.of(new Tuple<>("semantic_field_1", List.of(indexName)), 1.0f, new Tuple<>("semantic_field_2", List.of()), 1.0f),
"foo2",
null
);
// No wildcards, per-field boosting
retriever = new RRFRetrieverBuilder(
null,
List.of("field_1", "field_2^1.5", "semantic_field_1", "semantic_field_2^2"),
"bar",
DEFAULT_RANK_WINDOW_SIZE,
RRFRetrieverBuilder.DEFAULT_RANK_CONSTANT,
new float[0]
);
assertMultiIndexMultiFieldsParamsRewrite(
retriever,
queryRewriteContext,
Map.of(
Map.of("field_1", 1.0f, "field_2", 1.5f),
List.of(indexName),
Map.of("field_1", 1.0f, "field_2", 1.5f, "semantic_field_1", 1.0f),
List.of(anotherIndexName)
),
Map.of(new Tuple<>("semantic_field_1", List.of(indexName)), 1.0f, new Tuple<>("semantic_field_2", List.of()), 2.0f),
"bar",
null
);
// Glob matching on inference and non-inference fields with per-field boosting
retriever = new RRFRetrieverBuilder(
null,
List.of("field_*^1.5", "*_field_1^2.5"),
"baz",
DEFAULT_RANK_WINDOW_SIZE,
RRFRetrieverBuilder.DEFAULT_RANK_CONSTANT,
new float[0]
);
assertMultiIndexMultiFieldsParamsRewrite(
retriever,
queryRewriteContext,
Map.of(Map.of("field_*", 1.5f, "*_field_1", 2.5f), List.of()), // on index filter on the lexical query
Map.of(new Tuple<>("semantic_field_1", List.of(indexName)), 2.5f),
"baz",
null
);
// Multiple boosts defined on the same field
retriever = new RRFRetrieverBuilder(
null,
List.of("field_*^1.5", "field_1^3.0", "*_field_1^2.5", "semantic_*^1.5"),
"baz2",
DEFAULT_RANK_WINDOW_SIZE,
RRFRetrieverBuilder.DEFAULT_RANK_CONSTANT,
new float[0]
);
assertMultiIndexMultiFieldsParamsRewrite(
retriever,
queryRewriteContext,
Map.of(Map.of("field_*", 1.5f, "field_1", 3.0f, "*_field_1", 2.5f, "semantic_*", 1.5f), List.of()),
Map.of(
new Tuple<>("semantic_field_1", List.of(indexName)),
3.75f,
new Tuple<>("semantic_field_2", List.of()), // no index filter since both indices have this field
1.5f,
new Tuple<>("semantic_field_3", List.of(anotherIndexName)),
1.5f
),
"baz2",
null
);
// All-fields wildcard
retriever = new RRFRetrieverBuilder(
null,
List.of("*"),
"qux",
DEFAULT_RANK_WINDOW_SIZE,
RRFRetrieverBuilder.DEFAULT_RANK_CONSTANT,
new float[0]
);
assertMultiIndexMultiFieldsParamsRewrite(
retriever,
queryRewriteContext,
Map.of(Map.of("*", 1.0f), List.of()), // on index filter on the lexical query
Map.of(
new Tuple<>("semantic_field_1", List.of(indexName)),
1.0f,
new Tuple<>("semantic_field_2", List.of()), // no index filter since both indices have this field
1.0f,
new Tuple<>("semantic_field_3", List.of(anotherIndexName)),
1.0f
),
"qux",
null
);
}
public void testSearchRemoteIndex() {
final ResolvedIndices resolvedIndices = createMockResolvedIndices(
Map.of("local-index", List.of()),
Map.of("remote-cluster", "remote-index"),
Map.of()
);
final QueryRewriteContext queryRewriteContext = new QueryRewriteContext(
parserConfig(),
null,
null,
TransportVersion.current(),
RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY,
resolvedIndices,
new PointInTimeBuilder(new BytesArray("pitid")),
null,
null
);
RRFRetrieverBuilder rrfRetrieverBuilder = new RRFRetrieverBuilder(
null,
null,
"foo",
DEFAULT_RANK_WINDOW_SIZE,
RRFRetrieverBuilder.DEFAULT_RANK_CONSTANT,
new float[0]
);
IllegalArgumentException iae = expectThrows(
IllegalArgumentException.class,
() -> rrfRetrieverBuilder.doRewrite(queryRewriteContext)
);
assertEquals("[rrf] cannot specify [query] when querying remote indices", iae.getMessage());
}
@Override
protected NamedXContentRegistry xContentRegistry() {
List<NamedXContentRegistry.Entry> entries = new SearchModule(Settings.EMPTY, List.of()).getNamedXContents();
entries.add(
new NamedXContentRegistry.Entry(
RetrieverBuilder.class,
new ParseField(RRFRetrieverBuilder.NAME),
(p, c) -> RRFRetrieverBuilder.fromXContent(p, (RetrieverParserContext) c)
)
);
// Add an entry with no license requirement for unit testing
entries.add(
new NamedXContentRegistry.Entry(
RetrieverBuilder.class,
new ParseField(RRFRetrieverBuilder.NAME + "_nl"),
(p, c) -> RRFRetrieverBuilder.PARSER.apply(p, (RetrieverParserContext) c)
)
);
return new NamedXContentRegistry(entries);
}
private void assertMultiFieldsParamsRewrite(
RRFRetrieverBuilder retriever,
QueryRewriteContext ctx,
Map<String, Float> expectedNonInferenceFields,
Map<String, Float> expectedInferenceFields,
String expectedQuery
) {
assertMultiFieldsParamsRewrite(retriever, ctx, expectedNonInferenceFields, expectedInferenceFields, expectedQuery, null);
}
@Override
protected float[] getWeights(RRFRetrieverBuilder builder) {
return builder.weights();
}
@Override
protected ScoreNormalizer[] getScoreNormalizers(RRFRetrieverBuilder builder) {
return null;
}
@Override
protected void assertCompoundRetriever(RRFRetrieverBuilder originalRetriever, RetrieverBuilder rewrittenRetriever) {
assert (rewrittenRetriever instanceof RRFRetrieverBuilder);
RRFRetrieverBuilder actualRetrieverBuilder = (RRFRetrieverBuilder) rewrittenRetriever;
assertEquals(originalRetriever.rankWindowSize(), actualRetrieverBuilder.rankWindowSize());
assertEquals(originalRetriever.rankConstant(), actualRetrieverBuilder.rankConstant());
}
}
| RRFRetrieverBuilderTests |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/query/hql/GroupByTest.java | {
"start": 1062,
"end": 4603
} | class ____ {
@Test
@JiraKey( value = "HHH-1615")
public void testGroupByEntity(SessionFactoryScope scope) {
scope.inTransaction(
session -> {
session.createQuery( "select e, count(*) from Contact e group by e", Tuple.class ).list();
}
);
}
@Test
@JiraKey( value = "HHH-9301" )
public void testGroupByAliasedBasicPart(SessionFactoryScope scope) {
final SQLStatementInspector sqlStatementInspector = scope.getCollectingStatementInspector();
sqlStatementInspector.clear();
scope.inSession( (session) -> {
final String qryString = "select c.id as id_alias, count(1) as occurrences"
+ " from Contact c"
+ " group by id_alias"
+ " order by id_alias";
final Tuple result = session.createQuery( qryString, Tuple.class ).uniqueResult();
assertThat( result ).isNotNull();
assertThat( result.get( "id_alias" ) ).isEqualTo( 123 );
assertThat( result.get( "occurrences" ) ).isEqualTo( 1L );
assertThat( sqlStatementInspector.getSqlQueries() ).hasSize( 1 );
assertThat( sqlStatementInspector.getSqlQueries().get( 0 ) ).isNotNull();
} );
}
@Test
@JiraKey( value = "HHH-9301" )
public void testGroupByAliasedCompositePart(SessionFactoryScope scope) {
final SQLStatementInspector sqlStatementInspector = scope.getCollectingStatementInspector();
sqlStatementInspector.clear();
scope.inTransaction( (session) -> {
final String qryString = "select c.name as name_alias, count(1) as occurrences"
+ " from Contact c"
+ " group by name_alias"
+ " order by name_alias";
final Tuple result = session.createQuery( qryString, Tuple.class ).uniqueResult();
assertThat( result ).isNotNull();
assertThat( result.get( "name_alias" ) ).isInstanceOf( Contact.Name.class );
final Contact.Name name = result.get( "name_alias", Contact.Name.class );
assertThat( name.getFirst() ).isEqualTo( "Johnny" );
assertThat( name.getLast() ).isEqualTo( "Lawrence" );
assertThat( result.get( "occurrences" ) ).isEqualTo( 1L );
assertThat( sqlStatementInspector.getSqlQueries() ).hasSize( 1 );
assertThat( sqlStatementInspector.getSqlQueries().get( 0 ) ).isNotNull();
} );
}
@Test
@JiraKey( value = "HHH-9301" )
public void testGroupByMultipleAliases(SessionFactoryScope scope) {
final SQLStatementInspector sqlStatementInspector = scope.getCollectingStatementInspector();
sqlStatementInspector.clear();
scope.inTransaction( (session) -> {
final String qryString = "select c.id as id_alias, c.gender as gender_alias, count(1) as occurrences"
+ " from Contact c"
+ " group by id_alias, gender_alias"
+ " order by id_alias, gender_alias";
final Tuple result = session.createQuery( qryString, Tuple.class ).uniqueResult();
assertThat( result ).isNotNull();
assertThat( result.get( "id_alias" ) ).isEqualTo( 123 );
assertThat( result.get( "gender_alias" ) ).isEqualTo( Contact.Gender.MALE );
assertThat( result.get( "occurrences" ) ).isEqualTo( 1L );
assertThat( sqlStatementInspector.getSqlQueries() ).hasSize( 1 );
assertThat( sqlStatementInspector.getSqlQueries().get( 0 ) ).isNotNull();
} );
}
@BeforeEach
public void prepareData(SessionFactoryScope scope) {
scope.inTransaction( (em) -> {
Contact entity1 = new Contact( 123, new Contact.Name( "Johnny", "Lawrence" ), Contact.Gender.MALE, LocalDate.of(1970, 1, 1) );
em.persist( entity1 );
} );
}
@AfterEach
public void dropTestData(SessionFactoryScope scope) {
scope.getSessionFactory().getSchemaManager().truncate();
}
}
| GroupByTest |
java | google__dagger | javatests/dagger/internal/codegen/ScopingValidationTest.java | {
"start": 19476,
"end": 19842
} | interface ____ {",
" SimpleType type();",
"}");
Source componentB1 =
CompilerTests.javaSource(
"test.ComponentB1",
"package test;",
"",
"import dagger.Component;",
"",
"@ScopeB",
"@Component(dependencies = ComponentA.class)",
" | ComponentA |
java | apache__camel | dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/GooglePubsubEndpointBuilderFactory.java | {
"start": 39206,
"end": 42130
} | class ____ {
/**
* The internal instance of the builder used to access to all the
* methods representing the name of headers.
*/
private static final GooglePubsubHeaderNameBuilder INSTANCE = new GooglePubsubHeaderNameBuilder();
/**
* The ID of the message, assigned by the server when the message is
* published.
*
* The option is a: {@code String} type.
*
* Group: common
*
* @return the name of the header {@code GooglePubsubMessageId}.
*/
public String googlePubsubMessageId() {
return "CamelGooglePubsubMessageId";
}
/**
* The ID used to acknowledge the received message.
*
* The option is a: {@code String} type.
*
* Group: consumer
*
* @return the name of the header {@code GooglePubsubMsgAckId}.
*/
public String googlePubsubMsgAckId() {
return "CamelGooglePubsubMsgAckId";
}
/**
* The time at which the message was published.
*
* The option is a: {@code com.google.protobuf.Timestamp} type.
*
* Group: consumer
*
* @return the name of the header {@code GooglePubsubPublishTime}.
*/
public String googlePubsubPublishTime() {
return "CamelGooglePubsubPublishTime";
}
/**
* The attributes of the message.
*
* The option is a: {@code Map<String, String>} type.
*
* Group: common
*
* @return the name of the header {@code GooglePubsubAttributes}.
*/
@Deprecated
public String googlePubsubAttributes() {
return "CamelGooglePubsubAttributes";
}
/**
* If non-empty, identifies related messages for which publish order
* should be respected.
*
* The option is a: {@code String} type.
*
* Group: producer
*
* @return the name of the header {@code GooglePubsubOrderingKey}.
*/
public String googlePubsubOrderingKey() {
return "CamelGooglePubsubOrderingKey";
}
/**
* Can be used to manually acknowledge or negative-acknowledge a message
* when ackMode=NONE.
*
* The option is a: {@code
* org.apache.camel.component.google.pubsub.consumer.GooglePubsubAcknowledge} type.
*
* Group: consumer
*
* @return the name of the header {@code GooglePubsubAcknowledge}.
*/
public String googlePubsubAcknowledge() {
return "CamelGooglePubsubAcknowledge";
}
}
static GooglePubsubEndpointBuilder endpointBuilder(String componentName, String path) {
| GooglePubsubHeaderNameBuilder |
java | apache__camel | dsl/camel-jbang/camel-jbang-core/src/main/java/org/apache/camel/dsl/jbang/core/commands/bind/TemplateProvider.java | {
"start": 1125,
"end": 4095
} | interface ____ {
default InputStream getPipeTemplate() {
return TemplateProvider.class.getClassLoader().getResourceAsStream("templates/pipe.yaml.tmpl");
}
default InputStream getStepTemplate(String stepType) {
return TemplateProvider.class.getClassLoader().getResourceAsStream("templates/step-%s.yaml.tmpl".formatted(stepType));
}
default InputStream getEndpointTemplate(String endpointType) {
return TemplateProvider.class.getClassLoader()
.getResourceAsStream("templates/endpoint-%s.yaml.tmpl".formatted(endpointType));
}
default InputStream getErrorHandlerTemplate(String type) {
return TemplateProvider.class.getClassLoader()
.getResourceAsStream("templates/error-handler-%s.yaml.tmpl".formatted(type));
}
/**
* Creates YAML snippet representing the endpoint properties section.
*
* @param props the properties to set as endpoint properties.
* @return
*/
default String asEndpointProperties(Map<String, Object> props) {
StringBuilder sb = new StringBuilder();
if (props.isEmpty()) {
// create a dummy placeholder, so it is easier to add new properties manually
return sb.append("#properties:\n ").append("#key: \"value\"").toString();
}
sb.append("properties:\n");
for (Map.Entry<String, Object> propertyEntry : props.entrySet()) {
String propValue = propertyEntry.getValue().toString();
// Take care of Strings with colon - need to quote these values to avoid breaking YAML
if (propValue.contains(":") && !StringHelper.isQuoted(propValue)) {
propValue = "\"%s\"".formatted(propValue);
}
sb.append(" ").append(propertyEntry.getKey()).append(": ")
.append(propValue).append("\n");
}
return sb.toString().trim();
}
/**
* Creates YAML snippet representing the error handler parameters section.
*
* @param props the properties to set as error handler parameters.
*/
default String asErrorHandlerParameters(Map<String, Object> props) {
if (props.isEmpty()) {
return "parameters: {}";
}
StringBuilder sb = new StringBuilder();
sb.append("parameters:\n");
for (Map.Entry<String, Object> propertyEntry : props.entrySet()) {
sb.append(" ").append(propertyEntry.getKey()).append(": ").append(propertyEntry.getValue()).append("\n");
}
return sb.toString().trim();
}
/**
* Get additional indent that should be applied to endpoint templates.
*
* @param type the endpoint type.
* @return
*/
default int getAdditionalIndent(BindingProvider.EndpointType type) {
if (type == BindingProvider.EndpointType.ERROR_HANDLER) {
return 4;
}
return 0;
}
}
| TemplateProvider |
java | elastic__elasticsearch | server/src/internalClusterTest/java/org/elasticsearch/search/suggest/phrase/PhraseSuggesterIT.java | {
"start": 1526,
"end": 7750
} | class ____ extends ESIntegTestCase {
/**
* Reproduces the IllegalArgumentException: "At least one unigram is required but all tokens were ngrams"
*
* This happens when:
* 1. A phrase suggester is configured to use an analyzer that only produces n-grams (no unigrams)
* 2. The NoisyChannelSpellChecker is created with requireUnigram=true (which is the default)
* 3. The input text is analyzed and all resulting tokens are marked as n-grams
* 4. The NoisyChannelSpellChecker throws an IllegalArgumentException because it expects at least one unigram
*/
public void testPhraseSuggestionWithNgramOnlyAnalyzerThrowsException() throws IOException {
createIndexAndDocs(false);
// Create a phrase suggestion that uses the ngram-only field
// This should trigger the IllegalArgumentException because:
// 1. The "text.ngrams" field uses an analyzer that only produces n-grams
// 2. When "hello world" is analyzed, it produces only n-grams, but no unigrams
// 3. The DirectCandidateGenerator.analyze() method sets anyTokens=true but anyUnigram=false
// 4. NoisyChannelSpellChecker.end() throws IllegalArgumentException
SearchRequestBuilder searchBuilder = createSuggesterSearch("text.ngrams");
try {
assertResponse(searchBuilder, response -> {
// We didn't fail all shards - we get a response with failed shards
assertThat(response.status(), equalTo(RestStatus.OK));
assertThat(response.getFailedShards(), greaterThan(0));
assertThat(response.getShardFailures().length, greaterThan(0));
checkShardFailures(response.getShardFailures());
});
} catch (SearchPhaseExecutionException e) {
// If all shards fail, we get a SearchPhaseExecutionException
checkShardFailures(e.shardFailures());
}
}
private static void checkShardFailures(ShardSearchFailure[] shardFailures) {
for (ShardSearchFailure shardFailure : shardFailures) {
assertTrue(shardFailure.getCause() instanceof IllegalArgumentException);
assertEquals("At least one unigram is required but all tokens were ngrams", shardFailure.getCause().getMessage());
}
}
private static SearchRequestBuilder createSuggesterSearch(String fieldName) {
PhraseSuggestionBuilder phraseSuggestion = phraseSuggestion(fieldName).text("hello world")
.addCandidateGenerator(new DirectCandidateGeneratorBuilder("text").suggestMode("always").minWordLength(1).maxEdits(2));
SearchRequestBuilder searchBuilder = prepareSearch("test").setSize(0)
.suggest(new SuggestBuilder().addSuggestion("test_suggestion", phraseSuggestion));
return searchBuilder;
}
/**
* Demonstrates that the same configuration works fine when using a different field that produces unigrams
*/
public void testPhraseSuggestionWithUnigramFieldWorks() throws IOException {
createIndexAndDocs(false);
// Use the main "text" field instead of "text.ngrams" - this should work fine
// because the standard analyzer produces unigrams
SearchRequestBuilder searchRequestBuilder = createSuggesterSearch("text");
// This should NOT throw an exception
assertNoFailuresAndResponse(searchRequestBuilder, response -> {
// Just verify we get a response without exceptions
assertNotNull(response.getSuggest());
});
}
/**
* Test showing the same ngram-only configuration works when shingle filter allows output_unigrams=true
*/
public void testPhraseSuggestionWithNgramsAndUnigramsWorks() throws IOException {
createIndexAndDocs(true);
// Use the ngrams field, but this time it should work because the analyzer produces unigrams too
SearchRequestBuilder searchRequestBuilder = createSuggesterSearch("text.ngrams");
// This should NOT throw an exception because unigrams are available
assertNoFailuresAndResponse(searchRequestBuilder, response -> { assertNotNull(response.getSuggest()); });
}
private void createIndexAndDocs(boolean outputUnigrams) throws IOException {
// Create an index with a shingle analyzer that outputs unigrams or not
assertAcked(
prepareCreate("test").setSettings(
Settings.builder()
.put(SETTING_NUMBER_OF_SHARDS, randomIntBetween(1, 10))
.put("index.analysis.analyzer.ngram_only.tokenizer", "standard")
.putList("index.analysis.analyzer.ngram_only.filter", "my_shingle", "lowercase")
.put("index.analysis.filter.my_shingle.type", "shingle")
.put("index.analysis.filter.my_shingle.output_unigrams", outputUnigrams)
.put("index.analysis.filter.my_shingle.min_shingle_size", 2)
.put("index.analysis.filter.my_shingle.max_shingle_size", 3)
)
.setMapping(
XContentFactory.jsonBuilder()
.startObject()
.startObject("_doc")
.startObject("properties")
.startObject("text")
.field("type", "text")
.field("analyzer", "standard")
.startObject("fields")
.startObject("ngrams")
.field("type", "text")
.field("analyzer", "ngram_only") // Use our ngram-only analyzer for suggestions
.endObject()
.endObject()
.endObject()
.endObject()
.endObject()
.endObject()
)
);
ensureGreen();
// Index some test documents
indexDoc("test", "1", "text", "hello world test");
indexDoc("test", "2", "text", "another test phrase");
indexDoc("test", "3", "text", "some more content");
refresh();
}
}
| PhraseSuggesterIT |
java | elastic__elasticsearch | x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/ttest/TTestAggregationBuilderTests.java | {
"start": 1239,
"end": 4779
} | class ____ extends AbstractXContentSerializingTestCase<TTestAggregationBuilder> {
String aggregationName;
@Before
public void setupName() {
aggregationName = randomAlphaOfLength(10);
}
@Override
protected TTestAggregationBuilder doParseInstance(XContentParser parser) throws IOException {
assertSame(XContentParser.Token.START_OBJECT, parser.nextToken());
AggregatorFactories.Builder parsed = AggregatorFactories.parseAggregators(parser);
assertThat(parsed.getAggregatorFactories(), hasSize(1));
assertThat(parsed.getPipelineAggregatorFactories(), hasSize(0));
TTestAggregationBuilder agg = (TTestAggregationBuilder) parsed.getAggregatorFactories().iterator().next();
assertNull(parser.nextToken());
assertNotNull(agg);
return agg;
}
@Override
protected TTestAggregationBuilder createTestInstance() {
MultiValuesSourceFieldConfig.Builder aConfig;
TTestType tTestType = randomFrom(TTestType.values());
if (randomBoolean()) {
aConfig = new MultiValuesSourceFieldConfig.Builder().setFieldName("a_field");
} else {
aConfig = new MultiValuesSourceFieldConfig.Builder().setScript(new Script(randomAlphaOfLength(10)));
}
MultiValuesSourceFieldConfig.Builder bConfig;
if (randomBoolean()) {
bConfig = new MultiValuesSourceFieldConfig.Builder().setFieldName("b_field");
} else {
bConfig = new MultiValuesSourceFieldConfig.Builder().setScript(new Script(randomAlphaOfLength(10)));
}
if (tTestType != TTestType.PAIRED && randomBoolean()) {
aConfig.setFilter(QueryBuilders.queryStringQuery(randomAlphaOfLength(10)));
}
if (tTestType != TTestType.PAIRED && randomBoolean()) {
bConfig.setFilter(QueryBuilders.queryStringQuery(randomAlphaOfLength(10)));
}
TTestAggregationBuilder aggregationBuilder = new TTestAggregationBuilder(aggregationName).a(aConfig.build()).b(bConfig.build());
if (randomBoolean()) {
aggregationBuilder.tails(randomIntBetween(1, 2));
}
if (tTestType != TTestType.HETEROSCEDASTIC || randomBoolean()) {
aggregationBuilder.testType(randomFrom(tTestType));
}
return aggregationBuilder;
}
@Override
protected TTestAggregationBuilder mutateInstance(TTestAggregationBuilder instance) {
return null;// TODO implement https://github.com/elastic/elasticsearch/issues/25929
}
@Override
protected Writeable.Reader<TTestAggregationBuilder> instanceReader() {
return TTestAggregationBuilder::new;
}
@Override
protected NamedWriteableRegistry getNamedWriteableRegistry() {
return new NamedWriteableRegistry(new SearchModule(Settings.EMPTY, Collections.emptyList()).getNamedWriteables());
}
@Override
protected NamedXContentRegistry xContentRegistry() {
List<NamedXContentRegistry.Entry> namedXContent = new ArrayList<>();
namedXContent.add(
new NamedXContentRegistry.Entry(
BaseAggregationBuilder.class,
new ParseField(TTestAggregationBuilder.NAME),
(p, n) -> TTestAggregationBuilder.PARSER.apply(p, (String) n)
)
);
namedXContent.addAll(new SearchModule(Settings.EMPTY, Collections.emptyList()).getNamedXContents());
return new NamedXContentRegistry(namedXContent);
}
}
| TTestAggregationBuilderTests |
java | spring-projects__spring-framework | spring-tx/src/test/java/org/springframework/transaction/annotation/EnableTransactionManagementTests.java | {
"start": 17038,
"end": 17482
} | class ____ {
@Transactional(label = "${myLabel}", timeoutString = "${myTimeout}", readOnly = true)
public Collection<?> findAllFoos() {
return null;
}
@Transactional("qualifiedTransactionManager")
public void saveQualifiedFoo() {
}
@Transactional(transactionManager = "${myTransactionManager}")
public void saveQualifiedFooWithAttributeAlias() {
}
}
@Service
@Qualifier("qualified")
public static | TransactionalTestBean |
java | assertj__assertj-core | assertj-core/src/test/java/org/assertj/core/api/objectarray/ObjectArrayAssert_flatExtracting_Test.java | {
"start": 1541,
"end": 11026
} | class ____ {
private CartoonCharacter bart;
private CartoonCharacter lisa;
private CartoonCharacter maggie;
private CartoonCharacter homer;
private CartoonCharacter pebbles;
private CartoonCharacter fred;
private static final ThrowingExtractor<CartoonCharacter, List<CartoonCharacter>, Exception> childrenThrowingExtractor = CartoonCharacter::getChildren;
private static final Function<CartoonCharacter, List<CartoonCharacter>> children = CartoonCharacter::getChildren;
private static final Function<CartoonCharacter, List<CartoonCharacter>> childrenExtractor = new Function<CartoonCharacter, List<CartoonCharacter>>() {
@Override
public List<CartoonCharacter> apply(CartoonCharacter input) {
return input.getChildren();
}
};
@BeforeEach
void setUp() {
bart = new CartoonCharacter("Bart Simpson");
lisa = new CartoonCharacter("Lisa Simpson");
maggie = new CartoonCharacter("Maggie Simpson");
homer = new CartoonCharacter("Homer Simpson");
homer.addChildren(bart, lisa, maggie);
pebbles = new CartoonCharacter("Pebbles Flintstone");
fred = new CartoonCharacter("Fred Flintstone");
fred.addChildren(pebbles);
}
@Test
void should_allow_assertions_on_joined_lists_when_extracting_children_with_extractor() {
assertThat(array(homer, fred)).flatExtracting(childrenExtractor).containsOnly(bart, lisa, maggie, pebbles);
}
@Test
void should_allow_assertions_on_joined_lists_when_extracting_children() {
assertThat(array(homer, fred)).flatExtracting(children).containsOnly(bart, lisa, maggie, pebbles);
}
@Test
void should_allow_assertions_on_empty_result_lists_with_extractor() {
assertThat(array(bart, lisa, maggie)).flatExtracting(childrenExtractor).isEmpty();
}
@Test
void should_allow_assertions_on_empty_result_lists() {
assertThat(array(bart, lisa, maggie)).flatExtracting(children).isEmpty();
}
@Test
void should_throw_null_pointer_exception_when_extracting_from_null_with_extractor() {
assertThatNullPointerException().isThrownBy(() -> assertThat(array(homer, null)).flatExtracting(childrenExtractor));
}
@Test
void should_throw_null_pointer_exception_when_extracting_from_null() {
assertThatNullPointerException().isThrownBy(() -> assertThat(array(homer, null)).flatExtracting(children));
}
@Test
void should_keep_existing_description_if_set_when_extracting_using_property() {
assertThatExceptionOfType(AssertionError.class).isThrownBy(() -> assertThat(array(homer)).as("expected description")
.flatExtracting("children")
.isEmpty())
.withMessageContaining("[expected description]");
}
@Test
void should_keep_existing_description_if_set_when_extracting_using_extractor() {
assertThatExceptionOfType(AssertionError.class).isThrownBy(() -> assertThat(array(homer)).as("expected description")
.flatExtracting(childrenExtractor)
.isEmpty())
.withMessageContaining("[expected description]");
}
@Test
void should_rethrow_throwing_extractor_checked_exception_as_a_runtime_exception() {
CartoonCharacter[] childCharacters = array(bart, lisa, maggie);
assertThatExceptionOfType(RuntimeException.class).isThrownBy(() -> assertThat(childCharacters).flatExtracting(cartoonCharacter -> {
if (cartoonCharacter.getChildren().isEmpty()) throw new Exception("no children");
return cartoonCharacter.getChildren();
})).withMessage("java.lang.Exception: no children");
}
@Test
void should_let_throwing_extractor_runtime_exception_bubble_up() {
CartoonCharacter[] childCharacters = array(bart, lisa, maggie);
assertThatExceptionOfType(RuntimeException.class).isThrownBy(() -> assertThat(childCharacters).flatExtracting(cartoonCharacter -> {
if (cartoonCharacter.getChildren().isEmpty()) throw new RuntimeException("no children");
return cartoonCharacter.getChildren();
})).withMessage("no children");
}
@Test
void should_allow_assertions_on_joined_lists_when_extracting_children_with_throwing_extractor() {
CartoonCharacter[] cartoonCharacters = array(homer, fred);
assertThat(cartoonCharacters).flatExtracting(cartoonCharacter -> {
if (cartoonCharacter.getChildren().isEmpty()) throw new Exception("no children");
return cartoonCharacter.getChildren();
}).containsOnly(bart, lisa, maggie, pebbles);
}
@Test
void should_allow_assertions_on_joined_lists_when_extracting_children_with_anonymous_class_throwing_extractor() {
CartoonCharacter[] cartoonCharacters = array(homer, fred);
assertThat(cartoonCharacters).flatExtracting(new ThrowingExtractor<CartoonCharacter, List<CartoonCharacter>, Exception>() {
@Override
public List<CartoonCharacter> extractThrows(CartoonCharacter cartoonCharacter) throws Exception {
if (cartoonCharacter.getChildren().isEmpty()) throw new Exception("no children");
return cartoonCharacter.getChildren();
}
}).containsOnly(bart, lisa, maggie, pebbles);
}
@Test
void flatExtracting_should_keep_assertion_state_with_extractor() {
// GIVEN
AlwaysEqualComparator<CartoonCharacter> cartoonCharacterAlwaysEqualComparator = alwaysEqual();
// WHEN
// not all comparators are used but we want to test that they are passed correctly after extracting
// @format:off
AbstractListAssert<?, ?, ?, ?> assertion
= assertThat(array(homer, fred)).as("test description")
.withFailMessage("error message")
.withRepresentation(UNICODE_REPRESENTATION)
.usingComparatorForType(cartoonCharacterAlwaysEqualComparator, CartoonCharacter.class)
.flatExtracting(childrenExtractor)
.contains(bart, lisa, new CartoonCharacter("Unknown"));
// @format:on
// THEN
assertThat(assertion.descriptionText()).isEqualTo("test description");
assertThat(assertion.info.representation()).isEqualTo(UNICODE_REPRESENTATION);
assertThat(assertion.info.overridingErrorMessage()).isEqualTo("error message");
assertThat(comparatorsByTypeOf(assertion).getComparatorForType(CartoonCharacter.class)).isSameAs(cartoonCharacterAlwaysEqualComparator);
}
@Test
void flatExtracting_should_keep_assertion_state() {
// GIVEN
AlwaysEqualComparator<CartoonCharacter> cartoonCharacterAlwaysEqualComparator = alwaysEqual();
// WHEN
// not all comparators are used but we want to test that they are passed correctly after extracting
// @format:off
AbstractListAssert<?, ?, ?, ?> assertion
= assertThat(array(homer, fred)).as("test description")
.withFailMessage("error message")
.withRepresentation(UNICODE_REPRESENTATION)
.usingComparatorForType(cartoonCharacterAlwaysEqualComparator, CartoonCharacter.class)
.flatExtracting(children)
.contains(bart, lisa, new CartoonCharacter("Unknown"));
// @format:on
// THEN
assertThat(assertion.descriptionText()).isEqualTo("test description");
assertThat(assertion.info.representation()).isEqualTo(UNICODE_REPRESENTATION);
assertThat(assertion.info.overridingErrorMessage()).isEqualTo("error message");
assertThat(comparatorsByTypeOf(assertion).getComparatorForType(CartoonCharacter.class)).isSameAs(cartoonCharacterAlwaysEqualComparator);
}
@Test
void flatExtracting_with_ThrowingExtractor_should_keep_assertion_state() {
// GIVEN
AlwaysEqualComparator<CartoonCharacter> cartoonCharacterAlwaysEqualComparator = alwaysEqual();
// WHEN
// not all comparators are used but we want to test that they are passed correctly after extracting
// @format:off
AbstractListAssert<?, ?, ?, ?> assertion
= assertThat(array(homer, fred)).as("test description")
.withFailMessage("error message")
.withRepresentation(UNICODE_REPRESENTATION)
.usingComparatorForType(cartoonCharacterAlwaysEqualComparator, CartoonCharacter.class)
.flatExtracting(childrenThrowingExtractor)
.contains(bart, lisa, new CartoonCharacter("Unknown"));
// @format:on
// THEN
assertThat(assertion.descriptionText()).isEqualTo("test description");
assertThat(assertion.info.representation()).isEqualTo(UNICODE_REPRESENTATION);
assertThat(assertion.info.overridingErrorMessage()).isEqualTo("error message");
assertThat(comparatorsByTypeOf(assertion).getComparatorForType(CartoonCharacter.class)).isSameAs(cartoonCharacterAlwaysEqualComparator);
}
}
| ObjectArrayAssert_flatExtracting_Test |
java | lettuce-io__lettuce-core | src/main/java/io/lettuce/core/pubsub/PubSubCommandHandler.java | {
"start": 1411,
"end": 9034
} | class ____<K, V> extends CommandHandler {
private static final InternalLogger logger = InternalLoggerFactory.getInstance(PubSubCommandHandler.class);
private final PubSubEndpoint<K, V> endpoint;
private final RedisCodec<K, V> codec;
private final Deque<ReplayOutput<K, V>> queue = new ArrayDeque<>();
private final DecodeBufferPolicy decodeBufferPolicy;
private ResponseHeaderReplayOutput<K, V> replay;
private PubSubOutput<K, V> output;
/**
* Initialize a new instance.
*
* @param clientOptions client options for this connection, must not be {@code null}
* @param clientResources client resources for this connection
* @param codec Codec.
* @param endpoint the Pub/Sub endpoint for Pub/Sub callback.
*/
public PubSubCommandHandler(ClientOptions clientOptions, ClientResources clientResources, RedisCodec<K, V> codec,
PubSubEndpoint<K, V> endpoint) {
super(clientOptions, clientResources, endpoint);
this.endpoint = endpoint;
this.codec = codec;
this.decodeBufferPolicy = clientOptions.getDecodeBufferPolicy();
this.output = new PubSubOutput<>(codec);
}
@Override
public void channelInactive(ChannelHandlerContext ctx) throws Exception {
replay = null;
queue.clear();
super.channelInactive(ctx);
}
@SuppressWarnings("unchecked")
@Override
protected void decode(ChannelHandlerContext ctx, ByteBuf buffer) throws InterruptedException {
if (output.type() != null && !output.isCompleted()) {
if (!super.decode(buffer, output)) {
decodeBufferPolicy.afterPartialDecode(buffer);
return;
}
RedisCommand<?, ?, ?> peek = getStack().peek();
canComplete(peek);
doNotifyMessage(output);
output = new PubSubOutput<>(codec);
}
if (!getStack().isEmpty() || isPushDecode(buffer)) {
super.decode(ctx, buffer);
}
ReplayOutput<K, V> replay;
while ((replay = queue.poll()) != null) {
replay.replay(output);
doNotifyMessage(output);
output = new PubSubOutput<>(codec);
}
while (super.getStack().isEmpty() && buffer.isReadable()) {
if (!super.decode(buffer, output)) {
decodeBufferPolicy.afterPartialDecode(buffer);
return;
}
doNotifyMessage(output);
output = new PubSubOutput<>(codec);
}
decodeBufferPolicy.afterDecoding(buffer);
}
@Override
protected boolean canDecode(ByteBuf buffer) {
return super.canDecode(buffer) && output.type() == null;
}
@Override
protected boolean canComplete(RedisCommand<?, ?, ?> command) {
if (isResp2PubSubMessage(replay)) {
queue.add(replay);
replay = null;
return false;
}
return super.canComplete(command);
}
@Override
protected void complete(RedisCommand<?, ?, ?> command) {
if (replay != null && command.getOutput() != null) {
try {
replay.replay(command.getOutput());
} catch (Exception e) {
command.completeExceptionally(e);
}
replay = null;
}
super.complete(command);
}
/**
* Check whether {@link ResponseHeaderReplayOutput} contains a Pub/Sub message that requires Pub/Sub dispatch instead of to
* be used as Command output.
*
* @param replay
* @return
*/
private static boolean isResp2PubSubMessage(ResponseHeaderReplayOutput<?, ?> replay) {
if (replay == null) {
return false;
}
String firstElement = replay.firstElement;
if (replay.multiCount != null && firstElement != null) {
if (replay.multiCount == 3 && firstElement.equalsIgnoreCase(PubSubOutput.Type.message.name())) {
return true;
}
if (replay.multiCount == 4 && firstElement.equalsIgnoreCase(PubSubOutput.Type.pmessage.name())) {
return true;
}
}
return false;
}
@Override
protected CommandOutput<?, ?, ?> getCommandOutput(RedisCommand<?, ?, ?> command) {
if (getStack().isEmpty() || command.getOutput() == null) {
return super.getCommandOutput(command);
}
if (replay == null) {
replay = new ResponseHeaderReplayOutput<>();
}
return replay;
}
protected void notifyPushListeners(PushMessage notification) {
if (PubSubOutput.Type.isPubSubType(notification.getType())) {
PubSubOutput.Type type = PubSubOutput.Type.valueOf(notification.getType());
RedisCommand<?, ?, ?> command = getStack().peek();
if (command != null && shouldCompleteCommand(type, command)) {
completeCommand(notification, command);
}
doNotifyMessage(toPubSubMessage(notification));
}
super.notifyPushListeners(notification);
}
private boolean shouldCompleteCommand(PubSubOutput.Type type, RedisCommand<?, ?, ?> command) {
String commandType = command.getType().toString();
switch (type) {
case subscribe:
return commandType.equalsIgnoreCase("SUBSCRIBE");
case ssubscribe:
return commandType.equalsIgnoreCase("SSUBSCRIBE");
case psubscribe:
return commandType.equalsIgnoreCase("PSUBSCRIBE");
case unsubscribe:
return commandType.equalsIgnoreCase("UNSUBSCRIBE");
case sunsubscribe:
return commandType.equalsIgnoreCase("SUNSUBSCRIBE");
case punsubscribe:
return commandType.equalsIgnoreCase("PUNSUBSCRIBE");
}
return false;
}
private void completeCommand(PushMessage notification, RedisCommand<?, ?, ?> command) {
CommandOutput<?, ?, ?> output = command.getOutput();
for (Object value : notification.getContent()) {
if (value instanceof Long) {
output.set((Long) value);
} else {
output.set((ByteBuffer) value);
}
}
getStack().poll().complete();
}
private PubSubMessage<K, V> toPubSubMessage(PushMessage notification) {
PubSubOutput<K, V> output = new PubSubOutput<>(codec);
for (Object argument : notification.getContent()) {
if (argument instanceof Long) {
output.set((Long) argument);
} else {
output.set((ByteBuffer) argument);
}
}
return output;
}
@Override
@SuppressWarnings("unchecked")
protected void afterDecode(ChannelHandlerContext ctx, RedisCommand<?, ?, ?> command) {
super.afterDecode(ctx, command);
if (command.getOutput() instanceof PubSubOutput) {
doNotifyMessage((PubSubOutput) command.getOutput());
}
}
private void doNotifyMessage(PubSubMessage<K, V> message) {
try {
endpoint.notifyMessage(message);
} catch (Exception e) {
logger.error("Unexpected error occurred in PubSubEndpoint.notifyMessage", e);
}
}
/**
* Inspectable {@link ReplayOutput} to investigate the first multi and string response elements.
*
* @param <K>
* @param <V>
*/
static | PubSubCommandHandler |
java | spring-projects__spring-framework | spring-context/src/test/java/org/springframework/aop/aspectj/DeclarationOrderIndependenceTests.java | {
"start": 2520,
"end": 2800
} | class ____ implements BeanNameAware {
@SuppressWarnings("unused")
private String beanName;
@Override
public void setBeanName(String name) {
this.beanName = name;
}
}
/** public visibility is required */
@SuppressWarnings("serial")
public static | BeanNameAwareMixin |
java | elastic__elasticsearch | x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseParser.java | {
"start": 132480,
"end": 133724
} | class ____ extends RelationPrimaryContext {
public QueryNoWithContext queryNoWith() {
return getRuleContext(QueryNoWithContext.class, 0);
}
public QualifiedNameContext qualifiedName() {
return getRuleContext(QualifiedNameContext.class, 0);
}
public TerminalNode AS() {
return getToken(SqlBaseParser.AS, 0);
}
public AliasedQueryContext(RelationPrimaryContext ctx) {
copyFrom(ctx);
}
@Override
public void enterRule(ParseTreeListener listener) {
if (listener instanceof SqlBaseListener) ((SqlBaseListener) listener).enterAliasedQuery(this);
}
@Override
public void exitRule(ParseTreeListener listener) {
if (listener instanceof SqlBaseListener) ((SqlBaseListener) listener).exitAliasedQuery(this);
}
@Override
public <T> T accept(ParseTreeVisitor<? extends T> visitor) {
if (visitor instanceof SqlBaseVisitor) return ((SqlBaseVisitor<? extends T>) visitor).visitAliasedQuery(this);
else return visitor.visitChildren(this);
}
}
@SuppressWarnings("CheckReturnValue")
public static | AliasedQueryContext |
java | dropwizard__dropwizard | dropwizard-jersey/src/test/java/io/dropwizard/jersey/optional/OptionalLongMessageBodyWriterTest.java | {
"start": 791,
"end": 3300
} | class ____ extends AbstractJerseyTest {
@Override
protected Application configure() {
return DropwizardResourceConfig.forTesting()
.register(new EmptyOptionalExceptionMapper())
.register(OptionalLongReturnResource.class);
}
@Test
void presentOptionalsReturnTheirValue() {
assertThat(target("optional-return")
.queryParam("id", "1").request()
.get(Long.class))
.isEqualTo(1L);
}
@Test
void presentOptionalsReturnTheirValueWithResponse() {
assertThat(target("optional-return/response-wrapped")
.queryParam("id", "1").request()
.get(Long.class))
.isEqualTo(1L);
}
@Test
void absentOptionalsThrowANotFound() {
Invocation.Builder request = target("optional-return").request();
assertThatExceptionOfType(WebApplicationException.class)
.isThrownBy(() -> request.get(Long.class))
.satisfies(e -> assertThat(e.getResponse().getStatus()).isEqualTo(404));
}
@Test
void valueSetIgnoresDefault() {
assertThat(target("optional-return/default").queryParam("id", "1").request().get(Long.class))
.isEqualTo(target("optional-return/long/default").queryParam("id", "1").request().get(Long.class))
.isEqualTo(1L);
}
@Test
void valueNotSetReturnsDefault() {
assertThat(target("optional-return/default").request().get(Long.class))
.isEqualTo(target("optional-return/long/default").request().get(Long.class))
.isEqualTo(0L);
}
@Test
void valueEmptyReturns404() {
assertThat(target("optional-return/default").queryParam("id", "").request().get())
.extracting(Response::getStatus)
.isEqualTo(404);
}
@Test
void valueInvalidReturns404() {
Invocation.Builder request = target("optional-return/default").queryParam("id", "invalid")
.request();
assertThatExceptionOfType(NotFoundException.class)
.isThrownBy(() -> request.get(Long.class));
Invocation.Builder longRequest = target("optional-return/long/default").queryParam("id", "invalid")
.request();
assertThatExceptionOfType(NotFoundException.class)
.isThrownBy(() -> longRequest.get(Long.class));
}
@Path("optional-return")
@Produces(MediaType.TEXT_PLAIN)
public static | OptionalLongMessageBodyWriterTest |
java | mapstruct__mapstruct | processor/src/test/java/org/mapstruct/ap/test/references/samename/b/CustomMapper.java | {
"start": 209,
"end": 320
} | class ____ {
public String longToString(long l) {
return String.valueOf( l * 2 );
}
}
| CustomMapper |
java | elastic__elasticsearch | x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/aggregate/Stats.java | {
"start": 506,
"end": 1064
} | class ____ extends CompoundNumericAggregate {
public Stats(Source source, Expression field) {
super(source, field);
}
@Override
protected NodeInfo<Stats> info() {
return NodeInfo.create(this, Stats::new, field());
}
@Override
public Stats replaceChildren(List<Expression> newChildren) {
return new Stats(source(), newChildren.get(0));
}
public static boolean isTypeCompatible(Expression e) {
return e instanceof Min || e instanceof Max || e instanceof Avg || e instanceof Sum;
}
}
| Stats |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/propertyref/DoesNotWorkPk.java | {
"start": 296,
"end": 1475
} | class ____ implements Serializable {
private static final long serialVersionUID = 1L;
@Column(name = "track_no")
private String id1;
@Column(name = "track_ext")
private String id2;
public String getId1() {
return id1;
}
public void setId1(String id1) {
this.id1 = id1;
}
public String getId2() {
return id2;
}
public void setId2(String id2) {
this.id2 = id2;
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + ((id1 == null) ? 0 : id1.hashCode());
result = prime * result + ((id2 == null) ? 0 : id2.hashCode());
return result;
}
@Override
public boolean equals(Object obj) {
if ( this == obj ) {
return true;
}
if ( obj == null ) {
return false;
}
if ( !(obj instanceof DoesNotWorkPk) ) {
return false;
}
DoesNotWorkPk other = (DoesNotWorkPk) obj;
if ( id1 == null ) {
if ( other.id1 != null ) {
return false;
}
}
else if ( !id1.equals( other.id1 ) ) {
return false;
}
if ( id2 == null ) {
if ( other.id2 != null ) {
return false;
}
}
else if ( !id2.equals( other.id2 ) ) {
return false;
}
return true;
}
}
| DoesNotWorkPk |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/search/aggregations/metrics/HyperLogLogPlusPlus.java | {
"start": 7632,
"end": 8967
} | class ____ extends AbstractHyperLogLog implements Releasable {
private final BigArrays bigArrays;
// array for holding the runlens.
private ByteArray runLens;
HyperLogLog(BigArrays bigArrays, long initialBucketCount, int precision) {
super(precision);
this.runLens = bigArrays.newByteArray(initialBucketCount << precision);
this.bigArrays = bigArrays;
}
public long maxOrd() {
return runLens.size() >>> precision();
}
@Override
protected void addRunLen(long bucketOrd, int register, int encoded) {
final long bucketIndex = (bucketOrd << p) + register;
runLens.set(bucketIndex, (byte) Math.max(encoded, runLens.get(bucketIndex)));
}
@Override
protected RunLenIterator getRunLens(long bucketOrd) {
return new HyperLogLogIterator(this, bucketOrd);
}
protected void reset(long bucketOrd) {
runLens.fill(bucketOrd << p, (bucketOrd << p) + m, (byte) 0);
}
protected void ensureCapacity(long numBuckets) {
runLens = bigArrays.grow(runLens, numBuckets << p);
}
@Override
public void close() {
Releasables.close(runLens);
}
}
private static | HyperLogLog |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/script/ReindexScript.java | {
"start": 1474,
"end": 1597
} | interface ____ {
ReindexScript newInstance(Map<String, Object> params, CtxMap<ReindexMetadata> ctxMap);
}
}
| Factory |
java | google__guava | android/guava/src/com/google/common/collect/Maps.java | {
"start": 122347,
"end": 122968
} | class ____<
K extends @Nullable Object, V extends @Nullable Object>
extends FilteredEntryMap<K, V> implements SortedMap<K, V> {
FilteredEntrySortedMap(
SortedMap<K, V> unfiltered, Predicate<? super Entry<K, V>> entryPredicate) {
super(unfiltered, entryPredicate);
}
SortedMap<K, V> sortedMap() {
return (SortedMap<K, V>) unfiltered;
}
@Override
public SortedSet<K> keySet() {
return (SortedSet<K>) super.keySet();
}
@Override
SortedSet<K> createKeySet() {
return new SortedKeySet();
}
@WeakOuter
final | FilteredEntrySortedMap |
java | apache__camel | components/camel-telemetry/src/test/java/org/apache/camel/telemetry/mock/MockTrace.java | {
"start": 975,
"end": 1381
} | class ____ {
private List<Span> spanList;
public MockTrace() {
spanList = new ArrayList<>();
}
public void addSpan(Span span) {
spanList.add(span);
}
public List<Span> spans() {
Collections.sort(spanList, new SpanComparator());
return spanList;
}
@Override
public String toString() {
return spans().toString();
}
}
| MockTrace |
java | FasterXML__jackson-databind | src/test/java/tools/jackson/databind/deser/creators/TestCreators.java | {
"start": 7994,
"end": 16489
} | class ____ extends TreeMap<Object,Object>
{
Boolean _b;
private MapWithFactory(Boolean b) {
_b = b;
}
@JsonCreator
static MapWithFactory createIt(@JsonProperty("b") Boolean b)
{
return new MapWithFactory(b);
}
}
/*
/**********************************************************************
/* Test methods, valid cases, non-deferred, no-mixins
/**********************************************************************
*/
private final ObjectMapper MAPPER = newJsonMapper();
@Test
public void testSimpleConstructor() throws Exception
{
ConstructorBean bean = MAPPER.readValue("{ \"x\" : 42 }", ConstructorBean.class);
assertEquals(42, bean.x);
}
// [JACKSON-850]
@Test
public void testNoArgsFactory() throws Exception
{
NoArgFactoryBean value = MAPPER.readValue("{\"y\":13}", NoArgFactoryBean.class);
assertEquals(13, value.y);
assertEquals(123, value.x);
}
@Test
public void testSimpleDoubleConstructor() throws Exception
{
Double exp = Double.valueOf("0.25");
DoubleConstructorBean bean = MAPPER.readValue(exp.toString(), DoubleConstructorBean.class);
assertEquals(exp, bean.d);
}
@Test
public void testSimpleBooleanConstructor() throws Exception
{
BooleanConstructorBean bean = MAPPER.readValue(" true ", BooleanConstructorBean.class);
assertEquals(Boolean.TRUE, bean.b);
BooleanConstructorBean2 bean2 = MAPPER.readValue(" true ", BooleanConstructorBean2.class);
assertTrue(bean2.b);
}
@Test
public void testSimpleBigIntegerConstructor() throws Exception
{
// 10-Dec-2020, tatu: Small (magnitude) values will NOT trigger path
// we want; must use something outside of Long range...
BigInteger INPUT = BigInteger.valueOf(Long.MAX_VALUE).add(BigInteger.TEN);
final BigIntegerWrapper result = MAPPER.readValue(INPUT.toString(), BigIntegerWrapper.class);
assertEquals(INPUT, result._value);
}
@Test
public void testSimpleBigDecimalConstructor() throws Exception
{
// 10-Dec-2020, tatu: not sure we can ever trigger this with JSON;
// but should be possible to handle via TokenBuffer?
BigDecimal INPUT = new BigDecimal("42.5");
try (TokenBuffer buf = TokenBuffer.forGeneration()) {
buf.writeNumber(INPUT);
try (JsonParser p = buf.asParser()) {
final BigDecimalWrapper result = MAPPER.readValue(p,
BigDecimalWrapper.class);
assertEquals(INPUT, result._value);
}
}
}
@Test
public void testSimpleFactory() throws Exception
{
FactoryBean bean = MAPPER.readValue("{ \"f\" : 0.25 }", FactoryBean.class);
assertEquals(0.25, bean.d);
}
@Test
public void testLongFactory() throws Exception
{
long VALUE = 123456789000L;
LongFactoryBean bean = MAPPER.readValue(String.valueOf(VALUE), LongFactoryBean.class);
assertEquals(VALUE, bean.value);
}
@Test
public void testStringFactory() throws Exception
{
String str = "abc";
StringFactoryBean bean = MAPPER.readValue(q(str), StringFactoryBean.class);
assertEquals(str, bean.value);
}
@Test
public void testStringFactoryAlt() throws Exception
{
String str = "xyz";
FromStringBean bean = MAPPER.readValue(q(str), FromStringBean.class);
assertEquals(str, bean.value);
}
@Test
public void testConstructorAndFactoryCreator() throws Exception
{
CreatorBeanWithBoth bean = MAPPER.readValue
("{ \"a\" : \"xyz\", \"x\" : 12 }", CreatorBeanWithBoth.class);
assertEquals(13, bean.x);
assertEquals("ctor:xyz", bean.a);
}
@Test
public void testConstructorAndProps() throws Exception
{
ConstructorAndPropsBean bean = MAPPER.readValue
("{ \"a\" : \"1\", \"b\": 2, \"c\" : true }", ConstructorAndPropsBean.class);
assertEquals(1, bean.a);
assertEquals(2, bean.b);
assertTrue(bean.c);
}
@Test
public void testFactoryAndProps() throws Exception
{
FactoryAndPropsBean bean = MAPPER.readValue
("{ \"a\" : [ false, true, false ], \"b\": 2, \"c\" : -1 }", FactoryAndPropsBean.class);
assertEquals(2, bean.arg2);
assertEquals(-1, bean.arg3);
boolean[] arg1 = bean.arg1;
assertNotNull(arg1);
assertEquals(3, arg1.length);
assertFalse(arg1[0]);
assertTrue(arg1[1]);
assertFalse(arg1[2]);
}
/**
* Test to verify that multiple creators may co-exist, iff
* they use different JSON type as input
*/
@Test
public void testMultipleCreators() throws Exception
{
MultiBean bean = MAPPER.readValue("123", MultiBean.class);
assertEquals(Integer.valueOf(123), bean.value);
bean = MAPPER.readValue(q("abc"), MultiBean.class);
assertEquals("abc", bean.value);
bean = MAPPER.readValue("0.25", MultiBean.class);
assertEquals(Double.valueOf(0.25), bean.value);
}
/*
/**********************************************************************
/* Test methods, valid cases, deferred, no mixins
/**********************************************************************
*/
@Test
public void testDeferredConstructorAndProps() throws Exception
{
DeferredConstructorAndPropsBean bean = MAPPER.readValue
("{ \"propB\" : \"...\", \"createA\" : [ 1 ], \"propA\" : null }",
DeferredConstructorAndPropsBean.class);
assertEquals("...", bean.propB);
assertNull(bean.propA);
assertNotNull(bean.createA);
assertEquals(1, bean.createA.length);
assertEquals(1, bean.createA[0]);
}
@Test
public void testDeferredFactoryAndProps() throws Exception
{
DeferredFactoryAndPropsBean bean = MAPPER.readValue
("{ \"prop\" : \"1\", \"ctor\" : \"2\" }", DeferredFactoryAndPropsBean.class);
assertEquals("1", bean.prop);
assertEquals("2", bean.ctor);
}
/*
/**********************************************************************
/* Test methods, valid cases, mixins
/**********************************************************************
*/
@Test
public void testFactoryCreatorWithMixin() throws Exception
{
ObjectMapper m = jsonMapperBuilder()
.addMixIn(CreatorBeanWithBoth.class, MixIn.class)
.build();
CreatorBeanWithBoth bean = m.readValue
("{ \"a\" : \"xyz\", \"x\" : 12 }", CreatorBeanWithBoth.class);
assertEquals(11, bean.x);
assertEquals("factory:xyz", bean.a);
}
@Test
public void testFactoryCreatorWithRenamingMixin() throws Exception
{
ObjectMapper m = jsonMapperBuilder()
.addMixIn(FactoryBean.class, FactoryBeanMixIn.class)
.build();
// override changes property name from "f" to "mixed"
FactoryBean bean = m.readValue("{ \"mixed\" : 20.5 }", FactoryBean.class);
assertEquals(20.5, bean.d);
}
/*
/**********************************************************************
/* Test methods, valid cases, Map with creator
/**********************************************************************
*/
@Test
public void testMapWithConstructor() throws Exception
{
MapWithCtor result = MAPPER.readValue
("{\"text\":\"abc\", \"entry\":true, \"number\":123, \"xy\":\"yx\"}",
MapWithCtor.class);
// regular Map entries:
assertEquals(Boolean.TRUE, result.get("entry"));
assertEquals("yx", result.get("xy"));
assertEquals(2, result.size());
// then ones passed via constructor
assertEquals("abc", result._text);
assertEquals(123, result._number);
}
@Test
public void testMapWithFactory() throws Exception
{
MapWithFactory result = MAPPER.readValue
("{\"x\":\"...\",\"b\":true }",
MapWithFactory.class);
assertEquals("...", result.get("x"));
assertEquals(1, result.size());
assertEquals(Boolean.TRUE, result._b);
}
}
| MapWithFactory |
java | apache__logging-log4j2 | log4j-core-test/src/test/java/org/apache/logging/log4j/core/async/DiscardingAsyncQueueFullPolicyTest.java | {
"start": 1279,
"end": 5629
} | class ____ {
private static long currentThreadId() {
return Thread.currentThread().getId();
}
private static long otherThreadId() {
return -1;
}
@Test
void testConstructorDisallowsNullThresholdLevel() {
assertThrows(NullPointerException.class, () -> {
new DiscardingAsyncQueueFullPolicy(null);
});
}
@Test
void testThresholdLevelIsConstructorValue() {
assertSame(Level.ALL, new DiscardingAsyncQueueFullPolicy(Level.ALL).getThresholdLevel());
assertSame(Level.OFF, new DiscardingAsyncQueueFullPolicy(Level.OFF).getThresholdLevel());
assertSame(Level.INFO, new DiscardingAsyncQueueFullPolicy(Level.INFO).getThresholdLevel());
}
@Test
void testGetRouteDiscardsIfThresholdCapacityReachedAndLevelEqualOrLessSpecificThanThreshold() {
final DiscardingAsyncQueueFullPolicy router = new DiscardingAsyncQueueFullPolicy(Level.WARN);
for (final Level level : new Level[] {Level.WARN, Level.INFO, Level.DEBUG, Level.TRACE, Level.ALL}) {
assertEquals(EventRoute.DISCARD, router.getRoute(currentThreadId(), level), level.name());
assertEquals(EventRoute.DISCARD, router.getRoute(otherThreadId(), level), level.name());
assertEquals(EventRoute.DISCARD, router.getRoute(currentThreadId(), level), level.name());
assertEquals(EventRoute.DISCARD, router.getRoute(otherThreadId(), level), level.name());
}
}
@Test
void testGetRouteDiscardsIfQueueFullAndLevelEqualOrLessSpecificThanThreshold() {
final DiscardingAsyncQueueFullPolicy router = new DiscardingAsyncQueueFullPolicy(Level.WARN);
for (final Level level : new Level[] {Level.WARN, Level.INFO, Level.DEBUG, Level.TRACE, Level.ALL}) {
assertEquals(EventRoute.DISCARD, router.getRoute(currentThreadId(), level), level.name());
assertEquals(EventRoute.DISCARD, router.getRoute(otherThreadId(), level), level.name());
}
}
@Test
void testGetRouteEnqueuesIfThresholdCapacityReachedButLevelMoreSpecificThanThreshold() {
final DiscardingAsyncQueueFullPolicy router = new DiscardingAsyncQueueFullPolicy(Level.WARN);
for (final Level level : new Level[] {Level.ERROR, Level.FATAL, Level.OFF}) {
assertEquals(EventRoute.SYNCHRONOUS, router.getRoute(currentThreadId(), level), level.name());
assertEquals(EventRoute.ENQUEUE, router.getRoute(otherThreadId(), level), level.name());
assertEquals(EventRoute.SYNCHRONOUS, router.getRoute(currentThreadId(), level), level.name());
assertEquals(EventRoute.ENQUEUE, router.getRoute(otherThreadId(), level), level.name());
}
}
@Test
void testGetRouteEnqueueIfOtherThreadQueueFullAndLevelMoreSpecificThanThreshold() {
final DiscardingAsyncQueueFullPolicy router = new DiscardingAsyncQueueFullPolicy(Level.WARN);
for (final Level level : new Level[] {Level.ERROR, Level.FATAL, Level.OFF}) {
assertEquals(EventRoute.ENQUEUE, router.getRoute(otherThreadId(), level), level.name());
}
}
@Test
void testGetRouteSynchronousIfCurrentThreadQueueFullAndLevelMoreSpecificThanThreshold() {
final DiscardingAsyncQueueFullPolicy router = new DiscardingAsyncQueueFullPolicy(Level.WARN);
for (final Level level : new Level[] {Level.ERROR, Level.FATAL, Level.OFF}) {
assertEquals(EventRoute.SYNCHRONOUS, router.getRoute(currentThreadId(), level), level.name());
}
}
@Test
void testGetDiscardCount() {
final DiscardingAsyncQueueFullPolicy router = new DiscardingAsyncQueueFullPolicy(Level.INFO);
assertEquals(0, DiscardingAsyncQueueFullPolicy.getDiscardCount(router), "initially");
assertEquals(EventRoute.DISCARD, router.getRoute(-1L, Level.INFO));
assertEquals(1, DiscardingAsyncQueueFullPolicy.getDiscardCount(router), "increase");
assertEquals(EventRoute.DISCARD, router.getRoute(-1L, Level.INFO));
assertEquals(2, DiscardingAsyncQueueFullPolicy.getDiscardCount(router), "increase");
assertEquals(EventRoute.DISCARD, router.getRoute(-1L, Level.INFO));
assertEquals(3, DiscardingAsyncQueueFullPolicy.getDiscardCount(router), "increase");
}
}
| DiscardingAsyncQueueFullPolicyTest |
java | apache__camel | components/camel-cyberark-vault/src/test/java/org/apache/camel/component/cyberark/vault/integration/CyberArkVaultPropertiesSourceIT.java | {
"start": 2627,
"end": 12967
} | class ____ extends CamelTestSupport {
private static final Logger LOG = LoggerFactory.getLogger(CyberArkVaultPropertiesSourceIT.class);
private static HttpClient httpClient;
private static String authToken;
@BeforeAll
public static void init() throws Exception {
httpClient = HttpClient.newBuilder()
.connectTimeout(Duration.ofSeconds(10))
.build();
// Authenticate and get token
authToken = authenticate();
// Create test secrets in Conjur
createSecret("database", "{\"username\":\"dbuser\",\"password\":\"dbpass\",\"host\":\"localhost\"}");
createSecret("api/credentials", "{\"token\":\"secret-token\",\"key\":\"api-key-123\"}");
createSecret("simple-secret", "my-simple-value");
}
private static String authenticate() throws IOException, InterruptedException {
String url = String.format("%s/authn/%s/%s/authenticate",
System.getProperty("camel.cyberark.url"),
System.getProperty("camel.cyberark.account"),
System.getProperty("camel.cyberark.username"));
HttpRequest request = HttpRequest.newBuilder()
.uri(URI.create(url))
.header("Content-Type", "text/plain")
.POST(HttpRequest.BodyPublishers.ofString(System.getProperty("camel.cyberark.apiKey")))
.build();
HttpResponse<String> response = httpClient.send(request, HttpResponse.BodyHandlers.ofString());
if (response.statusCode() == 200) {
return response.body();
}
throw new IOException("Failed to authenticate: " + response.statusCode());
}
private static void createSecret(String secretId, String secretValue) {
try {
String url = String.format("%s/secrets/%s/variable/%s",
System.getProperty("camel.cyberark.url"),
System.getProperty("camel.cyberark.account"),
secretId);
HttpRequest request = HttpRequest.newBuilder()
.uri(URI.create(url))
.header("Authorization", "Token token=\"" + Base64.getEncoder()
.encodeToString(authToken.getBytes(StandardCharsets.UTF_8)) + "\"")
.header("Content-Type", "application/octet-stream")
.POST(HttpRequest.BodyPublishers.ofString(secretValue))
.build();
HttpResponse<String> response = httpClient.send(request, HttpResponse.BodyHandlers.ofString());
LOG.info("Created secret '{}': HTTP {}", secretId, response.statusCode());
} catch (Exception e) {
LOG.warn("Could not create secret '{}': {}", secretId, e.getMessage());
// Continue anyway - some tests expect missing secrets
}
}
@Test
public void testSimpleSecretRetrieval() throws Exception {
context.getVaultConfiguration().cyberark().setUrl(System.getProperty("camel.cyberark.url"));
context.getVaultConfiguration().cyberark().setAccount(System.getProperty("camel.cyberark.account"));
context.getVaultConfiguration().cyberark().setUsername(System.getProperty("camel.cyberark.username"));
context.getVaultConfiguration().cyberark().setApiKey(System.getProperty("camel.cyberark.apiKey"));
context.addRoutes(new RouteBuilder() {
@Override
public void configure() {
from("direct:start")
.setBody(simple("{{cyberark:simple-secret}}"))
.to("mock:result");
}
});
context.start();
getMockEndpoint("mock:result").expectedBodiesReceived("my-simple-value");
template.sendBody("direct:start", "test");
MockEndpoint.assertIsSatisfied(context);
}
@Test
public void testJsonFieldExtraction() throws Exception {
context.getVaultConfiguration().cyberark().setUrl(System.getProperty("camel.cyberark.url"));
context.getVaultConfiguration().cyberark().setAccount(System.getProperty("camel.cyberark.account"));
context.getVaultConfiguration().cyberark().setUsername(System.getProperty("camel.cyberark.username"));
context.getVaultConfiguration().cyberark().setApiKey(System.getProperty("camel.cyberark.apiKey"));
context.addRoutes(new RouteBuilder() {
@Override
public void configure() {
from("direct:username")
.setBody(simple("{{cyberark:database#username}}"))
.to("mock:result");
from("direct:password")
.setBody(simple("{{cyberark:database#password}}"))
.to("mock:result");
}
});
context.start();
getMockEndpoint("mock:result").expectedBodiesReceived("dbuser", "dbpass");
template.sendBody("direct:username", "test");
template.sendBody("direct:password", "test");
MockEndpoint.assertIsSatisfied(context);
}
@Test
public void testDefaultValue() throws Exception {
context.getVaultConfiguration().cyberark().setUrl(System.getProperty("camel.cyberark.url"));
context.getVaultConfiguration().cyberark().setAccount(System.getProperty("camel.cyberark.account"));
context.getVaultConfiguration().cyberark().setUsername(System.getProperty("camel.cyberark.username"));
context.getVaultConfiguration().cyberark().setApiKey(System.getProperty("camel.cyberark.apiKey"));
context.addRoutes(new RouteBuilder() {
@Override
public void configure() {
from("direct:start")
.setBody(simple("{{cyberark:nonexistent:defaultValue}}"))
.to("mock:result");
}
});
context.start();
getMockEndpoint("mock:result").expectedBodiesReceived("defaultValue");
template.sendBody("direct:start", "test");
MockEndpoint.assertIsSatisfied(context);
}
@Test
public void testFieldWithDefaultValue() throws Exception {
context.getVaultConfiguration().cyberark().setUrl(System.getProperty("camel.cyberark.url"));
context.getVaultConfiguration().cyberark().setAccount(System.getProperty("camel.cyberark.account"));
context.getVaultConfiguration().cyberark().setUsername(System.getProperty("camel.cyberark.username"));
context.getVaultConfiguration().cyberark().setApiKey(System.getProperty("camel.cyberark.apiKey"));
context.addRoutes(new RouteBuilder() {
@Override
public void configure() {
from("direct:start")
.setBody(simple("{{cyberark:database#nonexistent:defaultUser}}"))
.to("mock:result");
}
});
context.start();
getMockEndpoint("mock:result").expectedBodiesReceived("defaultUser");
template.sendBody("direct:start", "test");
MockEndpoint.assertIsSatisfied(context);
}
@Test
public void testSecretNotFound() {
assertThrows(FailedToCreateRouteException.class, () -> {
context.getVaultConfiguration().cyberark().setUrl(System.getProperty("camel.cyberark.url"));
context.getVaultConfiguration().cyberark().setAccount(System.getProperty("camel.cyberark.account"));
context.getVaultConfiguration().cyberark().setUsername(System.getProperty("camel.cyberark.username"));
context.getVaultConfiguration().cyberark().setApiKey(System.getProperty("camel.cyberark.apiKey"));
context.addRoutes(new RouteBuilder() {
@Override
public void configure() {
from("direct:start")
.setBody(simple("{{cyberark:nonexistent}}"))
.to("mock:result");
}
});
context.start();
template.sendBody("direct:start", "test");
});
}
@Test
public void testMultipleSecretsWithFields() throws Exception {
context.getVaultConfiguration().cyberark().setUrl(System.getProperty("camel.cyberark.url"));
context.getVaultConfiguration().cyberark().setAccount(System.getProperty("camel.cyberark.account"));
context.getVaultConfiguration().cyberark().setUsername(System.getProperty("camel.cyberark.username"));
context.getVaultConfiguration().cyberark().setApiKey(System.getProperty("camel.cyberark.apiKey"));
context.addRoutes(new RouteBuilder() {
@Override
public void configure() {
from("direct:db")
.setBody(simple("{{cyberark:database#host}}"))
.to("mock:result");
from("direct:api")
.setBody(simple("{{cyberark:api/credentials#key}}"))
.to("mock:result");
}
});
context.start();
getMockEndpoint("mock:result").expectedBodiesReceived("localhost", "api-key-123");
template.sendBody("direct:db", "test");
template.sendBody("direct:api", "test");
MockEndpoint.assertIsSatisfied(context);
}
@Test
public void testComplexSecretPath() throws Exception {
context.getVaultConfiguration().cyberark().setUrl(System.getProperty("camel.cyberark.url"));
context.getVaultConfiguration().cyberark().setAccount(System.getProperty("camel.cyberark.account"));
context.getVaultConfiguration().cyberark().setUsername(System.getProperty("camel.cyberark.username"));
context.getVaultConfiguration().cyberark().setApiKey(System.getProperty("camel.cyberark.apiKey"));
context.addRoutes(new RouteBuilder() {
@Override
public void configure() {
from("direct:start")
.setBody(simple("{{cyberark:api/credentials#token}}"))
.to("mock:result");
}
});
context.start();
getMockEndpoint("mock:result").expectedBodiesReceived("secret-token");
template.sendBody("direct:start", "test");
MockEndpoint.assertIsSatisfied(context);
}
}
| CyberArkVaultPropertiesSourceIT |
java | spring-projects__spring-framework | spring-context/src/test/java/org/springframework/context/annotation/ConfigurationClassPostProcessorAotContributionTests.java | {
"start": 3937,
"end": 4244
} | class ____ {
private final TestGenerationContext generationContext = new TestGenerationContext();
private final MockBeanFactoryInitializationCode beanFactoryInitializationCode =
new MockBeanFactoryInitializationCode(this.generationContext);
@Nested
| ConfigurationClassPostProcessorAotContributionTests |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/threadsafety/ImmutableCheckerTest.java | {
"start": 20579,
"end": 20872
} | class ____<T> {
final ImmutableList<T> xs = null;
}
""")
.addSourceLines(
"Test.java",
"""
import com.google.errorprone.annotations.Immutable;
import java.util.List;
@Immutable
| X |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/index/IndexService.java | {
"start": 55341,
"end": 58804
} | class ____ extends BaseAsyncTask {
AsyncRetentionLeaseSyncTask(final IndexService indexService) {
super(
indexService,
indexService.threadPool.executor(ThreadPool.Names.MANAGEMENT),
RETENTION_LEASE_SYNC_INTERVAL_SETTING.get(indexService.getIndexSettings().getSettings())
);
}
@Override
protected void runInternal() {
indexService.syncRetentionLeases();
}
@Override
public String toString() {
return "retention_lease_sync";
}
}
AsyncRefreshTask getRefreshTask() { // for tests
return refreshTask;
}
AsyncTranslogFSync getFsyncTask() { // for tests
return fsyncTask;
}
AsyncTrimTranslogTask getTrimTranslogTask() { // for tests
return trimTranslogTask;
}
/**
* Clears the caches for the given shard id if the shard is still allocated on this node
*/
public boolean clearCaches(boolean queryCache, boolean fieldDataCache, String... fields) {
boolean clearedAtLeastOne = false;
if (queryCache) {
clearedAtLeastOne = true;
indexCache.query().clear("api");
}
if (fieldDataCache) {
clearedAtLeastOne = true;
if (fields.length == 0) {
indexFieldData.clear();
} else {
for (String field : fields) {
indexFieldData.clearField(field);
}
}
}
if (clearedAtLeastOne == false) {
if (fields.length == 0) {
indexCache.clear("api");
indexFieldData.clear();
} else {
// only clear caches relating to the specified fields
for (String field : fields) {
indexFieldData.clearField(field);
}
}
}
return clearedAtLeastOne;
}
public static Map<String, MappedFieldType> parseRuntimeMappings(
Map<String, Object> runtimeMappings,
MapperService mapperService,
IndexSettings indexSettings,
MappingLookup lookup
) {
if (runtimeMappings.isEmpty()) {
return Collections.emptyMap();
}
// TODO add specific tests to SearchExecutionTests similar to the ones in FieldTypeLookupTests
MappingParserContext parserContext = mapperService.parserContext();
Map<String, RuntimeField> runtimeFields = RuntimeField.parseRuntimeFields(new HashMap<>(runtimeMappings), parserContext, false);
Map<String, MappedFieldType> runtimeFieldTypes = RuntimeField.collectFieldTypes(runtimeFields.values());
if (false == indexSettings.getIndexMetadata().getRoutingPaths().isEmpty()) {
for (String r : runtimeMappings.keySet()) {
if (Regex.simpleMatch(indexSettings.getIndexMetadata().getRoutingPaths(), r)) {
throw new IllegalArgumentException("runtime fields may not match [routing_path] but [" + r + "] matched");
}
}
}
runtimeFieldTypes.keySet().forEach(lookup::validateDoesNotShadow);
return runtimeFieldTypes;
}
public IndexFieldData<?> loadFielddata(MappedFieldType fieldType, FieldDataContext fieldDataContext) {
return indexFieldData.getForField(fieldType, fieldDataContext);
}
}
| AsyncRetentionLeaseSyncTask |
java | apache__camel | components/camel-pdf/src/test/java/org/apache/camel/component/pdf/PdfCreationTest.java | {
"start": 1943,
"end": 5582
} | class ____ extends CamelTestSupport {
@EndpointInject("mock:result")
protected MockEndpoint resultEndpoint;
@Test
public void testPdfCreation() throws Exception {
final String expectedText = "expectedText";
template.sendBody("direct:start", expectedText);
resultEndpoint.setExpectedMessageCount(1);
resultEndpoint.expectedMessagesMatches(new Predicate() {
@Override
public boolean matches(Exchange exchange) {
Object body = exchange.getIn().getBody();
assertThat(body, instanceOf(ByteArrayOutputStream.class));
try {
PDDocument doc = Loader.loadPDF(
new RandomAccessReadBuffer(new ByteArrayInputStream(((ByteArrayOutputStream) body).toByteArray())));
PDFTextStripper pdfTextStripper = new PDFTextStripper();
String text = pdfTextStripper.getText(doc);
assertEquals(1, doc.getNumberOfPages());
assertThat(text, containsString(expectedText));
} catch (IOException e) {
throw new RuntimeCamelException(e);
}
return true;
}
});
resultEndpoint.assertIsSatisfied();
}
@Test
public void testPdfCreationWithEncryption() throws Exception {
final String ownerPass = "ownerPass";
final String userPass = "userPass";
final String expectedText = "expectedText";
AccessPermission accessPermission = new AccessPermission();
accessPermission.setCanPrint(false);
StandardProtectionPolicy protectionPolicy = new StandardProtectionPolicy(ownerPass, userPass, accessPermission);
protectionPolicy.setEncryptionKeyLength(128);
template.sendBodyAndHeader("direct:start",
expectedText,
PdfHeaderConstants.PROTECTION_POLICY_HEADER_NAME,
protectionPolicy);
resultEndpoint.setExpectedMessageCount(1);
resultEndpoint.expectedMessagesMatches(new Predicate() {
@Override
public boolean matches(Exchange exchange) {
Object body = exchange.getIn().getBody();
assertThat(body, instanceOf(ByteArrayOutputStream.class));
try {
PDDocument doc
= Loader.loadPDF(new RandomAccessReadBuffer(
new ByteArrayInputStream(((ByteArrayOutputStream) body).toByteArray())), userPass);
assertTrue(doc.isEncrypted(), "Expected encrypted document");
assertFalse(doc.getCurrentAccessPermission().canPrint(), "Printing should not be permitted");
PDFTextStripper pdfTextStripper = new PDFTextStripper();
String text = pdfTextStripper.getText(doc);
assertEquals(1, doc.getNumberOfPages());
assertThat(text, containsString(expectedText));
} catch (Exception e) {
throw new RuntimeCamelException(e);
}
return true;
}
});
resultEndpoint.assertIsSatisfied();
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
from("direct:start")
.to("pdf:create?fontSize=6&font=COURIER&pageSize=PAGE_SIZE_A1")
.to("mock:result");
}
};
}
}
| PdfCreationTest |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/time/FromTemporalAccessorTest.java | {
"start": 5862,
"end": 6277
} | class ____ {
void from(LocalDate value) {
Year.from(value);
}
}
""")
.doTest();
}
@Test
public void typeFromType_threeTenExtra_knownGood() {
helper
.addSourceLines(
"TestClass.java",
"""
import java.time.LocalTime;
import org.threeten.extra.AmPm;
public | TestClass |
java | apache__kafka | clients/src/test/java/org/apache/kafka/clients/consumer/ConsumerRecordTest.java | {
"start": 1197,
"end": 4653
} | class ____ {
@Test
public void testShortConstructor() {
String topic = "topic";
int partition = 0;
long offset = 23;
String key = "key";
String value = "value";
ConsumerRecord<String, String> record = new ConsumerRecord<>(topic, partition, offset, key, value);
assertEquals(topic, record.topic());
assertEquals(partition, record.partition());
assertEquals(offset, record.offset());
assertEquals(key, record.key());
assertEquals(value, record.value());
assertEquals(TimestampType.NO_TIMESTAMP_TYPE, record.timestampType());
assertEquals(ConsumerRecord.NO_TIMESTAMP, record.timestamp());
assertEquals(ConsumerRecord.NULL_SIZE, record.serializedKeySize());
assertEquals(ConsumerRecord.NULL_SIZE, record.serializedValueSize());
assertEquals(Optional.empty(), record.leaderEpoch());
assertEquals(Optional.empty(), record.deliveryCount());
assertEquals(new RecordHeaders(), record.headers());
}
@Test
public void testLongConstructor() {
String topic = "topic";
int partition = 0;
long offset = 23;
long timestamp = 23434217432432L;
TimestampType timestampType = TimestampType.CREATE_TIME;
String key = "key";
String value = "value";
int serializedKeySize = 100;
int serializedValueSize = 1142;
RecordHeaders headers = new RecordHeaders();
headers.add(new RecordHeader("header key", "header value".getBytes(StandardCharsets.UTF_8)));
ConsumerRecord<String, String> record = new ConsumerRecord<>(topic, partition, offset, timestamp, timestampType,
serializedKeySize, serializedValueSize, key, value, headers, Optional.empty());
assertEquals(topic, record.topic());
assertEquals(partition, record.partition());
assertEquals(offset, record.offset());
assertEquals(key, record.key());
assertEquals(value, record.value());
assertEquals(timestampType, record.timestampType());
assertEquals(timestamp, record.timestamp());
assertEquals(serializedKeySize, record.serializedKeySize());
assertEquals(serializedValueSize, record.serializedValueSize());
assertEquals(Optional.empty(), record.leaderEpoch());
assertEquals(Optional.empty(), record.deliveryCount());
assertEquals(headers, record.headers());
Optional<Integer> leaderEpoch = Optional.of(10);
Optional<Short> deliveryCount = Optional.of((short) 1);
record = new ConsumerRecord<>(topic, partition, offset, timestamp, timestampType,
serializedKeySize, serializedValueSize, key, value, headers, leaderEpoch, deliveryCount);
assertEquals(topic, record.topic());
assertEquals(partition, record.partition());
assertEquals(offset, record.offset());
assertEquals(key, record.key());
assertEquals(value, record.value());
assertEquals(timestampType, record.timestampType());
assertEquals(timestamp, record.timestamp());
assertEquals(serializedKeySize, record.serializedKeySize());
assertEquals(serializedValueSize, record.serializedValueSize());
assertEquals(leaderEpoch, record.leaderEpoch());
assertEquals(deliveryCount, record.deliveryCount());
assertEquals(headers, record.headers());
}
}
| ConsumerRecordTest |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/runtime/state/TaskLocalStateStore.java | {
"start": 1134,
"end": 1565
} | interface ____ as a task-manager-level local storage for local
* checkpointed state. The purpose is to provide access to a state that is stored locally for a
* faster recovery compared to the state that is stored remotely in a stable store DFS. For now,
* this storage is only complementary to the stable storage and local state is typically lost in
* case of machine failures. In such cases (and others), client code of this | serve |
java | elastic__elasticsearch | x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/CohereRequestManager.java | {
"start": 481,
"end": 1006
} | class ____ extends BaseRequestManager {
protected CohereRequestManager(ThreadPool threadPool, CohereModel model) {
super(threadPool, model.getInferenceEntityId(), RateLimitGrouping.of(model), model.rateLimitServiceSettings().rateLimitSettings());
}
record RateLimitGrouping(int apiKeyHash) {
public static RateLimitGrouping of(CohereModel model) {
Objects.requireNonNull(model);
return new RateLimitGrouping(model.apiKey().hashCode());
}
}
}
| CohereRequestManager |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/annotations/Imported.java | {
"start": 346,
"end": 446
} | class ____ available for use in HQL queries by its unqualified name.
* <p>
* By default, non-entity | as |
java | spring-projects__spring-framework | spring-test/src/main/java/org/springframework/test/web/servlet/setup/StandaloneMockMvcBuilder.java | {
"start": 21209,
"end": 21714
} | class ____ implements StringValueResolver {
private final PropertyPlaceholderHelper helper;
private final PlaceholderResolver resolver;
public StaticStringValueResolver(Map<String, String> values) {
this.helper = new PropertyPlaceholderHelper("${", "}", ":", null, false);
this.resolver = values::get;
}
@Override
public String resolveStringValue(String strVal) throws BeansException {
return this.helper.replacePlaceholders(strVal, this.resolver);
}
}
}
| StaticStringValueResolver |
java | junit-team__junit5 | junit-platform-launcher/src/main/java/org/junit/platform/launcher/LauncherConstants.java | {
"start": 4413,
"end": 9711
} | class ____ is
* exactly {@code MyListener}.
* <li>{@code *System*, *Dev*}: deactivates every listener whose FQCN contains
* {@code System} or {@code Dev}.
* <li>{@code org.example.MyListener, org.example.TheirListener}: deactivates
* listeners whose FQCN is exactly {@code org.example.MyListener} or
* {@code org.example.TheirListener}.
* </ul>
*
* <p>Only listeners registered via the {@code ServiceLoader} mechanism can
* be deactivated. In other words, any listener registered explicitly via the
* {@link LauncherDiscoveryRequest} cannot be deactivated via this
* configuration parameter.
*
* <p>In addition, since execution listeners are registered before the test
* run starts, this configuration parameter can only be supplied as a JVM
* system property or via the JUnit Platform configuration file but cannot
* be supplied in the {@link LauncherDiscoveryRequest}} that is passed to
* the {@link Launcher}.
*
* @see #DEACTIVATE_ALL_LISTENERS_PATTERN
* @see org.junit.platform.launcher.TestExecutionListener
*/
public static final String DEACTIVATE_LISTENERS_PATTERN_PROPERTY_NAME = "junit.platform.execution.listeners.deactivate";
/**
* Wildcard pattern which signals that all listeners registered via the
* {@link java.util.ServiceLoader ServiceLoader} mechanism should be deactivated:
* {@value}
*
* @see #DEACTIVATE_LISTENERS_PATTERN_PROPERTY_NAME
* @see org.junit.platform.launcher.TestExecutionListener
*/
public static final String DEACTIVATE_ALL_LISTENERS_PATTERN = ClassNamePatternFilterUtils.ALL_PATTERN;
/**
* Property name used to enable support for
* {@link LauncherInterceptor} instances to be registered via the
* {@link java.util.ServiceLoader ServiceLoader} mechanism: {@value}
*
* <p>By default, interceptor registration is disabled.
*
* <p>Since interceptors are registered before the test run starts, this
* configuration parameter can only be supplied as a JVM system property or
* via the JUnit Platform configuration file but cannot be supplied in the
* {@link LauncherDiscoveryRequest}} that is passed to the {@link Launcher}.
*
* @see LauncherInterceptor
*/
@API(status = MAINTAINED, since = "1.13.3")
public static final String ENABLE_LAUNCHER_INTERCEPTORS = "junit.platform.launcher.interceptors.enabled";
/**
* Property name used to enable dry-run mode for test execution.
*
* <p>When dry-run mode is enabled, no tests will be executed. Instead, all
* registered {@link TestExecutionListener TestExecutionListeners} will
* receive events for all test descriptors that are part of the discovered
* {@link TestPlan}. All containers will be reported as successful and all
* tests as skipped. This can be useful to test changes in the configuration
* of a build or to verify a listener is called as expected without having
* to wait for all tests to be executed.
*
* <p>Value must be either {@code true} or {@code false}; defaults to {@code false}.
*/
@API(status = MAINTAINED, since = "1.13.3")
public static final String DRY_RUN_PROPERTY_NAME = "junit.platform.execution.dryRun.enabled";
/**
* Property name used to enable or disable stack trace pruning.
*
* <p>By default, stack trace pruning is enabled.
*
* @see org.junit.platform.launcher.core.EngineExecutionOrchestrator
*/
@API(status = MAINTAINED, since = "1.13.3")
public static final String STACKTRACE_PRUNING_ENABLED_PROPERTY_NAME = "junit.platform.stacktrace.pruning.enabled";
/**
* Property name used to configure the output directory for reporting.
*
* <p>If set, value must be a valid path that will be created if it doesn't
* exist. If not set, the default output directory will be determined by the
* reporting engine based on the current working directory.
*
* @since 1.12
* @see #OUTPUT_DIR_UNIQUE_NUMBER_PLACEHOLDER
* @see org.junit.platform.engine.OutputDirectoryCreator
* @see EngineDiscoveryRequest#getOutputDirectoryCreator()
* @see TestPlan#getOutputDirectoryCreator()
*/
@API(status = MAINTAINED, since = "1.13.3")
public static final String OUTPUT_DIR_PROPERTY_NAME = "junit.platform.reporting.output.dir";
/**
* Placeholder for use in {@link #OUTPUT_DIR_PROPERTY_NAME} that will be
* replaced with a unique number.
*
* <p>This can be used to create a unique output directory for each test
* run. For example, if multiple forks are used, each fork can be configured
* to write its output to a separate directory.
*
* @since 1.12
* @see #OUTPUT_DIR_PROPERTY_NAME
*/
@API(status = MAINTAINED, since = "1.13.3")
public static final String OUTPUT_DIR_UNIQUE_NUMBER_PLACEHOLDER = "{uniqueNumber}";
/**
* Property name used to configure the critical severity of issues
* encountered during test discovery.
*
* <p>If an engine reports an issue with a severity equal to or higher than
* the configured critical severity, its tests will not be executed.
* Depending on {@link #DISCOVERY_ISSUE_FAILURE_PHASE_PROPERTY_NAME}, a
* {@link org.junit.platform.launcher.core.DiscoveryIssueException} listing
* all critical issues will be thrown during discovery or be reported as
* engine-level failure during execution.
*
* <h4>Supported Values</h4>
*
* <p>Supported values include names of | name |
java | alibaba__fastjson | src/test/java/com/alibaba/json/test/benchmark/decode/BooleanArray1000Decode.java | {
"start": 153,
"end": 740
} | class ____ extends BenchmarkCase {
private String text;
public BooleanArray1000Decode(){
super("BooleanArray1000Decode");
StringBuilder buf = new StringBuilder();
buf.append('[');
for (int i = 0; i < 1000; ++i) {
if (i != 0) {
buf.append(",");
}
buf.append(i % 2 == 0 ? "true" : "false");
}
buf.append(']');
this.text = buf.toString();
}
@Override
public void execute(Codec codec) throws Exception {
codec.decode(text);
}
}
| BooleanArray1000Decode |
java | ReactiveX__RxJava | src/test/java/io/reactivex/rxjava3/internal/operators/flowable/FlowableJoinTest.java | {
"start": 1337,
"end": 17622
} | class ____ extends RxJavaTest {
Subscriber<Object> subscriber = TestHelper.mockSubscriber();
BiFunction<Integer, Integer, Integer> add = new BiFunction<Integer, Integer, Integer>() {
@Override
public Integer apply(Integer t1, Integer t2) {
return t1 + t2;
}
};
<T> Function<Integer, Flowable<T>> just(final Flowable<T> flowable) {
return new Function<Integer, Flowable<T>>() {
@Override
public Flowable<T> apply(Integer t1) {
return flowable;
}
};
}
@Before
public void before() {
MockitoAnnotations.openMocks(this);
}
@Test
public void normal1() {
PublishProcessor<Integer> source1 = PublishProcessor.create();
PublishProcessor<Integer> source2 = PublishProcessor.create();
Flowable<Integer> m = source1.join(source2,
just(Flowable.never()),
just(Flowable.never()), add);
m.subscribe(subscriber);
source1.onNext(1);
source1.onNext(2);
source1.onNext(4);
source2.onNext(16);
source2.onNext(32);
source2.onNext(64);
source1.onComplete();
source2.onComplete();
verify(subscriber, times(1)).onNext(17);
verify(subscriber, times(1)).onNext(18);
verify(subscriber, times(1)).onNext(20);
verify(subscriber, times(1)).onNext(33);
verify(subscriber, times(1)).onNext(34);
verify(subscriber, times(1)).onNext(36);
verify(subscriber, times(1)).onNext(65);
verify(subscriber, times(1)).onNext(66);
verify(subscriber, times(1)).onNext(68);
verify(subscriber, times(1)).onComplete();
verify(subscriber, never()).onError(any(Throwable.class));
}
@Test
public void normal1WithDuration() {
PublishProcessor<Integer> source1 = PublishProcessor.create();
PublishProcessor<Integer> source2 = PublishProcessor.create();
PublishProcessor<Integer> duration1 = PublishProcessor.create();
Flowable<Integer> m = source1.join(source2,
just(duration1),
just(Flowable.never()), add);
m.subscribe(subscriber);
source1.onNext(1);
source1.onNext(2);
source2.onNext(16);
duration1.onNext(1);
source1.onNext(4);
source1.onNext(8);
source1.onComplete();
source2.onComplete();
verify(subscriber, times(1)).onNext(17);
verify(subscriber, times(1)).onNext(18);
verify(subscriber, times(1)).onNext(20);
verify(subscriber, times(1)).onNext(24);
verify(subscriber, times(1)).onComplete();
verify(subscriber, never()).onError(any(Throwable.class));
}
@Test
public void normal2() {
PublishProcessor<Integer> source1 = PublishProcessor.create();
PublishProcessor<Integer> source2 = PublishProcessor.create();
Flowable<Integer> m = source1.join(source2,
just(Flowable.never()),
just(Flowable.never()), add);
m.subscribe(subscriber);
source1.onNext(1);
source1.onNext(2);
source1.onComplete();
source2.onNext(16);
source2.onNext(32);
source2.onNext(64);
source2.onComplete();
verify(subscriber, times(1)).onNext(17);
verify(subscriber, times(1)).onNext(18);
verify(subscriber, times(1)).onNext(33);
verify(subscriber, times(1)).onNext(34);
verify(subscriber, times(1)).onNext(65);
verify(subscriber, times(1)).onNext(66);
verify(subscriber, times(1)).onComplete();
verify(subscriber, never()).onError(any(Throwable.class));
}
@Test
public void leftThrows() {
PublishProcessor<Integer> source1 = PublishProcessor.create();
PublishProcessor<Integer> source2 = PublishProcessor.create();
Flowable<Integer> m = source1.join(source2,
just(Flowable.never()),
just(Flowable.never()), add);
m.subscribe(subscriber);
source2.onNext(1);
source1.onError(new RuntimeException("Forced failure"));
verify(subscriber, times(1)).onError(any(Throwable.class));
verify(subscriber, never()).onComplete();
verify(subscriber, never()).onNext(any());
}
@Test
public void rightThrows() {
PublishProcessor<Integer> source1 = PublishProcessor.create();
PublishProcessor<Integer> source2 = PublishProcessor.create();
Flowable<Integer> m = source1.join(source2,
just(Flowable.never()),
just(Flowable.never()), add);
m.subscribe(subscriber);
source1.onNext(1);
source2.onError(new RuntimeException("Forced failure"));
verify(subscriber, times(1)).onError(any(Throwable.class));
verify(subscriber, never()).onComplete();
verify(subscriber, never()).onNext(any());
}
@Test
public void leftDurationThrows() {
PublishProcessor<Integer> source1 = PublishProcessor.create();
PublishProcessor<Integer> source2 = PublishProcessor.create();
Flowable<Integer> duration1 = Flowable.<Integer> error(new RuntimeException("Forced failure"));
Flowable<Integer> m = source1.join(source2,
just(duration1),
just(Flowable.never()), add);
m.subscribe(subscriber);
source1.onNext(1);
verify(subscriber, times(1)).onError(any(Throwable.class));
verify(subscriber, never()).onComplete();
verify(subscriber, never()).onNext(any());
}
@Test
public void rightDurationThrows() {
PublishProcessor<Integer> source1 = PublishProcessor.create();
PublishProcessor<Integer> source2 = PublishProcessor.create();
Flowable<Integer> duration1 = Flowable.<Integer> error(new RuntimeException("Forced failure"));
Flowable<Integer> m = source1.join(source2,
just(Flowable.never()),
just(duration1), add);
m.subscribe(subscriber);
source2.onNext(1);
verify(subscriber, times(1)).onError(any(Throwable.class));
verify(subscriber, never()).onComplete();
verify(subscriber, never()).onNext(any());
}
@Test
public void leftDurationSelectorThrows() {
PublishProcessor<Integer> source1 = PublishProcessor.create();
PublishProcessor<Integer> source2 = PublishProcessor.create();
Function<Integer, Flowable<Integer>> fail = new Function<Integer, Flowable<Integer>>() {
@Override
public Flowable<Integer> apply(Integer t1) {
throw new RuntimeException("Forced failure");
}
};
Flowable<Integer> m = source1.join(source2,
fail,
just(Flowable.never()), add);
m.subscribe(subscriber);
source1.onNext(1);
verify(subscriber, times(1)).onError(any(Throwable.class));
verify(subscriber, never()).onComplete();
verify(subscriber, never()).onNext(any());
}
@Test
public void rightDurationSelectorThrows() {
PublishProcessor<Integer> source1 = PublishProcessor.create();
PublishProcessor<Integer> source2 = PublishProcessor.create();
Function<Integer, Flowable<Integer>> fail = new Function<Integer, Flowable<Integer>>() {
@Override
public Flowable<Integer> apply(Integer t1) {
throw new RuntimeException("Forced failure");
}
};
Flowable<Integer> m = source1.join(source2,
just(Flowable.never()),
fail, add);
m.subscribe(subscriber);
source2.onNext(1);
verify(subscriber, times(1)).onError(any(Throwable.class));
verify(subscriber, never()).onComplete();
verify(subscriber, never()).onNext(any());
}
@Test
public void resultSelectorThrows() {
PublishProcessor<Integer> source1 = PublishProcessor.create();
PublishProcessor<Integer> source2 = PublishProcessor.create();
BiFunction<Integer, Integer, Integer> fail = new BiFunction<Integer, Integer, Integer>() {
@Override
public Integer apply(Integer t1, Integer t2) {
throw new RuntimeException("Forced failure");
}
};
Flowable<Integer> m = source1.join(source2,
just(Flowable.never()),
just(Flowable.never()), fail);
m.subscribe(subscriber);
source1.onNext(1);
source2.onNext(2);
verify(subscriber, times(1)).onError(any(Throwable.class));
verify(subscriber, never()).onComplete();
verify(subscriber, never()).onNext(any());
}
@Test
public void dispose() {
TestHelper.checkDisposed(PublishProcessor.<Integer>create().join(Flowable.just(1),
Functions.justFunction(Flowable.never()),
Functions.justFunction(Flowable.never()), new BiFunction<Integer, Integer, Integer>() {
@Override
public Integer apply(Integer a, Integer b) throws Exception {
return a + b;
}
}));
}
@Test
public void take() {
Flowable.just(1).join(
Flowable.just(2),
Functions.justFunction(Flowable.never()),
Functions.justFunction(Flowable.never()),
new BiFunction<Integer, Integer, Integer>() {
@Override
public Integer apply(Integer a, Integer b) throws Exception {
return a + b;
}
})
.take(1)
.test()
.assertResult(3);
}
@Test
public void rightClose() {
PublishProcessor<Integer> pp = PublishProcessor.create();
TestSubscriber<Integer> ts = pp.join(Flowable.just(2),
Functions.justFunction(Flowable.never()),
Functions.justFunction(Flowable.empty()),
new BiFunction<Integer, Integer, Integer>() {
@Override
public Integer apply(Integer a, Integer b) throws Exception {
return a + b;
}
})
.test()
.assertEmpty();
pp.onNext(1);
ts.assertEmpty();
}
@Test
public void resultSelectorThrows2() {
PublishProcessor<Integer> pp = PublishProcessor.create();
TestSubscriber<Integer> ts = pp.join(
Flowable.just(2),
Functions.justFunction(Flowable.never()),
Functions.justFunction(Flowable.never()),
new BiFunction<Integer, Integer, Integer>() {
@Override
public Integer apply(Integer a, Integer b) throws Exception {
throw new TestException();
}
})
.test();
pp.onNext(1);
pp.onComplete();
ts.assertFailure(TestException.class);
}
@Test
public void badOuterSource() {
List<Throwable> errors = TestHelper.trackPluginErrors();
try {
new Flowable<Integer>() {
@Override
protected void subscribeActual(Subscriber<? super Integer> subscriber) {
subscriber.onSubscribe(new BooleanSubscription());
subscriber.onError(new TestException("First"));
subscriber.onError(new TestException("Second"));
}
}
.join(Flowable.just(2),
Functions.justFunction(Flowable.never()),
Functions.justFunction(Flowable.never()),
new BiFunction<Integer, Integer, Integer>() {
@Override
public Integer apply(Integer a, Integer b) throws Exception {
return a + b;
}
})
.to(TestHelper.<Integer>testConsumer())
.assertFailureAndMessage(TestException.class, "First");
TestHelper.assertUndeliverable(errors, 0, TestException.class, "Second");
} finally {
RxJavaPlugins.reset();
}
}
@Test
public void badEndSource() {
List<Throwable> errors = TestHelper.trackPluginErrors();
try {
@SuppressWarnings("rawtypes")
final Subscriber[] o = { null };
TestSubscriberEx<Integer> ts = Flowable.just(1)
.join(Flowable.just(2),
Functions.justFunction(Flowable.never()),
Functions.justFunction(new Flowable<Integer>() {
@Override
protected void subscribeActual(Subscriber<? super Integer> subscriber) {
o[0] = subscriber;
subscriber.onSubscribe(new BooleanSubscription());
subscriber.onError(new TestException("First"));
}
}),
new BiFunction<Integer, Integer, Integer>() {
@Override
public Integer apply(Integer a, Integer b) throws Exception {
return a + b;
}
})
.to(TestHelper.<Integer>testConsumer());
o[0].onError(new TestException("Second"));
ts
.assertFailureAndMessage(TestException.class, "First");
TestHelper.assertUndeliverable(errors, 0, TestException.class, "Second");
} finally {
RxJavaPlugins.reset();
}
}
@Test
public void backpressureOverflowRight() {
PublishProcessor<Integer> pp1 = PublishProcessor.create();
PublishProcessor<Integer> pp2 = PublishProcessor.create();
TestSubscriber<Object> ts = pp1.join(pp2, Functions.justFunction(Flowable.never()), Functions.justFunction(Flowable.never()),
new BiFunction<Integer, Integer, Object>() {
@Override
public Object apply(Integer a, Integer b) throws Exception {
return a + b;
}
})
.test(0L);
pp1.onNext(1);
pp2.onNext(2);
ts.assertFailure(MissingBackpressureException.class);
}
@Test
public void backpressureOverflowLeft() {
PublishProcessor<Integer> pp1 = PublishProcessor.create();
PublishProcessor<Integer> pp2 = PublishProcessor.create();
TestSubscriber<Object> ts = pp1.join(pp2, Functions.justFunction(Flowable.never()), Functions.justFunction(Flowable.never()),
new BiFunction<Integer, Integer, Object>() {
@Override
public Object apply(Integer a, Integer b) throws Exception {
return a + b;
}
})
.test(0L);
pp2.onNext(2);
pp1.onNext(1);
ts.assertFailure(MissingBackpressureException.class);
}
@Test
public void badRequest() {
PublishProcessor<Integer> pp1 = PublishProcessor.create();
PublishProcessor<Integer> pp2 = PublishProcessor.create();
TestHelper.assertBadRequestReported(pp1.join(pp2, Functions.justFunction(Flowable.never()), Functions.justFunction(Flowable.never()), (a, b) -> a + b));
}
@Test
public void bothTerminateWithWorkRemaining() {
PublishProcessor<Integer> pp1 = PublishProcessor.create();
PublishProcessor<Integer> pp2 = PublishProcessor.create();
TestSubscriber<Integer> ts = pp1.join(
pp2,
v -> Flowable.never(),
v -> Flowable.never(),
(a, b) -> a + b)
.doOnNext(v -> {
pp1.onComplete();
pp2.onNext(2);
pp2.onComplete();
})
.test();
pp1.onNext(0);
pp2.onNext(1);
ts.assertComplete();
}
}
| FlowableJoinTest |
java | apache__rocketmq | remoting/src/main/java/org/apache/rocketmq/remoting/netty/NettyEventType.java | {
"start": 854,
"end": 938
} | enum ____ {
CONNECT,
CLOSE,
IDLE,
EXCEPTION,
ACTIVE
}
| NettyEventType |
java | elastic__elasticsearch | modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/FailProcessorException.java | {
"start": 628,
"end": 840
} | class ____ by {@link FailProcessor}.
*
* This exception is caught in the {@link CompoundProcessor} and
* then changes the state of {@link IngestDocument}. This
* exception should get serialized.
*/
public | thrown |
java | spring-projects__spring-boot | module/spring-boot-micrometer-metrics/src/test/java/org/springframework/boot/micrometer/metrics/autoconfigure/export/stackdriver/StackdriverPropertiesTests.java | {
"start": 1053,
"end": 1598
} | class ____ extends StepRegistryPropertiesTests {
@Test
void defaultValuesAreConsistent() {
StackdriverProperties properties = new StackdriverProperties();
StackdriverConfig config = (key) -> null;
assertStepRegistryDefaultValues(properties, config);
assertThat(properties.getResourceType()).isEqualTo(config.resourceType());
assertThat(properties.isUseSemanticMetricTypes()).isEqualTo(config.useSemanticMetricTypes());
assertThat(properties.getMetricTypePrefix()).isEqualTo(config.metricTypePrefix());
}
}
| StackdriverPropertiesTests |
java | elastic__elasticsearch | x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/alibabacloudsearch/completion/AlibabaCloudSearchCompletionServiceSettingsTests.java | {
"start": 827,
"end": 3199
} | class ____ extends AbstractWireSerializingTestCase<
AlibabaCloudSearchCompletionServiceSettings> {
public static AlibabaCloudSearchCompletionServiceSettings createRandom() {
var commonSettings = AlibabaCloudSearchServiceSettingsTests.createRandom();
return new AlibabaCloudSearchCompletionServiceSettings(commonSettings);
}
public void testFromMap() {
var model = "model";
var host = "host";
var workspaceName = "default";
var httpSchema = "https";
var serviceSettings = AlibabaCloudSearchCompletionServiceSettings.fromMap(
new HashMap<>(
Map.of(
AlibabaCloudSearchServiceSettings.HOST,
host,
AlibabaCloudSearchServiceSettings.SERVICE_ID,
model,
AlibabaCloudSearchServiceSettings.WORKSPACE_NAME,
workspaceName,
AlibabaCloudSearchServiceSettings.HTTP_SCHEMA_NAME,
httpSchema
)
),
null
);
MatcherAssert.assertThat(
serviceSettings,
is(
new AlibabaCloudSearchCompletionServiceSettings(
new AlibabaCloudSearchServiceSettings(model, host, workspaceName, httpSchema, null)
)
)
);
}
@Override
protected Writeable.Reader<AlibabaCloudSearchCompletionServiceSettings> instanceReader() {
return AlibabaCloudSearchCompletionServiceSettings::new;
}
@Override
protected AlibabaCloudSearchCompletionServiceSettings createTestInstance() {
return createRandom();
}
@Override
protected AlibabaCloudSearchCompletionServiceSettings mutateInstance(AlibabaCloudSearchCompletionServiceSettings instance)
throws IOException {
return createRandom();
}
public static Map<String, Object> getServiceSettingsMap(String serviceId, String host, String workspaceName) {
var map = new HashMap<String, Object>();
map.put(AlibabaCloudSearchServiceSettings.SERVICE_ID, serviceId);
map.put(AlibabaCloudSearchServiceSettings.HOST, host);
map.put(AlibabaCloudSearchServiceSettings.WORKSPACE_NAME, workspaceName);
return map;
}
}
| AlibabaCloudSearchCompletionServiceSettingsTests |
java | micronaut-projects__micronaut-core | inject-java/src/test/groovy/io/micronaut/aop/introduction/StubIntroducer.java | {
"start": 1065,
"end": 1729
} | class ____ implements MethodInterceptor<Object, Object> {
public Map<String, AnnotationMetadata> visitedMethods = new LinkedHashMap<>();
public static final int POSITION = 0;
@Override
public int getOrder() {
return POSITION;
}
@Override
public @Nullable Object intercept(MethodInvocationContext<Object, Object> context) {
visitedMethods.put(context.getMethodName(), context.getAnnotationMetadata());
Iterator<MutableArgumentValue<?>> iterator = context.getParameters().values().iterator();
if(iterator.hasNext())
return iterator.next().getValue();
return null;
}
}
| StubIntroducer |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/time/JavaInstantGetSecondsGetNanoTest.java | {
"start": 10349,
"end": 10733
} | class ____ {
private static final Instant INSTANT = Instant.EPOCH;
public static void foo() {
// BUG: Diagnostic contains: JavaInstantGetSecondsGetNano
Runnable r = () -> INSTANT.getNano();
long seconds = INSTANT.getEpochSecond();
}
}
""")
.doTest();
}
}
| TestCase |
java | alibaba__nacos | console/src/test/java/com/alibaba/nacos/console/handler/impl/remote/ai/McpRemoteHandlerTest.java | {
"start": 1558,
"end": 5058
} | class ____ extends AbstractRemoteHandlerTest {
McpRemoteHandler mcpRemoteHandler;
@BeforeEach
void setUp() {
super.setUpWithAi();
mcpRemoteHandler = new McpRemoteHandler(clientHolder);
}
@AfterEach
void tearDown() {
}
@Test
void listMcpServersForBlur() throws NacosException {
Page<McpServerBasicInfo> mockPage = new Page<>();
when(aiMaintainerService.searchMcpServer("", "", 1, 100)).thenReturn(mockPage);
Page<McpServerBasicInfo> actual = mcpRemoteHandler.listMcpServers("", "", Constants.MCP_LIST_SEARCH_BLUR, 1,
100);
assertEquals(mockPage, actual);
}
@Test
void listMcpServersForAccurate() throws NacosException {
Page<McpServerBasicInfo> mockPage = new Page<>();
when(aiMaintainerService.listMcpServer("", "", 1, 100)).thenReturn(mockPage);
Page<McpServerBasicInfo> actual = mcpRemoteHandler.listMcpServers("", "", Constants.MCP_LIST_SEARCH_ACCURATE, 1,
100);
assertEquals(mockPage, actual);
}
@Test
void getMcpServer() throws NacosException {
McpServerDetailInfo mock = new McpServerDetailInfo();
when(aiMaintainerService.getMcpServerDetail("", "test", "id", "version")).thenReturn(mock);
McpServerDetailInfo actual = mcpRemoteHandler.getMcpServer("", "test", "id", "version");
assertEquals(mock, actual);
}
@Test
void createMcpServer() throws NacosException {
McpServerBasicInfo mcpServerBasicInfo = new McpServerBasicInfo();
mcpServerBasicInfo.setName("test");
mcpRemoteHandler.createMcpServer(AiConstants.Mcp.MCP_DEFAULT_NAMESPACE, mcpServerBasicInfo,
new McpToolSpecification(), new McpEndpointSpec());
verify(aiMaintainerService).createMcpServer(eq(AiConstants.Mcp.MCP_DEFAULT_NAMESPACE), eq("test"),
any(McpServerBasicInfo.class), any(McpToolSpecification.class), any(McpEndpointSpec.class));
}
@Test
void updateMcpServer() throws NacosException {
McpServerBasicInfo mcpServerBasicInfo = new McpServerBasicInfo();
mcpServerBasicInfo.setName("test");
mcpRemoteHandler.updateMcpServer(AiConstants.Mcp.MCP_DEFAULT_NAMESPACE, true, mcpServerBasicInfo,
new McpToolSpecification(), new McpEndpointSpec(), false);
verify(aiMaintainerService).updateMcpServer(eq(AiConstants.Mcp.MCP_DEFAULT_NAMESPACE), eq("test"), eq(true),
any(McpServerBasicInfo.class), any(McpToolSpecification.class), any(McpEndpointSpec.class), eq(false));
}
@Test
void updateMcpServerWithOverrideExisting() throws NacosException {
McpServerBasicInfo mcpServerBasicInfo = new McpServerBasicInfo();
mcpServerBasicInfo.setName("test");
mcpRemoteHandler.updateMcpServer(AiConstants.Mcp.MCP_DEFAULT_NAMESPACE, true, mcpServerBasicInfo,
new McpToolSpecification(), new McpEndpointSpec(), true);
verify(aiMaintainerService).updateMcpServer(eq(AiConstants.Mcp.MCP_DEFAULT_NAMESPACE), eq("test"), eq(true),
any(McpServerBasicInfo.class), any(McpToolSpecification.class), any(McpEndpointSpec.class), eq(true));
}
@Test
void deleteMcpServer() throws NacosException {
mcpRemoteHandler.deleteMcpServer("", "test", "id", "version");
verify(aiMaintainerService).deleteMcpServer("", "test", "id", "version");
}
}
| McpRemoteHandlerTest |
java | elastic__elasticsearch | x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/AnomalyJobCRUDIT.java | {
"start": 2691,
"end": 11269
} | class ____ extends MlSingleNodeTestCase {
private JobResultsPersister jobResultsPersister;
@Before
public void createComponents() throws Exception {
ThreadPool tp = mockThreadPool();
ClusterSettings clusterSettings = new ClusterSettings(
Settings.EMPTY,
new HashSet<>(
Arrays.asList(
InferenceProcessor.MAX_INFERENCE_PROCESSORS,
MasterService.MASTER_SERVICE_SLOW_TASK_LOGGING_THRESHOLD_SETTING,
OperationRouting.USE_ADAPTIVE_REPLICA_SELECTION_SETTING,
ResultsPersisterService.PERSIST_RESULTS_MAX_RETRIES,
ClusterService.USER_DEFINED_METADATA,
ClusterApplierService.CLUSTER_SERVICE_SLOW_TASK_LOGGING_THRESHOLD_SETTING,
ClusterApplierService.CLUSTER_SERVICE_SLOW_TASK_THREAD_DUMP_TIMEOUT_SETTING,
ClusterApplierService.CLUSTER_APPLIER_THREAD_WATCHDOG_INTERVAL,
ClusterApplierService.CLUSTER_APPLIER_THREAD_WATCHDOG_QUIET_TIME
)
)
);
ClusterService clusterService = new ClusterService(Settings.EMPTY, clusterSettings, tp, null);
OriginSettingClient originSettingClient = new OriginSettingClient(client(), ML_ORIGIN);
ResultsPersisterService resultsPersisterService = new ResultsPersisterService(
tp,
originSettingClient,
clusterService,
Settings.EMPTY
);
jobResultsPersister = new JobResultsPersister(originSettingClient, resultsPersisterService);
waitForMlTemplates();
}
public void testUpdateModelMemoryLimitOnceEstablished() {
String jobId = "memory-limit-established";
createJob(jobId);
jobResultsPersister.persistModelSizeStats(
new ModelSizeStats.Builder(jobId).setTimestamp(new Date()).setLogTime(new Date()).setModelBytes(10000000).build(),
() -> false
);
jobResultsPersister.commitWrites(jobId, JobResultsPersister.CommitType.RESULTS);
ElasticsearchStatusException iae = expectThrows(
ElasticsearchStatusException.class,
() -> client().execute(
UpdateJobAction.INSTANCE,
new UpdateJobAction.Request(jobId, new JobUpdate.Builder(jobId).setAnalysisLimits(new AnalysisLimits(5L, 0L)).build())
).actionGet()
);
assertThat(iae.getMessage(), containsString("model_memory_limit cannot be decreased below current usage"));
// Shouldn't throw
client().execute(
UpdateJobAction.INSTANCE,
new UpdateJobAction.Request(jobId, new JobUpdate.Builder(jobId).setAnalysisLimits(new AnalysisLimits(30L, 0L)).build())
).actionGet();
}
public void testCreateWithExistingCategorizerDocs() {
String jobId = "job-id-with-existing-docs";
testCreateWithExistingDocs(
prepareIndex(".ml-state-000001").setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE)
.setId(jobId + "_categorizer_state#1")
.setSource("{}", XContentType.JSON)
.request(),
jobId
);
}
public void testCreateWithExistingQuantilesDocs() {
String jobId = "job-id-with-existing-docs";
testCreateWithExistingDocs(
prepareIndex(".ml-state-000001").setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE)
.setId(jobId + "_quantiles")
.setSource("{}", XContentType.JSON)
.request(),
jobId
);
}
public void testCreateWithExistingResultsDocs() {
String jobId = "job-id-with-existing-docs";
testCreateWithExistingDocs(
prepareIndex(".ml-anomalies-shared-000001").setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE)
.setId(jobId + "_1464739200000_1")
.setSource("{\"job_id\": \"" + jobId + "\"}", XContentType.JSON)
.request(),
jobId
);
}
public void testPutJobWithClosedResultsIndex() {
String jobId = "job-with-closed-results-index";
client().admin().indices().prepareCreate(".ml-anomalies-shared-000001").get();
client().admin().indices().prepareClose(".ml-anomalies-shared-000001").get();
ElasticsearchStatusException ex = expectThrows(ElasticsearchStatusException.class, () -> createJob(jobId));
assertThat(
ex.getMessage(),
containsString("Cannot create job [job-with-closed-results-index] as it requires closed index [.ml-anomalies-*]")
);
client().admin().indices().prepareDelete(".ml-anomalies-shared-000001").get();
}
public void testPutJobWithClosedStateIndex() {
String jobId = "job-with-closed-results-index";
client().admin().indices().prepareCreate(".ml-state-000001").get();
client().admin().indices().prepareClose(".ml-state-000001").setWaitForActiveShards(0).get();
ElasticsearchStatusException ex = expectThrows(ElasticsearchStatusException.class, () -> createJob(jobId));
assertThat(
ex.getMessage(),
containsString("Cannot create job [job-with-closed-results-index] as it requires closed index [.ml-state*]")
);
client().admin().indices().prepareDelete(".ml-state-000001").get();
}
public void testOpenJobWithOldSnapshot() {
String jobId = "open-job-with-old-model-snapshot";
Date timestamp = new Date();
createJob(jobId);
ModelSnapshot snapshot = new ModelSnapshot.Builder(jobId).setMinVersion("6.0.0")
.setSnapshotId("snap_1")
.setQuantiles(new Quantiles(jobId, timestamp, "quantiles-1"))
.setSnapshotDocCount(1)
.setModelSizeStats(new ModelSizeStats.Builder(jobId).setTimestamp(timestamp).setLogTime(timestamp))
.build();
indexModelSnapshot(snapshot);
GetModelSnapshotsAction.Response getResponse = client().execute(
GetModelSnapshotsAction.INSTANCE,
new GetModelSnapshotsAction.Request(jobId, "snap_1")
).actionGet();
assertThat(getResponse.getResources().results(), hasSize(1));
client().execute(RevertModelSnapshotAction.INSTANCE, new RevertModelSnapshotAction.Request(jobId, "snap_1")).actionGet();
// should fail?
ElasticsearchStatusException ex = expectThrows(
ElasticsearchStatusException.class,
() -> client().execute(OpenJobAction.INSTANCE, new OpenJobAction.Request(jobId)).actionGet()
);
assertThat(
ex.getMessage(),
containsString(
"[open-job-with-old-model-snapshot] job model snapshot [snap_1] has min version before [8.3.0], "
+ "please revert to a newer model snapshot or reset the job"
)
);
assertThat(ex.status(), is(RestStatus.BAD_REQUEST));
}
private void indexModelSnapshot(ModelSnapshot snapshot) {
jobResultsPersister.persistModelSnapshot(snapshot, WriteRequest.RefreshPolicy.IMMEDIATE, () -> true);
}
private void testCreateWithExistingDocs(IndexRequest indexRequest, String jobId) {
OriginSettingClient client = new OriginSettingClient(client(), ML_ORIGIN);
client.index(indexRequest).actionGet();
ElasticsearchStatusException ex = expectThrows(ElasticsearchStatusException.class, () -> createJob(jobId));
assertThat(ex.getMessage(), containsString("state documents exist for a prior job with Id [job-id-with-existing-docs]"));
}
private Job.Builder createJob(String jobId) {
Job.Builder builder = new Job.Builder(jobId);
AnalysisConfig.Builder ac = createAnalysisConfig("by_field");
DataDescription.Builder dc = new DataDescription.Builder();
builder.setAnalysisConfig(ac);
builder.setDataDescription(dc);
PutJobAction.Request request = new PutJobAction.Request(builder);
client().execute(PutJobAction.INSTANCE, request).actionGet();
return builder;
}
private AnalysisConfig.Builder createAnalysisConfig(String byFieldName) {
Detector.Builder detector = new Detector.Builder("mean", "field");
detector.setByFieldName(byFieldName);
List<DetectionRule> rules = new ArrayList<>();
detector.setRules(rules);
return new AnalysisConfig.Builder(Collections.singletonList(detector.build()));
}
}
| AnomalyJobCRUDIT |
java | spring-projects__spring-framework | spring-context/src/main/java/org/springframework/context/annotation/ConfigurationClassPostProcessor.java | {
"start": 11661,
"end": 19451
} | class ____ instead of standard component overriding).
* <p>Note that this strategy does <em>not</em> apply to {@link Bean} methods.
* <p>This setter is typically only appropriate when configuring the post-processor as a
* standalone bean definition in XML, for example, not using the dedicated {@code AnnotationConfig*}
* application contexts or the {@code <context:annotation-config>} element. Any bean name
* generator specified against the application context will take precedence over any set here.
* @since 3.1.1
* @see AnnotationConfigApplicationContext#setBeanNameGenerator(BeanNameGenerator)
* @see AnnotationConfigUtils#CONFIGURATION_BEAN_NAME_GENERATOR
*/
public void setBeanNameGenerator(BeanNameGenerator beanNameGenerator) {
Assert.notNull(beanNameGenerator, "BeanNameGenerator must not be null");
this.localBeanNameGeneratorSet = true;
this.componentScanBeanNameGenerator = beanNameGenerator;
this.importBeanNameGenerator = beanNameGenerator;
}
@Override
public void setEnvironment(Environment environment) {
Assert.notNull(environment, "Environment must not be null");
this.environment = environment;
}
@Override
public void setResourceLoader(ResourceLoader resourceLoader) {
Assert.notNull(resourceLoader, "ResourceLoader must not be null");
this.resourceLoader = resourceLoader;
if (!this.setMetadataReaderFactoryCalled) {
this.metadataReaderFactory = new CachingMetadataReaderFactory(resourceLoader);
}
}
@Override
public void setBeanClassLoader(ClassLoader beanClassLoader) {
this.beanClassLoader = beanClassLoader;
if (!this.setMetadataReaderFactoryCalled) {
this.metadataReaderFactory = new CachingMetadataReaderFactory(beanClassLoader);
}
}
@Override
public void setApplicationStartup(ApplicationStartup applicationStartup) {
this.applicationStartup = applicationStartup;
}
/**
* Derive further bean definitions from the configuration classes in the registry.
*/
@Override
public void postProcessBeanDefinitionRegistry(BeanDefinitionRegistry registry) {
int registryId = System.identityHashCode(registry);
if (this.registriesPostProcessed.contains(registryId)) {
throw new IllegalStateException(
"postProcessBeanDefinitionRegistry already called on this post-processor against " + registry);
}
if (this.factoriesPostProcessed.contains(registryId)) {
throw new IllegalStateException(
"postProcessBeanFactory already called on this post-processor against " + registry);
}
this.registriesPostProcessed.add(registryId);
processConfigBeanDefinitions(registry);
}
/**
* Prepare the Configuration classes for servicing bean requests at runtime
* by replacing them with CGLIB-enhanced subclasses.
*/
@Override
public void postProcessBeanFactory(ConfigurableListableBeanFactory beanFactory) {
int factoryId = System.identityHashCode(beanFactory);
if (this.factoriesPostProcessed.contains(factoryId)) {
throw new IllegalStateException(
"postProcessBeanFactory already called on this post-processor against " + beanFactory);
}
this.factoriesPostProcessed.add(factoryId);
if (!this.registriesPostProcessed.contains(factoryId)) {
// BeanDefinitionRegistryPostProcessor hook apparently not supported...
// Simply call processConfigurationClasses lazily at this point then.
processConfigBeanDefinitions((BeanDefinitionRegistry) beanFactory);
}
enhanceConfigurationClasses(beanFactory);
beanFactory.addBeanPostProcessor(new ImportAwareBeanPostProcessor(beanFactory));
}
@Override
public @Nullable BeanRegistrationAotContribution processAheadOfTime(RegisteredBean registeredBean) {
Object configClassAttr = registeredBean.getMergedBeanDefinition()
.getAttribute(ConfigurationClassUtils.CONFIGURATION_CLASS_ATTRIBUTE);
if (ConfigurationClassUtils.CONFIGURATION_CLASS_FULL.equals(configClassAttr)) {
return BeanRegistrationAotContribution.withCustomCodeFragments(codeFragments ->
new ConfigurationClassProxyBeanRegistrationCodeFragments(codeFragments, registeredBean));
}
return null;
}
@Override
public @Nullable BeanFactoryInitializationAotContribution processAheadOfTime(ConfigurableListableBeanFactory beanFactory) {
boolean hasPropertySourceDescriptors = !CollectionUtils.isEmpty(this.propertySourceDescriptors);
boolean hasImportRegistry = beanFactory.containsBean(IMPORT_REGISTRY_BEAN_NAME);
boolean hasBeanRegistrars = !this.beanRegistrars.isEmpty();
if (hasPropertySourceDescriptors || hasImportRegistry || hasBeanRegistrars) {
return (generationContext, code) -> {
if (hasPropertySourceDescriptors) {
new PropertySourcesAotContribution(this.propertySourceDescriptors, this::resolvePropertySourceLocation)
.applyTo(generationContext, code);
}
if (hasImportRegistry) {
new ImportAwareAotContribution(beanFactory).applyTo(generationContext, code);
}
if (hasBeanRegistrars) {
new BeanRegistrarAotContribution(this.beanRegistrars, beanFactory).applyTo(generationContext, code);
}
};
}
return null;
}
private @Nullable Resource resolvePropertySourceLocation(String location) {
try {
String resolvedLocation = (this.environment != null ?
this.environment.resolveRequiredPlaceholders(location) : location);
return this.resourceLoader.getResource(resolvedLocation);
}
catch (Exception ex) {
return null;
}
}
/**
* Build and validate a configuration model based on the registry of
* {@link Configuration} classes.
*/
public void processConfigBeanDefinitions(BeanDefinitionRegistry registry) {
List<BeanDefinitionHolder> configCandidates = new ArrayList<>();
String[] candidateNames = registry.getBeanDefinitionNames();
for (String beanName : candidateNames) {
BeanDefinition beanDef = registry.getBeanDefinition(beanName);
if (beanDef.getAttribute(ConfigurationClassUtils.CONFIGURATION_CLASS_ATTRIBUTE) != null) {
if (logger.isDebugEnabled()) {
logger.debug("Bean definition has already been processed as a configuration class: " + beanDef);
}
}
else if (ConfigurationClassUtils.checkConfigurationClassCandidate(beanDef, this.metadataReaderFactory)) {
configCandidates.add(new BeanDefinitionHolder(beanDef, beanName));
}
}
// Return immediately if no @Configuration classes were found
if (configCandidates.isEmpty()) {
return;
}
// Sort by previously determined @Order value, if applicable
configCandidates.sort((bd1, bd2) -> {
int i1 = ConfigurationClassUtils.getOrder(bd1.getBeanDefinition());
int i2 = ConfigurationClassUtils.getOrder(bd2.getBeanDefinition());
return Integer.compare(i1, i2);
});
// Detect any custom bean name generation strategy supplied through the enclosing application context
SingletonBeanRegistry singletonRegistry = null;
if (registry instanceof SingletonBeanRegistry sbr) {
singletonRegistry = sbr;
BeanNameGenerator configurationGenerator = (BeanNameGenerator) singletonRegistry.getSingleton(
AnnotationConfigUtils.CONFIGURATION_BEAN_NAME_GENERATOR);
if (configurationGenerator != null) {
if (this.localBeanNameGeneratorSet) {
if (configurationGenerator instanceof ConfigurationBeanNameGenerator &
configurationGenerator != this.importBeanNameGenerator) {
throw new IllegalStateException("Context-level ConfigurationBeanNameGenerator [" +
configurationGenerator + "] must not be overridden with processor-level generator [" +
this.importBeanNameGenerator + "]");
}
}
else {
this.componentScanBeanNameGenerator = configurationGenerator;
this.importBeanNameGenerator = configurationGenerator;
}
}
}
if (this.environment == null) {
this.environment = new StandardEnvironment();
}
// Parse each @Configuration | names |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/schemaupdate/QuotedTableNameSchemaUpdateTest.java | {
"start": 1353,
"end": 2734
} | class ____ {
@Test
@JiraKey(value = "HHH-10820")
@DisabledOnOs(value = OS.WINDOWS, disabledReason = "On Windows, MySQL is case insensitive!")
public void testSchemaUpdateWithQuotedTableName(
ServiceRegistryScope registryScope,
@TempDir File tempDir) throws Exception {
var output = new File( tempDir, "update_script.sql" );
var metadata = (MetadataImplementor) new MetadataSources( registryScope.getRegistry() )
.addAnnotatedClass( QuotedTable.class )
.buildMetadata();
metadata.orderColumns( false );
metadata.validate();
new SchemaExport()
.setOutputFile( output.getAbsolutePath() )
.setFormat( false )
.create( EnumSet.of( TargetType.DATABASE ), metadata );
new SchemaUpdate().setHaltOnError( true )
.setOutputFile( output.getAbsolutePath() )
.setFormat( false )
.execute( EnumSet.of( TargetType.DATABASE, TargetType.SCRIPT ), metadata );
final List<String> sqlLines = Files.readAllLines( output.toPath(), Charset.defaultCharset() );
assertThat( "The update should recognize the existing table", sqlLines.isEmpty(), is( true ) );
new SchemaExport().setHaltOnError( true )
.setOutputFile( output.getAbsolutePath() )
.setFormat( false )
.drop( EnumSet.of( TargetType.DATABASE ), metadata );
}
@Entity(name = "QuotedTable")
@Table(name = "\"QuotedTable\"")
public static | QuotedTableNameSchemaUpdateTest |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.