language stringclasses 1 value | repo stringclasses 60 values | path stringlengths 22 294 | class_span dict | source stringlengths 13 1.16M | target stringlengths 1 113 |
|---|---|---|---|---|---|
java | mockito__mockito | mockito-extensions/mockito-errorprone/src/test/java/org/mockito/errorprone/bugpatterns/MockitoAnyIncorrectPrimitiveTypeTest.java | {
"start": 5757,
"end": 6112
} | class ____ {",
" public void test() {",
" Foo foo = mock(Foo.class);",
" when(foo.run(anyInt())).thenReturn(5);",
" when(foo.runWithBoth(any(String.class), anyInt())).thenReturn(5);",
" }",
" static | Test |
java | spring-projects__spring-security | webauthn/src/main/java/org/springframework/security/web/webauthn/api/ImmutableCredentialRecord.java | {
"start": 875,
"end": 3969
} | class ____ implements CredentialRecord {
private final @Nullable PublicKeyCredentialType credentialType;
private final Bytes credentialId;
private final Bytes userEntityUserId;
private final PublicKeyCose publicKey;
private final long signatureCount;
private final boolean uvInitialized;
private final Set<AuthenticatorTransport> transports;
private final boolean backupEligible;
private final boolean backupState;
private final @Nullable Bytes attestationObject;
private final @Nullable Bytes attestationClientDataJSON;
private final Instant created;
private final Instant lastUsed;
private final String label;
private ImmutableCredentialRecord(@Nullable PublicKeyCredentialType credentialType, Bytes credentialId,
Bytes userEntityUserId, PublicKeyCose publicKey, long signatureCount, boolean uvInitialized,
Set<AuthenticatorTransport> transports, boolean backupEligible, boolean backupState,
@Nullable Bytes attestationObject, @Nullable Bytes attestationClientDataJSON, Instant created,
Instant lastUsed, String label) {
this.credentialType = credentialType;
this.credentialId = credentialId;
this.userEntityUserId = userEntityUserId;
this.publicKey = publicKey;
this.signatureCount = signatureCount;
this.uvInitialized = uvInitialized;
this.transports = transports;
this.backupEligible = backupEligible;
this.backupState = backupState;
this.attestationObject = attestationObject;
this.attestationClientDataJSON = attestationClientDataJSON;
this.created = created;
this.lastUsed = lastUsed;
this.label = label;
}
@Override
public @Nullable PublicKeyCredentialType getCredentialType() {
return this.credentialType;
}
@Override
public Bytes getCredentialId() {
return this.credentialId;
}
@Override
public Bytes getUserEntityUserId() {
return this.userEntityUserId;
}
@Override
public PublicKeyCose getPublicKey() {
return this.publicKey;
}
@Override
public long getSignatureCount() {
return this.signatureCount;
}
@Override
public boolean isUvInitialized() {
return this.uvInitialized;
}
@Override
public Set<AuthenticatorTransport> getTransports() {
return this.transports;
}
@Override
public boolean isBackupEligible() {
return this.backupEligible;
}
@Override
public boolean isBackupState() {
return this.backupState;
}
@Override
public @Nullable Bytes getAttestationObject() {
return this.attestationObject;
}
@Override
public @Nullable Bytes getAttestationClientDataJSON() {
return this.attestationClientDataJSON;
}
@Override
public Instant getCreated() {
return this.created;
}
@Override
public Instant getLastUsed() {
return this.lastUsed;
}
@Override
public String getLabel() {
return this.label;
}
public static ImmutableCredentialRecordBuilder builder() {
return new ImmutableCredentialRecordBuilder();
}
public static ImmutableCredentialRecordBuilder fromCredentialRecord(CredentialRecord credentialRecord) {
return new ImmutableCredentialRecordBuilder(credentialRecord);
}
public static final | ImmutableCredentialRecord |
java | spring-projects__spring-framework | spring-aop/src/main/java/org/springframework/aop/scope/ScopedProxyFactoryBean.java | {
"start": 1702,
"end": 2231
} | class ____ the {@link ScopedObject} interface.
* This presently allows for removing the corresponding object from the scope,
* seamlessly creating a new instance in the scope on next access.
*
* <p>Please note that the proxies created by this factory are
* <i>class-based</i> proxies by default. This can be customized
* through switching the "proxyTargetClass" property to "false".
*
* @author Rod Johnson
* @author Juergen Hoeller
* @since 2.0
* @see #setProxyTargetClass
*/
@SuppressWarnings("serial")
public | implement |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/runtime/operators/chaining/SynchronousChainedCombineDriver.java | {
"start": 2616,
"end": 10026
} | class ____<IN, OUT> extends ChainedDriver<IN, OUT> {
private static final Logger LOG =
LoggerFactory.getLogger(SynchronousChainedCombineDriver.class);
/**
* Fix length records with a length below this threshold will be in-place sorted, if possible.
*/
private static final int THRESHOLD_FOR_IN_PLACE_SORTING = 32;
// --------------------------------------------------------------------------------------------
private InMemorySorter<IN> sorter;
private GroupCombineFunction<IN, OUT> combiner;
private TypeSerializer<IN> serializer;
private TypeComparator<IN> groupingComparator;
private AbstractInvokable parent;
private final QuickSort sortAlgo = new QuickSort();
private List<MemorySegment> memory;
private volatile boolean running = true;
// --------------------------------------------------------------------------------------------
@Override
public void setup(AbstractInvokable parent) {
this.parent = parent;
@SuppressWarnings("unchecked")
final GroupCombineFunction<IN, OUT> combiner =
BatchTask.instantiateUserCode(
this.config, userCodeClassLoader, GroupCombineFunction.class);
this.combiner = combiner;
FunctionUtils.setFunctionRuntimeContext(combiner, getUdfRuntimeContext());
}
@Override
public void openTask() throws Exception {
// open the stub first
final Configuration stubConfig = this.config.getStubParameters();
BatchTask.openUserCode(this.combiner, stubConfig);
// ----------------- Set up the sorter -------------------------
// instantiate the serializer / comparator
final TypeSerializerFactory<IN> serializerFactory =
this.config.getInputSerializer(0, this.userCodeClassLoader);
final TypeComparatorFactory<IN> sortingComparatorFactory =
this.config.getDriverComparator(0, this.userCodeClassLoader);
final TypeComparatorFactory<IN> groupingComparatorFactory =
this.config.getDriverComparator(1, this.userCodeClassLoader);
this.serializer = serializerFactory.getSerializer();
TypeComparator<IN> sortingComparator = sortingComparatorFactory.createComparator();
this.groupingComparator = groupingComparatorFactory.createComparator();
MemoryManager memManager = this.parent.getEnvironment().getMemoryManager();
final int numMemoryPages =
memManager.computeNumberOfPages(this.config.getRelativeMemoryDriver());
this.memory = memManager.allocatePages(this.parent, numMemoryPages);
// instantiate a fix-length in-place sorter, if possible, otherwise the out-of-place sorter
if (sortingComparator.supportsSerializationWithKeyNormalization()
&& this.serializer.getLength() > 0
&& this.serializer.getLength() <= THRESHOLD_FOR_IN_PLACE_SORTING) {
this.sorter =
new FixedLengthRecordSorter<IN>(
this.serializer, sortingComparator.duplicate(), this.memory);
} else {
this.sorter =
new NormalizedKeySorter<IN>(
this.serializer, sortingComparator.duplicate(), this.memory);
}
if (LOG.isDebugEnabled()) {
LOG.debug(
"SynchronousChainedCombineDriver object reuse: "
+ (this.objectReuseEnabled ? "ENABLED" : "DISABLED")
+ ".");
}
}
@Override
public void closeTask() throws Exception {
if (this.running) {
BatchTask.closeUserCode(this.combiner);
}
}
@Override
public void cancelTask() {
this.running = false;
dispose(true);
}
// --------------------------------------------------------------------------------------------
public Function getStub() {
return this.combiner;
}
public String getTaskName() {
return this.taskName;
}
@Override
public void collect(IN record) {
this.numRecordsIn.inc();
// try writing to the sorter first
try {
if (this.sorter.write(record)) {
return;
}
} catch (IOException e) {
throw new ExceptionInChainedStubException(this.taskName, e);
}
// do the actual sorting
try {
sortAndCombine();
} catch (Exception e) {
throw new ExceptionInChainedStubException(this.taskName, e);
}
this.sorter.reset();
try {
if (!this.sorter.write(record)) {
throw new IOException(
"Cannot write record to fresh sort buffer. Record too large.");
}
} catch (IOException e) {
throw new ExceptionInChainedStubException(this.taskName, e);
}
}
// --------------------------------------------------------------------------------------------
@Override
public void close() {
try {
sortAndCombine();
} catch (Exception e) {
throw new ExceptionInChainedStubException(this.taskName, e);
}
this.outputCollector.close();
dispose(false);
}
private void dispose(boolean ignoreException) {
try {
sorter.dispose();
} catch (Exception e) {
// May happen during concurrent modification when canceling. Ignore.
if (!ignoreException) {
throw e;
}
} finally {
parent.getEnvironment().getMemoryManager().release(this.memory);
}
}
private void sortAndCombine() throws Exception {
final InMemorySorter<IN> sorter = this.sorter;
if (objectReuseEnabled) {
if (!sorter.isEmpty()) {
this.sortAlgo.sort(sorter);
// run the combiner
final ReusingKeyGroupedIterator<IN> keyIter =
new ReusingKeyGroupedIterator<IN>(
sorter.getIterator(), this.serializer, this.groupingComparator);
// cache references on the stack
final GroupCombineFunction<IN, OUT> stub = this.combiner;
final Collector<OUT> output = this.outputCollector;
// run stub implementation
while (this.running && keyIter.nextKey()) {
stub.combine(keyIter.getValues(), output);
}
}
} else {
if (!sorter.isEmpty()) {
this.sortAlgo.sort(sorter);
// run the combiner
final NonReusingKeyGroupedIterator<IN> keyIter =
new NonReusingKeyGroupedIterator<IN>(
sorter.getIterator(), this.groupingComparator);
// cache references on the stack
final GroupCombineFunction<IN, OUT> stub = this.combiner;
final Collector<OUT> output = this.outputCollector;
// run stub implementation
while (this.running && keyIter.nextKey()) {
stub.combine(keyIter.getValues(), output);
}
}
}
}
}
| SynchronousChainedCombineDriver |
java | quarkusio__quarkus | independent-projects/resteasy-reactive/server/runtime/src/main/java/org/jboss/resteasy/reactive/server/model/DynamicFeatures.java | {
"start": 391,
"end": 1219
} | class ____ {
private final List<ResourceDynamicFeature> resourceDynamicFeatures = new ArrayList<>();
public void addFeature(ResourceDynamicFeature resourceFeature) {
resourceDynamicFeatures.add(resourceFeature);
}
public List<ResourceDynamicFeature> getResourceDynamicFeatures() {
return resourceDynamicFeatures;
}
public void initializeDefaultFactories(Function<String, BeanFactory<?>> factoryCreator) {
for (int i = 0; i < resourceDynamicFeatures.size(); i++) {
ResourceDynamicFeature resourceFeature = resourceDynamicFeatures.get(i);
if (resourceFeature.getFactory() == null) {
resourceFeature.setFactory((BeanFactory<DynamicFeature>) factoryCreator.apply(resourceFeature.getClassName()));
}
}
}
}
| DynamicFeatures |
java | processing__processing4 | app/src/processing/app/ui/Editor.java | {
"start": 41316,
"end": 41654
} | class ____ extends UpdatableAction {
public FindNextAction() {
super(Language.text("menu.edit.find_next"));
}
@Override
public void actionPerformed(ActionEvent e) {
if (find != null) find.findNext();
}
public boolean canDo() {
return find != null && find.canFindNext();
}
}
| FindNextAction |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/nullness/ReturnMissingNullableTest.java | {
"start": 67435,
"end": 67988
} | class ____ {
java.lang.@Nullable @UnderInitialization Object method(boolean b) {
if (b) {
return null;
} else {
return null;
}
}
}
""")
.doTest();
}
@Test
public void annotationNotNamedNullable() {
createRefactoringTestHelper()
.setArgs("-XepOpt:Nullness:DefaultNullnessAnnotation=javax.annotation.CheckForNull")
.addInputLines(
"in/Test.java",
"""
| T |
java | apache__camel | components/camel-netty/src/test/java/org/apache/camel/component/netty/MyCustomCodec.java | {
"start": 1854,
"end": 2477
} | class ____ extends MessageToMessageDecoder<ByteBuf> {
@Override
protected void decode(ChannelHandlerContext ctx, ByteBuf msg, List<Object> out) {
// it may be empty, then return null
if (msg.isReadable()) {
// ByteBuf may not expose array method for accessing the under layer bytes
byte[] bytes = new byte[msg.readableBytes()];
int readerIndex = msg.readerIndex();
msg.getBytes(readerIndex, bytes);
out.add(bytes);
}
}
}
@ChannelHandler.Sharable
public static | BytesDecoder |
java | google__dagger | javatests/dagger/internal/codegen/MissingBindingValidationTest.java | {
"start": 34996,
"end": 35379
} | interface ____ {",
" Object object();",
"}");
Source parentModule =
CompilerTests.javaSource(
"ParentModule",
"import dagger.Module;",
"import dagger.Provides;",
"import dagger.multibindings.IntoSet;",
"import java.util.Set;",
"",
"@Module",
" | Grandchild |
java | netty__netty | microbench/src/main/java/io/netty/microbench/http2/NoopHttp2LocalFlowController.java | {
"start": 1061,
"end": 2442
} | class ____ implements Http2LocalFlowController {
public static final NoopHttp2LocalFlowController INSTANCE = new NoopHttp2LocalFlowController();
private NoopHttp2LocalFlowController() { }
@Override
public void initialWindowSize(int newWindowSize) throws Http2Exception {
}
@Override
public int initialWindowSize() {
return MAX_INITIAL_WINDOW_SIZE;
}
@Override
public int windowSize(Http2Stream stream) {
return MAX_INITIAL_WINDOW_SIZE;
}
@Override
public int initialWindowSize(Http2Stream stream) {
return MAX_INITIAL_WINDOW_SIZE;
}
@Override
public void incrementWindowSize(Http2Stream stream, int delta) throws Http2Exception {
}
@Override
public void receiveFlowControlledFrame(Http2Stream stream, ByteBuf data, int padding, boolean endOfStream)
throws Http2Exception {
}
@Override
public boolean consumeBytes(Http2Stream stream, int numBytes) throws Http2Exception {
return false;
}
@Override
public int unconsumedBytes(Http2Stream stream) {
return 0;
}
@Override
public void channelHandlerContext(ChannelHandlerContext ctx) throws Http2Exception {
}
@Override
public Http2LocalFlowController frameWriter(Http2FrameWriter frameWriter) {
return this;
}
}
| NoopHttp2LocalFlowController |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/mapping/generated/GeneratedWritableIdTest.java | {
"start": 1725,
"end": 2058
} | class ____ {
@Id
@Generated(writable = true)
@ColumnDefault("1")
private Long id;
private String name;
public TestEntity() {
}
public TestEntity(Long id, String name) {
this.id = id;
this.name = name;
}
public Long getId() {
return id;
}
public String getName() {
return name;
}
}
}
| TestEntity |
java | apache__flink | flink-rpc/flink-rpc-akka/src/test/java/org/apache/flink/runtime/rpc/pekko/PekkoRpcActorTest.java | {
"start": 30204,
"end": 30612
} | class ____ extends RpcEndpoint implements RpcGateway {
protected FailingOnStopEndpoint(RpcService rpcService, String endpointId) {
super(rpcService, endpointId);
}
@Override
public CompletableFuture<Void> onStop() {
return FutureUtils.completedExceptionally(new OnStopException("Test exception."));
}
private static | FailingOnStopEndpoint |
java | quarkusio__quarkus | integration-tests/main/src/main/java/io/quarkus/it/jaxb/mapper/codegen/feed/Feed.java | {
"start": 25435,
"end": 28304
} | class ____ {
@XmlValue
protected String value;
@XmlAttribute(name = "rel")
protected String rel;
@XmlAttribute(name = "type")
protected String type;
@XmlAttribute(name = "href")
@XmlSchemaType(name = "anyURI")
protected String href;
/**
* Gets the value of the value property.
*
* @return
* possible object is
* {@link String }
*
*/
public String getValue() {
return value;
}
/**
* Sets the value of the value property.
*
* @param value
* allowed object is
* {@link String }
*
*/
public void setValue(String value) {
this.value = value;
}
/**
* Gets the value of the rel property.
*
* @return
* possible object is
* {@link String }
*
*/
public String getRel() {
return rel;
}
/**
* Sets the value of the rel property.
*
* @param value
* allowed object is
* {@link String }
*
*/
public void setRel(String value) {
this.rel = value;
}
/**
* Gets the value of the type property.
*
* @return
* possible object is
* {@link String }
*
*/
public String getType() {
return type;
}
/**
* Sets the value of the type property.
*
* @param value
* allowed object is
* {@link String }
*
*/
public void setType(String value) {
this.type = value;
}
/**
* Gets the value of the href property.
*
* @return
* possible object is
* {@link String }
*
*/
public String getHref() {
return href;
}
/**
* Sets the value of the href property.
*
* @param value
* allowed object is
* {@link String }
*
*/
public void setHref(String value) {
this.href = value;
}
}
/**
* <p>
* Java | Link |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/CharacterGetNumericValueTest.java | {
"start": 968,
"end": 3176
} | class ____ {
private final CompilationTestHelper helper =
CompilationTestHelper.newInstance(CharacterGetNumericValue.class, getClass());
@Test
public void characterGetNumericValue_unexpectedBehavior() {
// Verify that the unexpected behavior still holds
// Expect 'A' - 'Z' to map to 10 - 35
for (int c = (int) 'A'; c < (int) 'Z'; c++) {
assertThat(Character.getNumericValue((char) c)).isEqualTo(c - (int) 'A' + 10);
assertThat(Character.getNumericValue(c)).isEqualTo(c - (int) 'A' + 10);
}
assertThat(Character.getNumericValue('Ⅴ' /* U+2164, Roman numeral 5 */)).isEqualTo(5);
assertThat(Character.getNumericValue('V')).isEqualTo(31);
}
@Test
public void uCharacterGetNumericValue_unexpectedBehavior() {
// Verify that the unexpected behavior still holds
// Expect 'A' - 'Z' to map to 10 - 35
for (int c = (int) 'A'; c < (int) 'Z'; c++) {
assertThat(UCharacter.getNumericValue((char) c)).isEqualTo(c - (int) 'A' + 10);
assertThat(UCharacter.getNumericValue(c)).isEqualTo(c - (int) 'A' + 10);
}
assertThat(UCharacter.getNumericValue('Ⅴ' /* U+2164, Roman numeral 5 */)).isEqualTo(5);
assertThat(UCharacter.getNumericValue('V')).isEqualTo(31);
}
@Test
public void characterDigit_expectedBehavior() {
assertThat(Character.digit('Z', 36)).isEqualTo(35);
assertThat(Character.digit('௧' /* U+0BE7, Tamil digit 1 */, 36)).isEqualTo(1);
assertThat(Character.digit('௲' /* U+0BF2, Tamil number 1000 */, 36)).isEqualTo(-1);
assertThat(Character.digit('Ⅴ' /* U+2164, Roman numeral 5 */, 36)).isEqualTo(-1);
}
@Test
public void uCharacterDigit_expectedBehavior() {
assertThat(UCharacter.digit('Z', 36)).isEqualTo(35);
assertThat(UCharacter.digit('௧' /* U+0BE7, Tamil digit 1 */, 36)).isEqualTo(1);
assertThat(UCharacter.digit('௲' /* U+0BF2, Tamil number 1000 */, 36)).isEqualTo(-1);
assertThat(UCharacter.digit('Ⅴ' /* U+2164, Roman numeral 5 */, 36)).isEqualTo(-1);
}
@Test
public void character_getNumericValue_char() {
helper
.addSourceLines(
"Test.java",
"""
import java.lang.Character;
| CharacterGetNumericValueTest |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/inlineme/InlinerTest.java | {
"start": 32441,
"end": 32864
} | class ____ {
public void doTest() {
Client client = new Client();
int x = 42;
}
}
""")
.doTest();
}
@Test
public void orderOfOperations() {
refactoringTestHelper
.addInputLines(
"Client.java",
"""
import com.google.errorprone.annotations.InlineMe;
public final | Caller |
java | google__guava | guava/src/com/google/common/collect/AbstractMapBasedMultimap.java | {
"start": 41292,
"end": 42436
} | class ____ extends Maps.EntrySet<K, Collection<V>> {
@Override
Map<K, Collection<V>> map() {
return AsMap.this;
}
@Override
public Iterator<Entry<K, Collection<V>>> iterator() {
return new AsMapIterator();
}
@Override
public Spliterator<Entry<K, Collection<V>>> spliterator() {
return CollectSpliterators.map(
submap.entrySet().spliterator(),
Spliterator.DISTINCT | Spliterator.NONNULL,
AsMap.this::wrapEntry);
}
// The following methods are included for performance.
@Override
public boolean contains(@Nullable Object o) {
return Collections2.safeContains(submap.entrySet(), o);
}
@Override
public boolean remove(@Nullable Object o) {
if (!contains(o)) {
return false;
}
// requireNonNull is safe because of the contains check.
Entry<?, ?> entry = requireNonNull((Entry<?, ?>) o);
removeValuesForKey(entry.getKey());
return true;
}
}
/** Iterator across all keys and value collections. */
final | AsMapEntries |
java | apache__flink | flink-table/flink-sql-parser/src/main/java/org/apache/flink/sql/parser/dml/RichSqlInsert.java | {
"start": 1447,
"end": 6068
} | class ____ extends SqlInsert {
private final SqlNodeList staticPartitions;
private final SqlNodeList extendedKeywords;
private final SqlNode targetTableID;
private final SqlNodeList tableHints;
public RichSqlInsert(
SqlParserPos pos,
SqlNodeList keywords,
SqlNodeList extendedKeywords,
SqlNode targetTable,
SqlNode source,
SqlNodeList columnList,
SqlNodeList staticPartitions) {
super(pos, keywords, targetTable, source, columnList);
this.extendedKeywords = extendedKeywords;
this.staticPartitions = staticPartitions;
if (targetTable instanceof SqlTableRef) {
SqlTableRef tableRef = (SqlTableRef) targetTable;
this.targetTableID = tableRef.operand(0);
this.tableHints = tableRef.operand(1);
} else {
this.targetTableID = targetTable;
this.tableHints = SqlNodeList.EMPTY;
}
}
/**
* @return the list of partition key-value pairs, returns empty if there is no partition
* specifications.
*/
public SqlNodeList getStaticPartitions() {
return staticPartitions;
}
/**
* Get static partition key value pair as strings.
*
* <p>For character literals we return the unquoted and unescaped values. For other types we use
* {@link SqlLiteral#toString()} to get the string format of the value literal. If the string
* format is not what you need, use {@link #getStaticPartitions()}.
*
* @return the mapping of column names to values of partition specifications, returns an empty
* map if there is no partition specifications.
*/
public LinkedHashMap<String, String> getStaticPartitionKVs() {
LinkedHashMap<String, String> ret = new LinkedHashMap<>();
if (this.staticPartitions.isEmpty()) {
return ret;
}
for (SqlNode node : this.staticPartitions.getList()) {
SqlProperty sqlProperty = (SqlProperty) node;
String value = SqlParseUtils.extractString(sqlProperty.getValue());
ret.put(sqlProperty.getKey().getSimple(), value);
}
return ret;
}
/** Returns the target table identifier. */
public SqlNode getTargetTableID() {
return targetTableID;
}
/** Returns the table hints as list of {@code SqlNode} for current insert node. */
public SqlNodeList getTableHints() {
return this.tableHints;
}
@Override
public void unparse(SqlWriter writer, int leftPrec, int rightPrec) {
writer.startList(SqlWriter.FrameTypeEnum.SELECT);
String insertKeyword = "INSERT INTO";
if (isUpsert()) {
insertKeyword = "UPSERT INTO";
} else if (isOverwrite()) {
insertKeyword = "INSERT OVERWRITE";
}
writer.sep(insertKeyword);
final int opLeft = getOperator().getLeftPrec();
final int opRight = getOperator().getRightPrec();
getTargetTable().unparse(writer, opLeft, opRight);
if (staticPartitions != null && staticPartitions.size() > 0) {
writer.keyword("PARTITION");
staticPartitions.unparse(writer, opLeft, opRight);
writer.newlineAndIndent();
}
if (getTargetColumnList() != null) {
getTargetColumnList().unparse(writer, opLeft, opRight);
}
writer.newlineAndIndent();
getSource().unparse(writer, 0, 0);
}
// ~ Tools ------------------------------------------------------------------
public static boolean isUpsert(List<SqlLiteral> keywords) {
for (SqlNode keyword : keywords) {
SqlInsertKeyword keyword2 = ((SqlLiteral) keyword).symbolValue(SqlInsertKeyword.class);
if (keyword2 == SqlInsertKeyword.UPSERT) {
return true;
}
}
return false;
}
/**
* Returns whether the insert mode is overwrite (for whole table or for specific partitions).
*
* @return true if this is overwrite mode
*/
public boolean isOverwrite() {
return getModifierNode(RichSqlInsertKeyword.OVERWRITE) != null;
}
private SqlNode getModifierNode(RichSqlInsertKeyword modifier) {
for (SqlNode keyword : extendedKeywords) {
RichSqlInsertKeyword keyword2 =
((SqlLiteral) keyword).symbolValue(RichSqlInsertKeyword.class);
if (keyword2 == modifier) {
return keyword;
}
}
return null;
}
}
| RichSqlInsert |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/annotations/entity/Tree.java | {
"start": 307,
"end": 589
} | class ____ {
private Integer id;
private String name;
@Id
@GeneratedValue
public Integer getId() {
return id;
}
public void setId(Integer id) {
this.id = id;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
}
| Tree |
java | resilience4j__resilience4j | resilience4j-rxjava2/src/main/java/io/github/resilience4j/AbstractMaybeObserver.java | {
"start": 134,
"end": 1245
} | class ____<T> extends AbstractDisposable implements
MaybeObserver<T> {
private final MaybeObserver<? super T> downstreamObserver;
public AbstractMaybeObserver(MaybeObserver<? super T> downstreamObserver) {
this.downstreamObserver = requireNonNull(downstreamObserver);
}
@Override
protected void hookOnSubscribe() {
downstreamObserver.onSubscribe(this);
}
@Override
public void onError(Throwable e) {
whenNotCompleted(() -> {
hookOnError(e);
downstreamObserver.onError(e);
});
}
@Override
public void onComplete() {
whenNotCompleted(() -> {
hookOnComplete();
downstreamObserver.onComplete();
});
}
protected abstract void hookOnComplete();
protected abstract void hookOnError(Throwable e);
@Override
public void onSuccess(T value) {
whenNotCompleted(() -> {
hookOnSuccess(value);
downstreamObserver.onSuccess(value);
});
}
protected abstract void hookOnSuccess(T value);
}
| AbstractMaybeObserver |
java | alibaba__druid | core/src/test/java/com/alibaba/druid/bvt/sql/oracle/OracleFormatTest2.java | {
"start": 780,
"end": 1431
} | class ____ extends TestCase {
public void test_formatOracle() throws Exception {
String text = "UPDATE MEMBER SET GMT_MODIFIED = SYSDATE, STATUS = ?, email = CASE WHEN status = ? THEN rtrim(email, ? || id || ?) ELSE email END WHERE ID IN (?) AND STATUS <> ?";
String formatedText = SQLUtils.format(text, JdbcUtils.ORACLE);
System.out.println(formatedText);
String formatedText1 = SQLUtils.format(text, JdbcUtils.OCEANBASE_ORACLE);
System.out.println(formatedText1);
String formatedText2 = SQLUtils.format(text, JdbcUtils.ALI_ORACLE);
System.out.println(formatedText2);
}
}
| OracleFormatTest2 |
java | elastic__elasticsearch | test/framework/src/test/java/org/elasticsearch/test/test/LoggingListenerTests.java | {
"start": 13437,
"end": 13556
} | class ____")
@TestIssueLogging(value = "foo.bar:ERROR", issueUrl = "https://example.com")
public static | annotations |
java | spring-projects__spring-security | web/src/test/java/org/springframework/security/web/header/writers/ContentSecurityPolicyHeaderWriterTests.java | {
"start": 1086,
"end": 5247
} | class ____ {
private static final String DEFAULT_POLICY_DIRECTIVES = "default-src 'self'";
private MockHttpServletRequest request;
private MockHttpServletResponse response;
private ContentSecurityPolicyHeaderWriter writer;
private static final String CONTENT_SECURITY_POLICY_HEADER = "Content-Security-Policy";
private static final String CONTENT_SECURITY_POLICY_REPORT_ONLY_HEADER = "Content-Security-Policy-Report-Only";
@BeforeEach
public void setup() {
this.request = new MockHttpServletRequest();
this.request.setSecure(true);
this.response = new MockHttpServletResponse();
this.writer = new ContentSecurityPolicyHeaderWriter(DEFAULT_POLICY_DIRECTIVES);
}
@Test
public void writeHeadersWhenNoPolicyDirectivesThenUsesDefault() {
ContentSecurityPolicyHeaderWriter noPolicyWriter = new ContentSecurityPolicyHeaderWriter();
noPolicyWriter.writeHeaders(this.request, this.response);
assertThat(this.response.getHeaderNames()).hasSize(1);
assertThat(this.response.getHeader("Content-Security-Policy")).isEqualTo(DEFAULT_POLICY_DIRECTIVES);
}
@Test
public void writeHeadersContentSecurityPolicyDefault() {
this.writer.writeHeaders(this.request, this.response);
assertThat(this.response.getHeaderNames()).hasSize(1);
assertThat(this.response.getHeader("Content-Security-Policy")).isEqualTo(DEFAULT_POLICY_DIRECTIVES);
}
@Test
public void writeHeadersContentSecurityPolicyCustom() {
String policyDirectives = "default-src 'self'; " + "object-src plugins1.example.com plugins2.example.com; "
+ "script-src trustedscripts.example.com";
this.writer = new ContentSecurityPolicyHeaderWriter(policyDirectives);
this.writer.writeHeaders(this.request, this.response);
assertThat(this.response.getHeaderNames()).hasSize(1);
assertThat(this.response.getHeader("Content-Security-Policy")).isEqualTo(policyDirectives);
}
@Test
public void writeHeadersWhenNoPolicyDirectivesReportOnlyThenUsesDefault() {
ContentSecurityPolicyHeaderWriter noPolicyWriter = new ContentSecurityPolicyHeaderWriter();
this.writer.setReportOnly(true);
noPolicyWriter.writeHeaders(this.request, this.response);
assertThat(this.response.getHeaderNames()).hasSize(1);
assertThat(this.response.getHeader("Content-Security-Policy")).isEqualTo(DEFAULT_POLICY_DIRECTIVES);
}
@Test
public void writeHeadersContentSecurityPolicyReportOnlyDefault() {
this.writer.setReportOnly(true);
this.writer.writeHeaders(this.request, this.response);
assertThat(this.response.getHeaderNames()).hasSize(1);
assertThat(this.response.getHeader("Content-Security-Policy-Report-Only")).isEqualTo(DEFAULT_POLICY_DIRECTIVES);
}
@Test
public void writeHeadersContentSecurityPolicyReportOnlyCustom() {
String policyDirectives = "default-src https:; report-uri https://example.com/";
this.writer = new ContentSecurityPolicyHeaderWriter(policyDirectives);
this.writer.setReportOnly(true);
this.writer.writeHeaders(this.request, this.response);
assertThat(this.response.getHeaderNames()).hasSize(1);
assertThat(this.response.getHeader("Content-Security-Policy-Report-Only")).isEqualTo(policyDirectives);
}
@Test
public void writeHeadersContentSecurityPolicyInvalid() {
assertThatIllegalArgumentException().isThrownBy(() -> new ContentSecurityPolicyHeaderWriter(""));
assertThatIllegalArgumentException().isThrownBy(() -> new ContentSecurityPolicyHeaderWriter(null));
}
@Test
public void writeContentSecurityPolicyHeaderWhenNotPresent() {
String value = new String("value");
this.response.setHeader(CONTENT_SECURITY_POLICY_HEADER, value);
this.writer.writeHeaders(this.request, this.response);
assertThat(this.response.getHeader(CONTENT_SECURITY_POLICY_HEADER)).isSameAs(value);
}
@Test
public void writeContentSecurityPolicyReportOnlyHeaderWhenNotPresent() {
String value = new String("value");
this.response.setHeader(CONTENT_SECURITY_POLICY_REPORT_ONLY_HEADER, value);
this.writer.setReportOnly(true);
this.writer.writeHeaders(this.request, this.response);
assertThat(this.response.getHeader(CONTENT_SECURITY_POLICY_REPORT_ONLY_HEADER)).isSameAs(value);
}
}
| ContentSecurityPolicyHeaderWriterTests |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/collection/list/Parent.java | {
"start": 261,
"end": 1045
} | class ____ {
private Integer id;
private String name;
private List<Child> children = new ArrayList<>();
public Parent() {
}
public Parent(Integer id, String name) {
this.id = id;
this.name = name;
}
@Id
public Integer getId() {
return id;
}
public void setId(Integer id) {
this.id = id;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public List<Child> getChildren() {
return children;
}
public void setChildren(List<Child> children) {
this.children = children;
for ( Iterator<Child> i = children.iterator(); i.hasNext(); ) {
if ( i.next() == null ) {
i.remove();
}
}
}
public void addChild(Child child) {
this.children.add( child );
child.setParent( this );
}
}
| Parent |
java | spring-projects__spring-security | test/src/test/java/org/springframework/security/test/web/servlet/showcase/secured/WithUserAuthenticationTests.java | {
"start": 3750,
"end": 4333
} | class ____ {
@Bean
SecurityFilterChain filterChain(HttpSecurity http) throws Exception {
// @formatter:off
http
.authorizeHttpRequests((requests) -> requests
.requestMatchers("/admin/**").hasRole("ADMIN")
.anyRequest().authenticated())
.formLogin(withDefaults());
return http.build();
// @formatter:on
}
@Autowired
void configureGlobal(AuthenticationManagerBuilder auth) throws Exception {
// @formatter:off
auth
.inMemoryAuthentication()
.withUser("user").password("password").roles("USER");
// @formatter:on
}
}
}
| Config |
java | spring-projects__spring-framework | spring-expression/src/test/java/org/springframework/expression/spel/IndexingTests.java | {
"start": 33897,
"end": 34663
} | class ____ implements PropertyAccessor {
@Override
public boolean canRead(EvaluationContext context, Object target, String name) {
return (((Map<?, ?>) target).containsKey(name));
}
@Override
public TypedValue read(EvaluationContext context, Object target, String name) {
return new TypedValue(((Map<?, ?>) target).get(name));
}
@Override
public boolean canWrite(EvaluationContext context, Object target, String name) {
return true;
}
@Override
@SuppressWarnings("unchecked")
public void write(EvaluationContext context, Object target, String name, Object newValue) {
((Map) target).put(name, newValue);
}
@Override
public Class<?>[] getSpecificTargetClasses() {
return new Class<?>[] {Map.class};
}
}
}
| MapAccessor |
java | apache__logging-log4j2 | log4j-core/src/main/java/org/apache/logging/log4j/core/appender/db/jdbc/DriverManagerConnectionSource.java | {
"start": 1637,
"end": 1887
} | class ____ extends AbstractDriverManagerConnectionSource {
/**
* Builds DriverManagerConnectionSource instances.
*
* @param <B>
* This builder type or a subclass.
*/
public static | DriverManagerConnectionSource |
java | mapstruct__mapstruct | core/src/main/java/org/mapstruct/AnnotateWiths.java | {
"start": 609,
"end": 809
} | interface ____ {
/**
* The configuration of the additional annotations.
*
* @return The configuration of the additional annotations.
*/
AnnotateWith[] value();
}
| AnnotateWiths |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/mapping/collections/ReadOnlyCollectionsTest.java | {
"start": 6723,
"end": 6860
} | class ____ {
@Id
private Long id;
public TargetEntity() {
}
public TargetEntity(Long id) {
this.id = id;
}
}
}
| TargetEntity |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/java/org/apache/hadoop/hdfs/server/federation/resolver/MountTableResolver.java | {
"start": 4116,
"end": 8471
} | class ____
implements FileSubclusterResolver, StateStoreCache {
private static final Logger LOG =
LoggerFactory.getLogger(MountTableResolver.class);
/** Reference to Router. */
private final Router router;
/** Reference to the State Store. */
private final StateStoreService stateStore;
/** Interface to the mount table store. */
private MountTableStore mountTableStore;
/** If the tree has been initialized. */
private boolean init = false;
/** If the mount table is manually disabled*/
private boolean disabled = false;
/** Path -> Remote HDFS location. */
private final TreeMap<String, MountTable> tree = new TreeMap<>();
/** Path -> Remote location. */
private final Cache<String, PathLocation> locationCache;
private final LongAdder locCacheMiss = new LongAdder();
private final LongAdder locCacheAccess = new LongAdder();
/** Default nameservice when no mount matches the math. */
private String defaultNameService = "";
/** If use default nameservice to read and write files. */
private boolean defaultNSEnable = true;
/** Synchronization for both the tree and the cache. */
private final ReadWriteLock readWriteLock = new ReentrantReadWriteLock();
private final Lock readLock = readWriteLock.readLock();
private final Lock writeLock = readWriteLock.writeLock();
/** Trash Current matching pattern. */
private static final String TRASH_PATTERN = "/(Current|[0-9]+)";
@VisibleForTesting
public MountTableResolver(Configuration conf) {
this(conf, (StateStoreService)null);
}
public MountTableResolver(Configuration conf, Router routerService) {
this(conf, routerService, null);
}
public MountTableResolver(Configuration conf, StateStoreService store) {
this(conf, null, store);
}
public MountTableResolver(Configuration conf, Router routerService,
StateStoreService store) {
this.router = routerService;
if (store != null) {
this.stateStore = store;
} else if (this.router != null) {
this.stateStore = this.router.getStateStore();
} else {
this.stateStore = null;
}
boolean mountTableCacheEnable = conf.getBoolean(
FEDERATION_MOUNT_TABLE_CACHE_ENABLE,
FEDERATION_MOUNT_TABLE_CACHE_ENABLE_DEFAULT);
if (mountTableCacheEnable) {
int maxCacheSize = conf.getInt(
FEDERATION_MOUNT_TABLE_MAX_CACHE_SIZE,
FEDERATION_MOUNT_TABLE_MAX_CACHE_SIZE_DEFAULT);
this.locationCache = CacheBuilder.newBuilder()
.maximumSize(maxCacheSize)
.build();
} else {
this.locationCache = null;
}
registerCacheExternal();
initDefaultNameService(conf);
}
/**
* Request cache updates from the State Store for this resolver.
*/
private void registerCacheExternal() {
if (this.stateStore != null) {
this.stateStore.registerCacheExternal(this);
}
}
/**
* Nameservice for APIs that cannot be resolved to a specific one.
*
* @param conf Configuration for this resolver.
*/
private void initDefaultNameService(Configuration conf) {
this.defaultNSEnable = conf.getBoolean(
DFS_ROUTER_DEFAULT_NAMESERVICE_ENABLE,
DFS_ROUTER_DEFAULT_NAMESERVICE_ENABLE_DEFAULT);
if (!this.defaultNSEnable) {
LOG.warn("Default name service is disabled.");
return;
}
this.defaultNameService = conf.get(DFS_ROUTER_DEFAULT_NAMESERVICE, "");
if (this.defaultNameService.equals("")) {
this.defaultNSEnable = false;
LOG.warn("Default name service is not set.");
} else {
LOG.info("Default name service: {}, enabled to read or write",
this.defaultNameService);
}
}
/**
* Get a reference for the Router for this resolver.
*
* @return Router for this resolver.
*/
protected Router getRouter() {
return this.router;
}
/**
* Get the mount table store for this resolver.
*
* @return Mount table store.
* @throws IOException If it cannot connect to the State Store.
*/
protected MountTableStore getMountTableStore() throws IOException {
if (this.mountTableStore == null) {
this.mountTableStore = this.stateStore.getRegisteredRecordStore(
MountTableStore.class);
if (this.mountTableStore == null) {
throw new IOException("State Store does not have an | MountTableResolver |
java | apache__commons-lang | src/main/java/org/apache/commons/lang3/ClassUtils.java | {
"start": 66262,
"end": 66429
} | class ____.
* @return the converted name.
* @throws NullPointerException if the className is null.
* @throws IllegalArgumentException Thrown if the | name |
java | elastic__elasticsearch | x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/OpenAiUnifiedStreamingProcessor.java | {
"start": 11835,
"end": 12970
} | class ____ {
private static final ConstructingObjectParser<
StreamingUnifiedChatCompletionResults.ChatCompletionChunk.Choice.Delta.ToolCall.Function,
Void> PARSER = new ConstructingObjectParser<>(
FUNCTION_FIELD,
true,
args -> new StreamingUnifiedChatCompletionResults.ChatCompletionChunk.Choice.Delta.ToolCall.Function(
(String) args[0],
(String) args[1]
)
);
static {
PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), new ParseField(ARGUMENTS_FIELD));
PARSER.declareStringOrNull(ConstructingObjectParser.optionalConstructorArg(), new ParseField(NAME_FIELD));
}
public static StreamingUnifiedChatCompletionResults.ChatCompletionChunk.Choice.Delta.ToolCall.Function parse(
XContentParser parser
) throws IOException {
return PARSER.parse(parser, null);
}
}
private static | FunctionParser |
java | apache__flink | flink-filesystems/flink-hadoop-fs/src/main/java/org/apache/flink/runtime/util/HadoopConfigLoader.java | {
"start": 1114,
"end": 5428
} | class ____ {
private static final Logger LOG = LoggerFactory.getLogger(HadoopConfigLoader.class);
/** The prefixes that Flink adds to the Hadoop fs config. */
private final String[] flinkConfigPrefixes;
/**
* Keys that are replaced (after prefix replacement, to give a more uniform experience across
* different file system implementations.
*/
private final String[][] mirroredConfigKeys;
/** Hadoop config prefix to replace Flink prefix. */
private final String hadoopConfigPrefix;
private final Set<String> packagePrefixesToShade;
private final Set<String> configKeysToShade;
private final String flinkShadingPrefix;
/** Flink's configuration object. */
private Configuration flinkConfig;
/** Hadoop's configuration for the file systems, lazily initialized. */
private org.apache.hadoop.conf.Configuration hadoopConfig;
public HadoopConfigLoader(
@Nonnull String[] flinkConfigPrefixes,
@Nonnull String[][] mirroredConfigKeys,
@Nonnull String hadoopConfigPrefix,
Set<String> packagePrefixesToShade,
@Nonnull Set<String> configKeysToShade,
@Nonnull String flinkShadingPrefix) {
this.flinkConfigPrefixes = flinkConfigPrefixes;
this.mirroredConfigKeys = mirroredConfigKeys;
this.hadoopConfigPrefix = hadoopConfigPrefix;
this.packagePrefixesToShade = packagePrefixesToShade;
this.configKeysToShade = configKeysToShade;
this.flinkShadingPrefix = flinkShadingPrefix;
}
public void setFlinkConfig(Configuration config) {
flinkConfig = config;
hadoopConfig = null;
}
/** get the loaded Hadoop config (or fall back to one loaded from the classpath). */
public org.apache.hadoop.conf.Configuration getOrLoadHadoopConfig() {
org.apache.hadoop.conf.Configuration hadoopConfig = this.hadoopConfig;
if (hadoopConfig == null) {
if (flinkConfig != null) {
hadoopConfig = mirrorCertainHadoopConfig(loadHadoopConfigFromFlink());
} else {
LOG.warn(
"Flink configuration is not set prior to loading this configuration."
+ " Cannot forward configuration keys from Flink configuration.");
hadoopConfig = new org.apache.hadoop.conf.Configuration();
}
}
this.hadoopConfig = hadoopConfig;
return hadoopConfig;
}
// add additional config entries from the Flink config to the Hadoop config
private org.apache.hadoop.conf.Configuration loadHadoopConfigFromFlink() {
org.apache.hadoop.conf.Configuration hadoopConfig =
new org.apache.hadoop.conf.Configuration();
for (String key : flinkConfig.keySet()) {
for (String prefix : flinkConfigPrefixes) {
if (key.startsWith(prefix)) {
String newKey = hadoopConfigPrefix + key.substring(prefix.length());
String newValue = fixHadoopConfig(key, flinkConfig.getString(key, null));
hadoopConfig.set(newKey, newValue);
LOG.debug(
"Adding Flink config entry for {} as {} to Hadoop config", key, newKey);
}
}
}
return hadoopConfig;
}
// mirror certain keys to make use more uniform across implementations
// with different keys
private org.apache.hadoop.conf.Configuration mirrorCertainHadoopConfig(
org.apache.hadoop.conf.Configuration hadoopConfig) {
for (String[] mirrored : mirroredConfigKeys) {
String value = hadoopConfig.get(mirrored[0], null);
if (value != null) {
hadoopConfig.set(mirrored[1], value);
}
}
return hadoopConfig;
}
private String fixHadoopConfig(String key, String value) {
return key != null && configKeysToShade.contains(key) ? shadeClassConfig(value) : value;
}
private String shadeClassConfig(String classConfig) {
return packagePrefixesToShade.stream().anyMatch(classConfig::startsWith)
? flinkShadingPrefix + classConfig
: classConfig;
}
}
| HadoopConfigLoader |
java | apache__maven | impl/maven-core/src/test/java/org/apache/maven/plugin/PluginManagerTest.java | {
"start": 1720,
"end": 8517
} | class ____ extends AbstractCoreMavenComponentTestCase {
@Inject
private DefaultBuildPluginManager pluginManager;
protected String getProjectsDirectory() {
return "src/test/projects/plugin-manager";
}
@Test
void testPluginLoading() throws Exception {
MavenSession session = createMavenSession(null);
Plugin plugin = new Plugin();
plugin.setGroupId("org.apache.maven.its.plugins");
plugin.setArtifactId("maven-it-plugin");
plugin.setVersion("0.1");
PluginDescriptor pluginDescriptor = pluginManager.loadPlugin(
plugin, session.getCurrentProject().getRemotePluginRepositories(), session.getRepositorySession());
assertNotNull(pluginDescriptor);
}
@Test
void testMojoDescriptorRetrieval() throws Exception {
MavenSession session = createMavenSession(null);
String goal = "it";
Plugin plugin = new Plugin();
plugin.setGroupId("org.apache.maven.its.plugins");
plugin.setArtifactId("maven-it-plugin");
plugin.setVersion("0.1");
MojoDescriptor mojoDescriptor = pluginManager.getMojoDescriptor(
plugin,
goal,
session.getCurrentProject().getRemotePluginRepositories(),
session.getRepositorySession());
assertNotNull(mojoDescriptor);
assertEquals(goal, mojoDescriptor.getGoal());
// igorf: plugin realm comes later
// assertNotNull( mojoDescriptor.getRealm() );
PluginDescriptor pluginDescriptor = mojoDescriptor.getPluginDescriptor();
assertNotNull(pluginDescriptor);
assertEquals("org.apache.maven.its.plugins", pluginDescriptor.getGroupId());
assertEquals("maven-it-plugin", pluginDescriptor.getArtifactId());
assertEquals("0.1", pluginDescriptor.getVersion());
}
// test a build where projects use different versions of the same plugin
@Test
void testThatPluginDependencyThatHasSystemScopeIsResolved() throws Exception {
MavenSession session = createMavenSession(getProject("project-contributing-system-scope-plugin-dep"));
MavenProject project = session.getCurrentProject();
Plugin plugin = project.getPlugin("org.apache.maven.its.plugins:maven-it-plugin");
RepositoryRequest repositoryRequest = new DefaultRepositoryRequest();
repositoryRequest.setLocalRepository(getLocalRepository());
repositoryRequest.setRemoteRepositories(getPluginArtifactRepositories());
PluginDescriptor pluginDescriptor = pluginManager.loadPlugin(
plugin, session.getCurrentProject().getRemotePluginRepositories(), session.getRepositorySession());
pluginManager.getPluginRealm(session, pluginDescriptor);
List<Artifact> artifacts = pluginDescriptor.getArtifacts();
for (Artifact a : artifacts) {
if (a.getGroupId().equals("org.apache.maven.its.mng3586")
&& a.getArtifactId().equals("tools")) {
// The system scoped dependencies will be present in the classloader for the plugin
return;
}
}
fail("Can't find the system scoped dependency in the plugin artifacts.");
}
// -----------------------------------------------------------------------------------------------
// Testing help
// -----------------------------------------------------------------------------------------------
protected void assertPluginDescriptor(
MojoDescriptor mojoDescriptor, String groupId, String artifactId, String version) {
assertNotNull(mojoDescriptor);
PluginDescriptor pd = mojoDescriptor.getPluginDescriptor();
assertNotNull(pd);
assertEquals(groupId, pd.getGroupId());
assertEquals(artifactId, pd.getArtifactId());
assertEquals(version, pd.getVersion());
}
@Test
void testPluginRealmCache() throws Exception {
RepositoryRequest repositoryRequest = new DefaultRepositoryRequest();
repositoryRequest.setLocalRepository(getLocalRepository());
repositoryRequest.setRemoteRepositories(getPluginArtifactRepositories());
// prime realm cache
MavenSession session = createMavenSession(getProject("project-contributing-system-scope-plugin-dep"));
MavenProject project = session.getCurrentProject();
Plugin plugin = project.getPlugin("org.apache.maven.its.plugins:maven-it-plugin");
PluginDescriptor pluginDescriptor = pluginManager.loadPlugin(
plugin, session.getCurrentProject().getRemotePluginRepositories(), session.getRepositorySession());
pluginManager.getPluginRealm(session, pluginDescriptor);
assertEquals(1, pluginDescriptor.getDependencies().size());
for (ComponentDescriptor<?> descriptor : pluginDescriptor.getComponents()) {
assertNotNull(descriptor.getRealm());
assertNotNull(descriptor.getImplementationClass());
}
// reload plugin realm from cache
session = createMavenSession(getProject("project-contributing-system-scope-plugin-dep"));
project = session.getCurrentProject();
plugin = project.getPlugin("org.apache.maven.its.plugins:maven-it-plugin");
pluginDescriptor = pluginManager.loadPlugin(
plugin, session.getCurrentProject().getRemotePluginRepositories(), session.getRepositorySession());
pluginManager.getPluginRealm(session, pluginDescriptor);
assertEquals(1, pluginDescriptor.getDependencies().size());
for (ComponentDescriptor<?> descriptor : pluginDescriptor.getComponents()) {
assertNotNull(descriptor.getRealm());
assertNotNull(descriptor.getImplementationClass());
}
}
@Test
void testBuildExtensionsPluginLoading() throws Exception {
RepositoryRequest repositoryRequest = new DefaultRepositoryRequest();
repositoryRequest.setLocalRepository(getLocalRepository());
repositoryRequest.setRemoteRepositories(getPluginArtifactRepositories());
// prime realm cache
MavenSession session = createMavenSession(getProject("project-with-build-extensions-plugin"));
MavenProject project = session.getCurrentProject();
Plugin plugin = project.getPlugin("org.apache.maven.its.plugins:maven-it-plugin");
PluginDescriptor pluginDescriptor = pluginManager.loadPlugin(
plugin, session.getCurrentProject().getRemotePluginRepositories(), session.getRepositorySession());
ClassRealm pluginRealm = pluginManager.getPluginRealm(session, pluginDescriptor);
assertEquals(pluginRealm, pluginDescriptor.getComponents().get(0).getRealm());
}
}
| PluginManagerTest |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/common/Randomness.java | {
"start": 1743,
"end": 5241
} | class ____ {
private static final Method currentMethod;
private static final Method getRandomMethod;
static {
Method maybeCurrentMethod;
Method maybeGetRandomMethod;
try {
Class<?> clazz = Class.forName("com.carrotsearch.randomizedtesting.RandomizedContext");
maybeCurrentMethod = clazz.getMethod("current");
maybeGetRandomMethod = clazz.getMethod("getRandom");
} catch (Exception e) {
maybeCurrentMethod = null;
maybeGetRandomMethod = null;
}
currentMethod = maybeCurrentMethod;
getRandomMethod = maybeGetRandomMethod;
}
private Randomness() {}
/**
* Provides a reproducible source of randomness seeded by a long
* seed in the settings with the key setting.
*
* @param settings the settings containing the seed
* @param setting the setting to access the seed
* @return a reproducible source of randomness
*/
public static Random get(Settings settings, Setting<Long> setting) {
if (setting.exists(settings)) {
return new Random(setting.get(settings));
} else {
return get();
}
}
/**
* Provides a source of randomness that is reproducible when
* running under the Elasticsearch test suite, and otherwise
* produces a non-reproducible source of randomness. Reproducible
* sources of randomness are created when the system property
* "tests.seed" is set and the security policy allows reading this
* system property. Otherwise, non-reproducible sources of
* randomness are created.
*
* @return a source of randomness
* @throws IllegalStateException if running tests but was not able
* to acquire an instance of Random from
* RandomizedContext or tests are
* running but tests.seed is not set
*/
public static Random get() {
if (currentMethod != null && getRandomMethod != null) {
try {
Object randomizedContext = currentMethod.invoke(null);
return (Random) getRandomMethod.invoke(randomizedContext);
} catch (ReflectiveOperationException e) {
// unexpected, bail
throw new IllegalStateException("running tests but failed to invoke RandomizedContext#getRandom", e);
}
} else {
return getWithoutSeed();
}
}
/**
* Provides a secure source of randomness.
*
* This acts exactly similar to {@link #get()}, but returning a new {@link SecureRandom}.
*/
public static SecureRandom createSecure() {
if (currentMethod != null && getRandomMethod != null) {
// tests, so just use a seed from the non secure random
byte[] seed = new byte[16];
get().nextBytes(seed);
return new SecureRandom(seed);
} else {
return new SecureRandom();
}
}
@SuppressForbidden(reason = "ThreadLocalRandom is okay when not running tests")
private static Random getWithoutSeed() {
assert currentMethod == null && getRandomMethod == null : "running under tests but tried to create non-reproducible random";
return ThreadLocalRandom.current();
}
public static void shuffle(List<?> list) {
Collections.shuffle(list, get());
}
}
| Randomness |
java | apache__thrift | lib/java/src/main/java/org/apache/thrift/transport/sasl/DataFrameWriter.java | {
"start": 1231,
"end": 2472
} | class ____ extends FrameWriter {
@Override
public void withOnlyPayload(byte[] payload, int offset, int length) {
if (!isComplete()) {
throw new IllegalStateException(
"Previous write is not yet complete, with " + frameBytes.remaining() + " bytes left.");
}
frameBytes = buildFrameWithPayload(payload, offset, length);
}
@Override
protected ByteBuffer buildFrame(
byte[] header,
int headerOffset,
int headerLength,
byte[] payload,
int payloadOffset,
int payloadLength) {
if (header != null && headerLength > 0) {
throw new IllegalArgumentException(
"Extra header ["
+ StringUtils.bytesToHexString(header)
+ "] offset "
+ payloadOffset
+ " length "
+ payloadLength);
}
return buildFrameWithPayload(payload, payloadOffset, payloadLength);
}
private ByteBuffer buildFrameWithPayload(byte[] payload, int offset, int length) {
byte[] bytes = new byte[PAYLOAD_LENGTH_BYTES + length];
EncodingUtils.encodeBigEndian(length, bytes, 0);
System.arraycopy(payload, offset, bytes, PAYLOAD_LENGTH_BYTES, length);
return ByteBuffer.wrap(bytes);
}
}
| DataFrameWriter |
java | google__error-prone | core/src/main/java/com/google/errorprone/bugpatterns/threadsafety/ThreadSafeChecker.java | {
"start": 3086,
"end": 4623
} | class ____ extends BugChecker
implements ClassTreeMatcher,
NewClassTreeMatcher,
TypeParameterTreeMatcher,
MethodInvocationTreeMatcher,
MemberReferenceTreeMatcher {
private final WellKnownThreadSafety wellKnownThreadSafety;
private final ThreadSafeAnalysis.Factory threadSafeAnalysisFactory;
@Inject
ThreadSafeChecker(
WellKnownThreadSafety wellKnownThreadSafety,
ThreadSafeAnalysis.Factory threadSafeAnalysisFactory) {
this.wellKnownThreadSafety = wellKnownThreadSafety;
this.threadSafeAnalysisFactory = threadSafeAnalysisFactory;
}
// check instantiations of `@ThreadSafe`s in method references
@Override
public Description matchMemberReference(MemberReferenceTree tree, VisitorState state) {
checkInvocation(tree, ((JCMemberReference) tree).referentType, state, getSymbol(tree));
return NO_MATCH;
}
// check instantiations of `@ThreadSafe`s in method invocations
@Override
public Description matchMethodInvocation(MethodInvocationTree tree, VisitorState state) {
checkInvocation(tree, getType(tree.getMethodSelect()), state, getSymbol(tree));
return NO_MATCH;
}
@Override
public Description matchNewClass(NewClassTree tree, VisitorState state) {
// check instantiations of `@ThreadSafeTypeParameter`s in generic constructor invocations
checkInvocation(
tree, ((JCNewClass) tree).constructorType, state, ((JCNewClass) tree).constructor);
// check instantiations of `@ThreadSafeTypeParameter`s in | ThreadSafeChecker |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/sql/exec/internal/JdbcSelectExecutorStandardImpl.java | {
"start": 2553,
"end": 14418
} | class ____ implements JdbcSelectExecutor {
/**
* Singleton access
*/
public static final JdbcSelectExecutorStandardImpl INSTANCE = new JdbcSelectExecutorStandardImpl();
@Override
public <T, R> T executeQuery(
JdbcSelect jdbcSelect,
JdbcParameterBindings jdbcParameterBindings,
ExecutionContext executionContext,
RowTransformer<R> rowTransformer,
Class<R> domainResultType,
StatementCreator statementCreator,
ResultsConsumer<T, R> resultsConsumer) {
return executeQuery(
jdbcSelect,
jdbcParameterBindings,
executionContext,
rowTransformer,
domainResultType,
-1,
statementCreator,
resultsConsumer
);
}
@Override
public <T, R> T executeQuery(
JdbcSelect jdbcSelect,
JdbcParameterBindings jdbcParameterBindings,
ExecutionContext executionContext,
RowTransformer<R> rowTransformer,
Class<R> domainResultType,
int resultCountEstimate,
StatementCreator statementCreator,
ResultsConsumer<T, R> resultsConsumer) {
final var persistenceContext = executionContext.getSession().getPersistenceContext();
final boolean defaultReadOnlyOrig = persistenceContext.isDefaultReadOnly();
final Boolean readOnly = executionContext.getQueryOptions().isReadOnly();
if ( readOnly != null ) {
// The read-only/modifiable mode for the query was explicitly set.
// Temporarily set the default read-only/modifiable setting to the query's setting.
persistenceContext.setDefaultReadOnly( readOnly );
}
try {
return doExecuteQuery(
jdbcSelect,
jdbcParameterBindings,
executionContext,
rowTransformer,
domainResultType,
resultCountEstimate,
statementCreator,
resultsConsumer
);
}
finally {
if ( readOnly != null ) {
persistenceContext.setDefaultReadOnly( defaultReadOnlyOrig );
}
}
}
private <T, R> T doExecuteQuery(
JdbcSelect jdbcSelect,
JdbcParameterBindings jdbcParameterBindings,
ExecutionContext executionContext,
RowTransformer<R> rowTransformer,
Class<R> domainResultType,
int resultCountEstimate,
StatementCreator statementCreator,
ResultsConsumer<T, R> resultsConsumer) {
final var deferredResultSetAccess = new DeferredResultSetAccess(
jdbcSelect,
jdbcParameterBindings,
executionContext,
statementCreator,
resultCountEstimate
);
final var jdbcValues = resolveJdbcValuesSource(
executionContext.getQueryIdentifier( deferredResultSetAccess.getFinalSql() ),
jdbcSelect,
resultsConsumer.canResultsBeCached(),
executionContext,
deferredResultSetAccess
);
if ( rowTransformer == null ) {
rowTransformer = getRowTransformer( executionContext, jdbcValues );
}
final var session = executionContext.getSession();
final var factory = session.getFactory();
final boolean stats;
long startTime = 0;
final var statistics = factory.getStatistics();
if ( executionContext.hasQueryExecutionToBeAddedToStatistics()
&& jdbcValues instanceof JdbcValuesResultSetImpl ) {
stats = statistics.isStatisticsEnabled();
if ( stats ) {
startTime = System.nanoTime();
}
}
else {
stats = false;
}
/*
* Processing options effectively are only used for entity loading. Here we don't need these values.
*/
final JdbcValuesSourceProcessingOptions processingOptions = new JdbcValuesSourceProcessingOptions() {
@Override
public Object getEffectiveOptionalObject() {
return executionContext.getEntityInstance();
}
@Override
public String getEffectiveOptionalEntityName() {
return null;
}
@Override
public Object getEffectiveOptionalId() {
return executionContext.getEntityId();
}
@Override
public boolean shouldReturnProxies() {
return true;
}
};
final var valuesProcessingState = new JdbcValuesSourceProcessingStateStandardImpl(
jdbcSelect.getLoadedValuesCollector(),
processingOptions,
executionContext
);
final var rowReader = ResultsHelper.createRowReader(
factory,
rowTransformer,
domainResultType,
jdbcValues
);
final var rowProcessingState = new RowProcessingStateStandardImpl( valuesProcessingState, executionContext, rowReader, jdbcValues );
final var logicalConnection = session.getJdbcCoordinator().getLogicalConnection();
final var connection = logicalConnection.getPhysicalConnection();
final var statementAccess = new StatementAccessImpl( connection, logicalConnection, factory );
jdbcSelect.performPreActions( statementAccess, connection, executionContext );
try {
final T result = resultsConsumer.consume(
jdbcValues,
session,
processingOptions,
valuesProcessingState,
rowProcessingState,
rowReader
);
jdbcSelect.performPostAction( true, statementAccess, connection, executionContext );
if ( stats ) {
logQueryStatistics( jdbcSelect, executionContext, startTime, result, statistics );
}
return result;
}
catch (RuntimeException e) {
jdbcSelect.performPostAction( false, statementAccess, connection, executionContext );
throw e;
}
}
private void logQueryStatistics(
JdbcSelect jdbcSelect,
ExecutionContext executionContext,
long startTime,
Object result,
StatisticsImplementor statistics) {
final String query = executionContext.getQueryIdentifier( jdbcSelect.getSqlString() );
final long endTime = System.nanoTime();
final long milliseconds =
TimeUnit.MILLISECONDS.convert( endTime - startTime, TimeUnit.NANOSECONDS );
final int rows = getResultSize( result );
STATISTICS_LOGGER.queryExecuted( query, milliseconds, (long) rows );
statistics.queryExecuted( query, rows, milliseconds );
}
protected static <R> RowTransformer<R> getRowTransformer(ExecutionContext executionContext, JdbcValues jdbcValues) {
@SuppressWarnings("unchecked")
final var tupleTransformer = (TupleTransformer<R>) executionContext.getQueryOptions().getTupleTransformer();
if ( tupleTransformer == null ) {
return RowTransformerStandardImpl.instance();
}
else {
final var domainResults = jdbcValues.getValuesMapping().getDomainResults();
final String[] aliases = new String[domainResults.size()];
for ( int i = 0; i < domainResults.size(); i++ ) {
aliases[i] = domainResults.get( i ).getResultVariable();
}
return new RowTransformerTupleTransformerAdapter<>( aliases, tupleTransformer );
}
}
protected <T> int getResultSize(T result) {
return result instanceof List<?> list ? list.size() : -1;
}
protected JdbcValues resolveJdbcValuesSource(
String queryIdentifier,
JdbcSelect jdbcSelect,
boolean canBeCached,
ExecutionContext executionContext,
ResultSetAccess resultSetAccess) {
final var session = executionContext.getSession();
final var factory = session.getFactory();
final boolean queryCacheEnabled = factory.getSessionFactoryOptions().isQueryCacheEnabled();
final CacheMode cacheMode = resolveCacheMode( executionContext );
final var mappingProducer = jdbcSelect.getJdbcValuesMappingProducer();
final var queryOptions = executionContext.getQueryOptions();
final boolean cacheable =
queryCacheEnabled
&& canBeCached
&& queryOptions.isResultCachingEnabled() == Boolean.TRUE;
final QueryKey queryResultsCacheKey;
final List<?> cachedResults;
if ( cacheable && cacheMode.isGetEnabled() ) {
SQL_EXEC_LOGGER.readingQueryResultCacheData( cacheMode.name() );
final Set<String> querySpaces = jdbcSelect.getAffectedTableNames();
if ( querySpaces == null || querySpaces.isEmpty() ) {
SQL_EXEC_LOGGER.affectedQuerySpacesUnexpectedlyEmpty();
}
else {
SQL_EXEC_LOGGER.affectedQuerySpaces( querySpaces );
}
final var queryCache = factory.getCache()
.getQueryResultsCache( queryOptions.getResultCacheRegionName() );
queryResultsCacheKey = QueryKey.from(
jdbcSelect.getSqlString(),
queryOptions.getLimit(),
executionContext.getQueryParameterBindings(),
session
);
cachedResults = queryCache.get(
// todo (6.0) : QueryCache#get takes the `queryResultsCacheKey` see tat discussion above
queryResultsCacheKey,
// todo (6.0) : `querySpaces` and `session` make perfect sense as args, but its odd passing those into this method just to pass along
// atm we do not even collect querySpaces, but we need to
querySpaces,
session
);
// todo (6.0) : `querySpaces` and `session` are used in QueryCache#get to verify "up-to-dateness" via UpdateTimestampsCache
// better imo to move UpdateTimestampsCache handling here and have QueryCache be a simple access to
// the underlying query result cache region.
//
// todo (6.0) : if we go this route (^^), still beneficial to have an abstraction over different UpdateTimestampsCache-based
// invalidation strategies - QueryCacheInvalidationStrategy
final var statistics = factory.getStatistics();
if ( statistics.isStatisticsEnabled() ) {
if ( cachedResults == null ) {
statistics.queryCacheMiss( queryIdentifier, queryCache.getRegion().getName() );
}
else {
statistics.queryCacheHit( queryIdentifier, queryCache.getRegion().getName() );
}
}
}
else {
SQL_EXEC_LOGGER.skippingReadingQueryResultCacheData(
queryCacheEnabled ? "enabled" : "disabled",
cacheMode.name()
);
cachedResults = null;
if ( cacheable && cacheMode.isPutEnabled() ) {
queryResultsCacheKey = QueryKey.from(
jdbcSelect.getSqlString(),
queryOptions.getLimit(),
executionContext.getQueryParameterBindings(),
session
);
}
else {
queryResultsCacheKey = null;
}
}
return resolveJdbcValues(
queryIdentifier,
executionContext,
resultSetAccess,
cachedResults,
queryResultsCacheKey,
mappingProducer,
session,
factory
);
}
private static AbstractJdbcValues resolveJdbcValues(
String queryIdentifier,
ExecutionContext executionContext,
ResultSetAccess resultSetAccess,
List<?> cachedResults,
QueryKey queryResultsCacheKey,
JdbcValuesMappingProducer mappingProducer,
SharedSessionContractImplementor session,
SessionFactoryImplementor factory) {
final var loadQueryInfluencers = session.getLoadQueryInfluencers();
if ( cachedResults == null ) {
final CachedJdbcValuesMetadata metadataForCache;
final JdbcValuesMapping jdbcValuesMapping;
if ( queryResultsCacheKey == null ) {
jdbcValuesMapping = mappingProducer.resolve( resultSetAccess, loadQueryInfluencers, factory );
metadataForCache = null;
}
else {
// If we need to put the values into the cache, we need to be able to capture the JdbcValuesMetadata
final var capturingMetadata = new CapturingJdbcValuesMetadata( resultSetAccess );
jdbcValuesMapping = mappingProducer.resolve( capturingMetadata, loadQueryInfluencers, factory );
metadataForCache = capturingMetadata.resolveMetadataForCache();
}
return new JdbcValuesResultSetImpl(
resultSetAccess,
queryResultsCacheKey,
queryIdentifier,
executionContext.getQueryOptions(),
false,
jdbcValuesMapping,
metadataForCache,
executionContext
);
}
else {
final var valuesMetadata =
!cachedResults.isEmpty()
&& cachedResults.get( 0 ) instanceof JdbcValuesMetadata jdbcValuesMetadata
? jdbcValuesMetadata
: resultSetAccess;
return new JdbcValuesCacheHit( cachedResults,
mappingProducer.resolve( valuesMetadata, loadQueryInfluencers, factory ) );
}
}
private static CacheMode resolveCacheMode(ExecutionContext executionContext) {
final var queryOptions = executionContext.getQueryOptions();
return coalesceSuppliedValues(
() -> queryOptions == null ? null : queryOptions.getCacheMode(),
executionContext.getSession()::getCacheMode,
() -> CacheMode.NORMAL
);
}
static | JdbcSelectExecutorStandardImpl |
java | mybatis__mybatis-3 | src/main/java/org/apache/ibatis/cache/CacheException.java | {
"start": 783,
"end": 1165
} | class ____ extends PersistenceException {
private static final long serialVersionUID = -193202262468464650L;
public CacheException() {
}
public CacheException(String message) {
super(message);
}
public CacheException(String message, Throwable cause) {
super(message, cause);
}
public CacheException(Throwable cause) {
super(cause);
}
}
| CacheException |
java | spring-projects__spring-framework | spring-test/src/test/java/org/springframework/test/context/junit4/RepeatedSpringRunnerTests.java | {
"start": 4701,
"end": 5113
} | class ____ extends AbstractRepeatedTestCase {
@Test
@RepeatedFiveTimes
public void repeatedFiveTimes() {
incrementInvocationCount();
}
}
/**
* Tests for claims raised in <a href="https://jira.spring.io/browse/SPR-6011" target="_blank">SPR-6011</a>.
*/
@Ignore("TestCase classes are run manually by the enclosing test class")
public static final | RepeatedFiveTimesViaMetaAnnotationRepeatedTestCase |
java | quarkusio__quarkus | integration-tests/mongodb-panache/src/main/java/io/quarkus/it/mongodb/panache/person/resources/PersonEntityResource.java | {
"start": 689,
"end": 3228
} | class ____ {
@GET
public List<PersonEntity> getPersons(@QueryParam("sort") String sort) {
if (sort != null) {
return PersonEntity.listAll(Sort.ascending(sort));
}
return PersonEntity.listAll();
}
@GET
@Path("/search/{name}")
public Set<PersonName> searchPersons(@PathParam("name") String name) {
Set<PersonName> uniqueNames = new HashSet<>();
List<PersonName> lastnames = PersonEntity.find("lastname = ?1 and status = ?2", name, Status.ALIVE)
.project(PersonName.class)
.withReadPreference(ReadPreference.primaryPreferred())
.list();
lastnames.forEach(p -> uniqueNames.add(p));// this will throw if it's not the right type
return uniqueNames;
}
@POST
public Response addPerson(PersonEntity person) {
person.persist();
String id = person.id.toString();
return Response.created(URI.create("/persons/entity/" + id)).build();
}
@POST
@Path("/multiple")
public void addPersons(List<PersonEntity> persons) {
PersonEntity.persist(persons);
}
@PUT
public Response updatePerson(PersonEntity person) {
person.update();
return Response.accepted().build();
}
// PATCH is not correct here but it allows to test persistOrUpdate without a specific subpath
@PATCH
public Response upsertPerson(PersonEntity person) {
person.persistOrUpdate();
return Response.accepted().build();
}
@DELETE
@Path("/{id}")
public void deletePerson(@PathParam("id") String id) {
PersonEntity person = PersonEntity.findById(Long.parseLong(id));
person.delete();
}
@GET
@Path("/{id}")
public PersonEntity getPerson(@PathParam("id") String id) {
return PersonEntity.findById(Long.parseLong(id));
}
@POST
@Path("/ids")
public List<PersonEntity> getPerson(List<String> ids) {
return PersonEntity.findByIds(ids.stream().map(Long::valueOf).toList());
}
@GET
@Path("/count")
public long countAll() {
return PersonEntity.count();
}
@DELETE
public void deleteAll() {
PersonEntity.deleteAll();
}
@POST
@Path("/rename")
public Response rename(@QueryParam("previousName") String previousName, @QueryParam("newName") String newName) {
PersonEntity.update("lastname", newName).where("lastname", previousName);
return Response.ok().build();
}
}
| PersonEntityResource |
java | quarkusio__quarkus | extensions/resteasy-reactive/rest-client/deployment/src/test/java/io/quarkus/rest/client/reactive/timeout/BuilderReadTimeoutTest.java | {
"start": 2106,
"end": 2356
} | interface ____ {
@GET
@Path("/slow")
String slow();
@GET
@Path("/fast")
String fast();
}
@Path("/")
@Produces(MediaType.TEXT_PLAIN)
@Consumes(MediaType.TEXT_PLAIN)
public static | Client |
java | junit-team__junit5 | jupiter-tests/src/test/java/org/junit/jupiter/params/provider/MethodArgumentsProviderTests.java | {
"start": 29732,
"end": 29977
} | class ____ {
// Test
void test(String param) {
}
// NOT a Factory
static String test() {
return null;
}
// Also NOT a Factory
static Object test(int num) {
return null;
}
}
static | MultipleInvalidDefaultFactoriesTestCase |
java | google__dagger | dagger-compiler/main/java/dagger/internal/codegen/binding/ComponentDependencyProvisionBinding.java | {
"start": 2128,
"end": 2297
} | class ____
extends ContributionBinding.Builder<ComponentDependencyProvisionBinding, Builder> {
abstract Builder nullability(Nullability nullability);
}
}
| Builder |
java | apache__camel | components/camel-kafka/src/main/java/org/apache/camel/component/kafka/consumer/errorhandler/BridgeErrorStrategy.java | {
"start": 1232,
"end": 2421
} | class ____ implements PollExceptionStrategy {
private static final Logger LOG = LoggerFactory.getLogger(BridgeErrorStrategy.class);
private final KafkaFetchRecords recordFetcher;
private final Consumer<?, ?> consumer;
private boolean continueFlag = true; // whether to continue polling or not
public BridgeErrorStrategy(KafkaFetchRecords recordFetcher, Consumer<?, ?> consumer) {
this.recordFetcher = recordFetcher;
this.consumer = consumer;
}
@Override
public boolean canContinue() {
return continueFlag;
}
@Override
public void handle(long partitionLastOffset, Exception exception) {
LOG.warn("Deferring processing to the exception handler based on polling exception strategy");
// use bridge error handler to route with exception
recordFetcher.getBridge().handleException(exception);
// skip this poison message and seek to the next message
SeekUtil.seekToNextOffset(consumer, partitionLastOffset);
if (exception instanceof AuthenticationException || exception instanceof AuthorizationException) {
continueFlag = false;
}
}
}
| BridgeErrorStrategy |
java | dropwizard__dropwizard | dropwizard-logging/src/main/java/io/dropwizard/logging/common/PrefixedExtendedThrowableProxyConverter.java | {
"start": 328,
"end": 627
} | class ____ extends PrefixedThrowableProxyConverter {
@Override
protected void extraData(StringBuilder builder, StackTraceElementProxy step) {
if (step != null) {
ThrowableProxyUtil.subjoinPackagingData(builder, step);
}
}
}
| PrefixedExtendedThrowableProxyConverter |
java | apache__hadoop | hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/commit/ITestAbfsTerasort.java | {
"start": 3322,
"end": 12697
} | class ____ extends AbstractAbfsClusterITest {
private static final Logger LOG =
LoggerFactory.getLogger(ITestAbfsTerasort.class);
public static final int EXPECTED_PARTITION_COUNT = 10;
public static final int PARTITION_SAMPLE_SIZE = 1000;
public static final int ROW_COUNT = 1000;
/**
* This has to be common across all test methods.
*/
private static final Path TERASORT_PATH = new Path("/ITestAbfsTerasort");
/**
* Duration tracker created in the first of the test cases and closed
* in {@link #test_140_teracomplete()}.
*/
private static Optional<DurationInfo> terasortDuration = empty();
/**
* Tracker of which stages are completed and how long they took.
*/
private static final Map<String, DurationInfo> COMPLETED_STAGES = new HashMap<>();
/**
* FileSystem statistics are collected from the _SUCCESS markers.
*/
protected static final IOStatisticsSnapshot JOB_IOSTATS =
snapshotIOStatistics();
/**
* Map of stage -> success file.
*/
private static final Map<String, ManifestSuccessData> SUCCESS_FILES = new HashMap<>();
/** Base path for all the terasort input and output paths. */
private Path terasortPath;
/** Input (teragen) path. */
private Path sortInput;
/** Path where sorted data goes. */
private Path sortOutput;
/** Path for validated job's output. */
private Path sortValidate;
public ITestAbfsTerasort() throws Exception {
}
@BeforeEach
@Override
public void setup() throws Exception {
// superclass calls requireScaleTestsEnabled();
super.setup();
prepareToTerasort();
}
/**
* Set up the job conf with the options for terasort chosen by the scale
* options.
* @param conf configuration
*/
@Override
protected void applyCustomConfigOptions(JobConf conf) {
// small sample size for faster runs
conf.setInt(TeraSortConfigKeys.SAMPLE_SIZE.key(),
getSampleSizeForEachPartition());
conf.setInt(TeraSortConfigKeys.NUM_PARTITIONS.key(),
getExpectedPartitionCount());
conf.setBoolean(
TeraSortConfigKeys.USE_SIMPLE_PARTITIONER.key(),
false);
}
private int getExpectedPartitionCount() {
return EXPECTED_PARTITION_COUNT;
}
private int getSampleSizeForEachPartition() {
return PARTITION_SAMPLE_SIZE;
}
protected int getRowCount() {
return ROW_COUNT;
}
/**
* Set up the terasort by initializing paths variables
* The paths used must be unique across parameterized runs but
* common across all test cases in a single parameterized run.
*/
private void prepareToTerasort() {
terasortPath = getFileSystem().makeQualified(TERASORT_PATH);
sortInput = new Path(terasortPath, "sortin");
sortOutput = new Path(terasortPath, "sortout");
sortValidate = new Path(terasortPath, "validate");
}
/**
* Declare that a stage has completed.
* @param stage stage name/key in the map
* @param d duration.
*/
private static void completedStage(final String stage,
final DurationInfo d) {
COMPLETED_STAGES.put(stage, d);
}
/**
* Declare a stage which is required for this test case.
* @param stage stage name
*/
private static void requireStage(final String stage) {
assumeThat(COMPLETED_STAGES.get(stage))
.as("Required stage was not completed: " + stage)
.isNotNull();
}
/**
* Execute a single stage in the terasort.
* Updates the completed stages map with the stage duration -if successful.
* @param stage Stage name for the stages map.
* @param jobConf job conf
* @param dest destination directory -the _SUCCESS file will be expected here.
* @param tool tool to run.
* @param args args for the tool.
* @param minimumFileCount minimum number of files to have been created
* @return the job success file.
* @throws Exception any failure
*/
private ManifestSuccessData executeStage(
final String stage,
final JobConf jobConf,
final Path dest,
final Tool tool,
final String[] args,
final int minimumFileCount) throws Exception {
int result;
// the duration info is created outside a try-with-resources
// clause as it is used later.
DurationInfo d = new DurationInfo(LOG, stage);
try {
result = ToolRunner.run(jobConf, tool, args);
} finally {
d.close();
}
dumpOutputTree(dest);
assertEquals(0, result, stage+ "(" + StringUtils.join(", ", args) + ")"+ " failed");
final ManifestSuccessData successFile = validateSuccessFile(getFileSystem(), dest,
minimumFileCount, "");
final IOStatistics iostats = successFile.getIOStatistics();
JOB_IOSTATS.aggregate(iostats);
SUCCESS_FILES.put(stage, successFile);
completedStage(stage, d);
// now assert there were no failures recorded in the IO statistics
// for critical functions.
// these include collected statistics from manifest save
// operations.
assertNoFailureStatistics(iostats,
stage,
OP_SAVE_TASK_MANIFEST,
OP_RENAME_FILE);
return successFile;
}
/**
* Set up terasort by cleaning out the destination, and note the initial
* time before any of the jobs are executed.
*
* This is executed first <i>for each parameterized run</i>.
* It is where all variables which need to be reset for each run need
* to be reset.
*/
@Test
public void test_100_terasort_setup() throws Throwable {
describe("Setting up for a terasort");
getFileSystem().delete(terasortPath, true);
terasortDuration = Optional.of(new DurationInfo(LOG, false, "Terasort"));
}
@Test
public void test_110_teragen() throws Throwable {
describe("Teragen to %s", sortInput);
getFileSystem().delete(sortInput, true);
JobConf jobConf = newJobConf();
patchConfigurationForCommitter(jobConf);
executeStage("teragen",
jobConf,
sortInput,
new TeraGen(),
new String[]{Integer.toString(getRowCount()), sortInput.toString()},
1);
}
@Test
public void test_120_terasort() throws Throwable {
describe("Terasort from %s to %s", sortInput, sortOutput);
requireStage("teragen");
getFileSystem().delete(sortOutput, true);
loadSuccessFile(getFileSystem(), sortInput);
JobConf jobConf = newJobConf();
patchConfigurationForCommitter(jobConf);
executeStage("terasort",
jobConf,
sortOutput,
new TeraSort(),
new String[]{sortInput.toString(), sortOutput.toString()},
1);
}
@Test
public void test_130_teravalidate() throws Throwable {
describe("TeraValidate from %s to %s", sortOutput, sortValidate);
requireStage("terasort");
getFileSystem().delete(sortValidate, true);
loadSuccessFile(getFileSystem(), sortOutput);
JobConf jobConf = newJobConf();
patchConfigurationForCommitter(jobConf);
executeStage("teravalidate",
jobConf,
sortValidate,
new TeraValidate(),
new String[]{sortOutput.toString(), sortValidate.toString()},
1);
}
/**
* Print the results, and save to the base dir as a CSV file.
* Why there? Makes it easy to list and compare.
*/
@Test
public void test_140_teracomplete() throws Throwable {
terasortDuration.ifPresent(d -> {
d.close();
completedStage("overall", d);
});
// IO Statistics
IOStatisticsLogging.logIOStatisticsAtLevel(LOG, IOSTATISTICS_LOGGING_LEVEL_INFO, JOB_IOSTATS);
// and the summary
final StringBuilder results = new StringBuilder();
results.append("\"Operation\"\t\"Duration\"\n");
// this is how you dynamically create a function in a method
// for use afterwards.
// Works because there's no IOEs being raised in this sequence.
Consumer<String> stage = (s) -> {
DurationInfo duration = COMPLETED_STAGES.get(s);
results.append(String.format("\"%s\"\t\"%s\"\n",
s,
duration == null ? "" : duration));
};
stage.accept("teragen");
stage.accept("terasort");
stage.accept("teravalidate");
stage.accept("overall");
String text = results.toString();
File resultsFile = File.createTempFile("results", ".csv");
FileUtils.write(resultsFile, text, StandardCharsets.UTF_8);
LOG.info("Results are in {}\n{}", resultsFile, text);
LOG.info("Report directory {}", getReportDir());
}
/**
* Reset the duration so if two committer tests are run sequentially.
* Without this the total execution time is reported as from the start of
* the first test suite to the end of the second.
*/
@Test
public void test_150_teracleanup() throws Throwable {
terasortDuration = Optional.empty();
}
@Test
public void test_200_directory_deletion() throws Throwable {
getFileSystem().delete(terasortPath, true);
}
/**
* Dump the files under a path -but not fail if the path is not present.,
* @param path path to dump
* @throws Exception any failure.
*/
protected void dumpOutputTree(Path path) throws Exception {
LOG.info("Files under output directory {}", path);
try {
RemoteIterators.foreach(getFileSystem().listFiles(path, true),
(status) -> LOG.info("{}", status));
} catch (FileNotFoundException e) {
LOG.info("Output directory {} not found", path);
}
}
}
| ITestAbfsTerasort |
java | elastic__elasticsearch | x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/expression/function/scalar/string/EndsWithFunctionPipe.java | {
"start": 693,
"end": 3433
} | class ____ extends Pipe {
private final Pipe input, pattern;
private final boolean caseInsensitive;
public EndsWithFunctionPipe(Source source, Expression expression, Pipe input, Pipe pattern, boolean caseInsensitive) {
super(source, expression, Arrays.asList(input, pattern));
this.input = input;
this.pattern = pattern;
this.caseInsensitive = caseInsensitive;
}
@Override
public final Pipe replaceChildren(List<Pipe> newChildren) {
return replaceChildren(newChildren.get(0), newChildren.get(1));
}
@Override
public final Pipe resolveAttributes(AttributeResolver resolver) {
Pipe newInput = input.resolveAttributes(resolver);
Pipe newPattern = pattern.resolveAttributes(resolver);
if (newInput == input && newPattern == pattern) {
return this;
}
return replaceChildren(newInput, newPattern);
}
@Override
public boolean supportedByAggsOnlyQuery() {
return input.supportedByAggsOnlyQuery() && pattern.supportedByAggsOnlyQuery();
}
@Override
public boolean resolved() {
return input.resolved() && pattern.resolved();
}
protected EndsWithFunctionPipe replaceChildren(Pipe newInput, Pipe newPattern) {
return new EndsWithFunctionPipe(source(), expression(), newInput, newPattern, caseInsensitive);
}
@Override
public final void collectFields(QlSourceBuilder sourceBuilder) {
input.collectFields(sourceBuilder);
pattern.collectFields(sourceBuilder);
}
@Override
protected NodeInfo<EndsWithFunctionPipe> info() {
return NodeInfo.create(this, EndsWithFunctionPipe::new, expression(), input, pattern, caseInsensitive);
}
@Override
public EndsWithFunctionProcessor asProcessor() {
return new EndsWithFunctionProcessor(input.asProcessor(), pattern.asProcessor(), caseInsensitive);
}
public Pipe input() {
return input;
}
public Pipe pattern() {
return pattern;
}
protected boolean isCaseInsensitive() {
return caseInsensitive;
}
@Override
public int hashCode() {
return Objects.hash(input, pattern, caseInsensitive);
}
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj == null || getClass() != obj.getClass()) {
return false;
}
EndsWithFunctionPipe other = (EndsWithFunctionPipe) obj;
return Objects.equals(input(), other.input())
&& Objects.equals(pattern(), other.pattern())
&& Objects.equals(isCaseInsensitive(), other.isCaseInsensitive());
}
}
| EndsWithFunctionPipe |
java | apache__hadoop | hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/Job.java | {
"start": 1799,
"end": 3582
} | interface ____ {
JobId getID();
String getName();
JobState getState();
JobReport getReport();
/**
* Get all the counters of this job. This includes job-counters aggregated
* together with the counters of each task. This creates a clone of the
* Counters, so use this judiciously.
* @return job-counters and aggregate task-counters
*/
Counters getAllCounters();
Map<TaskId,Task> getTasks();
Map<TaskId,Task> getTasks(TaskType taskType);
Task getTask(TaskId taskID);
List<String> getDiagnostics();
int getTotalMaps();
int getTotalReduces();
int getCompletedMaps();
int getCompletedReduces();
int getFailedMaps();
int getFailedReduces();
int getKilledMaps();
int getKilledReduces();
float getProgress();
boolean isUber();
String getUserName();
String getQueueName();
/**
* @return a path to where the config file for this job is located.
*/
Path getConfFile();
/**
* @return a parsed version of the config files pointed to by
* {@link #getConfFile()}.
* @throws IOException on any error trying to load the conf file.
*/
Configuration loadConfFile() throws IOException;
/**
* @return the ACLs for this job for each type of JobACL given.
*/
Map<JobACL, AccessControlList> getJobACLs();
TaskAttemptCompletionEvent[]
getTaskAttemptCompletionEvents(int fromEventId, int maxEvents);
TaskCompletionEvent[]
getMapAttemptCompletionEvents(int startIndex, int maxEvents);
/**
* @return information for MR AppMasters (previously failed and current)
*/
List<AMInfo> getAMInfos();
boolean checkAccess(UserGroupInformation callerUGI, JobACL jobOperation);
public void setQueueName(String queueName);
public void setJobPriority(Priority priority);
}
| Job |
java | quarkusio__quarkus | independent-projects/arc/processor/src/main/java/io/quarkus/arc/processor/Injection.java | {
"start": 5952,
"end": 19167
} | class
____ = ipTarget.asField().declaringClass().name();
} else if (ipTarget.kind() == Kind.METHOD_PARAMETER) {
// the injection point is a producer method parameter then the type parameter of the injected Bean
// must be the same as the producer method return type
if (beanType == BeanType.PRODUCER_METHOD) {
expectedType = ipTarget.asMethodParameter().method().returnType().name();
} else {
expectedType = ipTarget.asMethodParameter().method().declaringClass().name();
}
}
if (expectedType != null
// This is very rudimentary check, might need to be expanded?
&& !expectedType.equals(actualType.name())) {
throw new DefinitionException(
"Type of injected Bean<T> does not match the type of the bean declaring the " +
"injection point. Problematic injection point: " + injectionPointInfo.getTargetInfo());
}
}
}
if (beanType == BeanType.INTERCEPTOR) {
// the injection point is a field, an initializer method parameter or a bean constructor of an interceptor,
// with qualifier @Intercepted, then the type parameter of the injected Bean must be an unbounded wildcard
if (injectionPointInfo.getRequiredType().name().equals(DotNames.BEAN)
&& injectionPointInfo.getRequiredQualifier(DotNames.INTERCEPTED) != null
&& injectionPointInfo.getRequiredType().kind() == Type.Kind.PARAMETERIZED_TYPE) {
ParameterizedType parameterizedType = injectionPointInfo.getRequiredType().asParameterizedType();
// there should be exactly one param - wildcard - and it has to be unbound; all else is DefinitionException
if (parameterizedType.arguments().size() != 1
|| !(parameterizedType.arguments().get(0).kind() == Type.Kind.WILDCARD_TYPE)
|| !(parameterizedType.arguments().get(0).asWildcardType().extendsBound().name().equals(DotNames.OBJECT)
&& parameterizedType.arguments().get(0).asWildcardType().superBound() == null)) {
throw new DefinitionException(
"Injected @Intercepted Bean<?> has to use unbound wildcard as its type parameter. " +
"Problematic injection point: " + injectionPointInfo.getTargetInfo());
}
}
// the injection point is a field, an initializer method parameter or a bean constructor, with qualifier
// @Default, then the type parameter of the injected Bean, or Interceptor must be the same as the type
// declaring the injection point
if (injectionPointInfo.getRequiredType().name().equals(DotNames.INTERCEPTOR_BEAN)
&& injectionPointInfo.getRequiredType().kind() == Type.Kind.PARAMETERIZED_TYPE
&& injectionPointInfo.getRequiredType().asParameterizedType().arguments().size() == 1) {
Type actualType = injectionPointInfo.getRequiredType().asParameterizedType().arguments().get(0);
AnnotationTarget ipTarget = injectionPointInfo.getAnnotationTarget();
DotName expectedType = null;
if (ipTarget.kind() == Kind.FIELD) {
expectedType = ipTarget.asField().declaringClass().name();
} else if (ipTarget.kind() == Kind.METHOD_PARAMETER) {
expectedType = ipTarget.asMethodParameter().method().declaringClass().name();
}
if (expectedType != null
// This is very rudimentary check, might need to be expanded?
&& !expectedType.equals(actualType.name())) {
throw new DefinitionException(
"Type of injected Interceptor<T> does not match the type of the bean declaring the " +
"injection point. Problematic injection point: " + injectionPointInfo.getTargetInfo());
}
}
}
if (beanType == BeanType.DECORATOR) {
// the injection point is a field, an initializer method parameter or a bean constructor, with qualifier
// @Default, then the type parameter of the injected Decorator must be the same as the type
// declaring the injection point
if (injectionPointInfo.getRequiredType().name().equals(DotNames.DECORATOR)
&& injectionPointInfo.getRequiredType().kind() == Type.Kind.PARAMETERIZED_TYPE
&& injectionPointInfo.getRequiredType().asParameterizedType().arguments().size() == 1) {
Type actualType = injectionPointInfo.getRequiredType().asParameterizedType().arguments().get(0);
AnnotationTarget ipTarget = injectionPointInfo.getAnnotationTarget();
DotName expectedType = null;
if (ipTarget.kind() == Kind.FIELD) {
expectedType = ipTarget.asField().declaringClass().name();
} else if (ipTarget.kind() == Kind.METHOD_PARAMETER) {
expectedType = ipTarget.asMethodParameter().method().declaringClass().name();
}
if (expectedType != null
// This is very rudimentary check, might need to be expanded?
&& !expectedType.equals(actualType.name())) {
throw new DefinitionException(
"Type of injected Decorator<T> does not match the type of the bean declaring the " +
"injection point. Problematic injection point: " + injectionPointInfo.getTargetInfo());
}
}
// the injection point is a field, an initializer method parameter or a bean constructor of a decorator,
// with qualifier @Decorated, then the type parameter of the injected Bean must be the same as the delegate type
//
// a validation for the specification text above would naturally belong here, but we don't have
// access to the delegate type yet, so this is postponed to `Beans.validateInterceptorDecorator()`
}
}
private static void validateInjections(List<Injection> injections, BeanType beanType) {
for (Injection injection : injections) {
for (InjectionPointInfo ipi : injection.injectionPoints) {
validateInjections(ipi, beanType);
}
}
}
static List<Injection> forBean(AnnotationTarget beanTarget, BeanInfo declaringBean, BeanDeployment beanDeployment,
InjectionPointModifier transformer, BeanType beanType) {
if (Kind.CLASS.equals(beanTarget.kind())) {
List<Injection> injections = forClassBean(beanTarget.asClass(), beanTarget.asClass(), beanDeployment,
transformer, false, new HashSet<>());
Set<AnnotationTarget> injectConstructors = injections.stream().filter(Injection::isConstructor)
.map(Injection::getTarget).collect(Collectors.toSet());
if (injectConstructors.size() > 1) {
throw new DefinitionException(
"Multiple @Inject constructors found on " + beanTarget.asClass().name() + ":\n"
+ injectConstructors.stream().map(Object::toString).collect(Collectors.joining("\n")));
}
for (AnnotationTarget injectConstructor : injectConstructors) {
Set<AnnotationInstance> parameterAnnotations = Annotations.getParameterAnnotations(beanDeployment,
injectConstructor.asMethod());
for (AnnotationInstance annotation : parameterAnnotations) {
if (DotNames.DISPOSES.equals(annotation.name())) {
throw new DefinitionException(
"Bean constructor must not have a @Disposes parameter: " + injectConstructor);
}
if (DotNames.OBSERVES.equals(annotation.name())) {
throw new DefinitionException(
"Bean constructor must not have an @Observes parameter: " + injectConstructor);
}
if (DotNames.OBSERVES_ASYNC.equals(annotation.name())) {
throw new DefinitionException(
"Bean constructor must not have an @ObservesAsync parameter: " + injectConstructor);
}
}
}
Set<MethodInfo> initializerMethods = injections.stream()
.filter(it -> it.isMethod() && !it.isConstructor())
.map(Injection::getTarget)
.map(AnnotationTarget::asMethod)
.collect(Collectors.toSet());
for (MethodInfo initializerMethod : initializerMethods) {
if (beanDeployment.hasAnnotation(initializerMethod, DotNames.PRODUCES)) {
throw new DefinitionException("Initializer method must not be annotated @Produces "
+ "(alternatively, producer method must not be annotated @Inject): "
+ beanTarget.asClass() + "." + initializerMethod.name());
}
if (Annotations.hasParameterAnnotation(beanDeployment, initializerMethod, DotNames.DISPOSES)) {
throw new DefinitionException("Initializer method must not have a @Disposes parameter "
+ "(alternatively, disposer method must not be annotated @Inject): "
+ beanTarget.asClass() + "." + initializerMethod.name());
}
if (Annotations.hasParameterAnnotation(beanDeployment, initializerMethod, DotNames.OBSERVES)) {
throw new DefinitionException("Initializer method must not have an @Observes parameter "
+ "(alternatively, observer method must not be annotated @Inject): "
+ beanTarget.asClass() + "." + initializerMethod.name());
}
if (Annotations.hasParameterAnnotation(beanDeployment, initializerMethod, DotNames.OBSERVES_ASYNC)) {
throw new DefinitionException("Initializer method must not have an @ObservesAsync parameter "
+ "(alternatively, async observer method must not be annotated @Inject): "
+ beanTarget.asClass() + "." + initializerMethod.name());
}
}
validateInjections(injections, beanType);
return injections;
} else if (Kind.METHOD.equals(beanTarget.kind())) {
MethodInfo producerMethod = beanTarget.asMethod();
if (beanDeployment.hasAnnotation(producerMethod, DotNames.INJECT)) {
throw new DefinitionException("Producer method must not be annotated @Inject "
+ "(alternatively, initializer method must not be annotated @Produces): "
+ producerMethod);
}
if (Annotations.hasParameterAnnotation(beanDeployment, producerMethod, DotNames.DISPOSES)) {
throw new DefinitionException("Producer method must not have a @Disposes parameter "
+ "(alternatively, disposer method must not be annotated @Produces): "
+ producerMethod);
}
if (Annotations.hasParameterAnnotation(beanDeployment, producerMethod, DotNames.OBSERVES)) {
throw new DefinitionException("Producer method must not have an @Observes parameter "
+ "(alternatively, observer method must not be annotated @Produces): "
+ producerMethod);
}
if (Annotations.hasParameterAnnotation(beanDeployment, producerMethod, DotNames.OBSERVES_ASYNC)) {
throw new DefinitionException("Producer method must not have an @ObservesAsync parameter "
+ "(alternatively, async observer method must not be annotated @Produces): "
+ producerMethod);
}
if (producerMethod.parameterTypes().isEmpty()) {
return Collections.emptyList();
}
// All parameters are injection points
List<Injection> injections = Collections.singletonList(new Injection(producerMethod,
InjectionPointInfo.fromMethod(producerMethod, declaringBean.getImplClazz(), beanDeployment, transformer)));
validateInjections(injections, beanType);
return injections;
}
throw new IllegalArgumentException("Unsupported annotation target");
}
static | expectedType |
java | google__error-prone | core/src/main/java/com/google/errorprone/bugpatterns/NonRuntimeAnnotation.java | {
"start": 1772,
"end": 3040
} | class ____ extends BugChecker implements MethodInvocationTreeMatcher {
private static final Matcher<ExpressionTree> MATCHER =
instanceMethod()
.onExactClass("java.lang.Class")
.named("getAnnotation")
.withParameters("java.lang.Class");
@Override
public Description matchMethodInvocation(MethodInvocationTree tree, VisitorState state) {
if (!MATCHER.matches(tree, state)) {
return NO_MATCH;
}
Type classType = getType(getOnlyElement(tree.getArguments()));
if (classType == null || classType.getTypeArguments().isEmpty()) {
return NO_MATCH;
}
Type type = getUpperBound(getOnlyElement(classType.getTypeArguments()), state.getTypes());
if (isSameType(type, state.getSymtab().annotationType, state)) {
return NO_MATCH;
}
RetentionPolicy retention = state.getTypes().getRetention(type.asElement());
switch (retention) {
case RUNTIME -> {}
case SOURCE, CLASS -> {
return buildDescription(tree)
.setMessage(
String.format(
"%s; %s has %s retention",
message(), type.asElement().getSimpleName(), retention))
.build();
}
}
return NO_MATCH;
}
}
| NonRuntimeAnnotation |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/cluster/LocalMasterServiceTask.java | {
"start": 747,
"end": 2643
} | class ____ implements ClusterStateTaskListener {
private final Priority priority;
public LocalMasterServiceTask(Priority priority) {
this.priority = priority;
}
protected void execute(ClusterState currentState) {}
protected void onPublicationComplete() {}
public void submit(MasterService masterService, String source) {
// Uses a new queue each time so that these tasks are not batched, but they never change the cluster state anyway so they don't
// trigger the publication process and hence batching isn't really needed.
masterService.createTaskQueue("local-master-service-task", priority, new ClusterStateTaskExecutor<LocalMasterServiceTask>() {
@Override
public boolean runOnlyOnMaster() {
return false;
}
@Override
public String describeTasks(List<LocalMasterServiceTask> tasks) {
return ""; // only one task in the batch so the source is enough
}
@Override
public ClusterState execute(BatchExecutionContext<LocalMasterServiceTask> batchExecutionContext) {
final var thisTask = LocalMasterServiceTask.this;
final var taskContexts = batchExecutionContext.taskContexts();
assert taskContexts.size() == 1 && taskContexts.get(0).getTask() == thisTask
: "expected one-element task list containing current object but was " + taskContexts;
try (var ignored = taskContexts.get(0).captureResponseHeaders()) {
thisTask.execute(batchExecutionContext.initialState());
}
taskContexts.get(0).success(() -> onPublicationComplete());
return batchExecutionContext.initialState();
}
}).submitTask(source, this, null);
}
}
| LocalMasterServiceTask |
java | apache__camel | core/camel-core/src/test/java/org/apache/camel/component/pojo/SayService.java | {
"start": 852,
"end": 1251
} | class ____ implements ISay {
String message = "Hello";
public SayService() {
}
public SayService(String message) {
this.message = message;
}
@Override
public String say() {
return message;
}
public String getMessage() {
return message;
}
public void setMessage(String message) {
this.message = message;
}
}
| SayService |
java | elastic__elasticsearch | x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/process/writer/LengthEncodedWriter.java | {
"start": 974,
"end": 3097
} | class ____ implements RecordWriter {
private OutputStream outputStream;
private ByteBuffer lengthBuffer;
/**
* Create the writer on the OutputStream <code>os</code>.
* This object will never close <code>os</code>.
*/
public LengthEncodedWriter(OutputStream os) {
outputStream = os;
// This will be used to convert 32 bit integers to network byte order
lengthBuffer = ByteBuffer.allocate(4); // 4 == sizeof(int)
}
/**
* Convert each String in the record array to a length/value encoded pair
* and write to the outputstream.
*/
@Override
public void writeRecord(String[] record) throws IOException {
writeNumFields(record.length);
for (String field : record) {
writeField(field);
}
}
/**
* Convert each String in the record list to a length/value encoded
* pair and write to the outputstream.
*/
@Override
public void writeRecord(List<String> record) throws IOException {
writeNumFields(record.size());
for (String field : record) {
writeField(field);
}
}
/**
* Lower level functions to write records individually.
* After this function is called {@link #writeField(String)}
* must be called <code>numFields</code> times.
*/
public void writeNumFields(int numFields) throws IOException {
// number fields
lengthBuffer.clear();
lengthBuffer.putInt(numFields);
outputStream.write(lengthBuffer.array());
}
/**
* Lower level functions to write record fields individually.
* {@linkplain #writeNumFields(int)} must be called first
*/
public void writeField(String field) throws IOException {
byte[] utf8Bytes = field.getBytes(StandardCharsets.UTF_8);
lengthBuffer.clear();
lengthBuffer.putInt(utf8Bytes.length);
outputStream.write(lengthBuffer.array());
outputStream.write(utf8Bytes);
}
@Override
public void flush() throws IOException {
outputStream.flush();
}
}
| LengthEncodedWriter |
java | qos-ch__slf4j | log4j-over-slf4j/src/main/java/org/apache/log4j/spi/LoggerRepository.java | {
"start": 1187,
"end": 2960
} | interface ____ {
/**
* Add a {@link HierarchyEventListener} event to the repository.
*
* @param listener a listener
*/
public void addHierarchyEventListener(HierarchyEventListener listener);
/**
* Returns whether this repository is disabled for a given
* level. The answer depends on the repository threshold and the
* <code>level</code> parameter. See also {@link #setThreshold}
* method.
*/
boolean isDisabled(int level);
/**
* Set the repository-wide threshold. All logging requests below the
* threshold are immediately dropped. By default, the threshold is
* set to <code>Level.ALL</code> which has the lowest possible rank.
*/
public void setThreshold(Level level);
/**
* Another form of {@link #setThreshold(Level)} accepting a string
* parameter instead of a <code>Level</code>.
*/
public void setThreshold(String val);
public void emitNoAppenderWarning(Category cat);
/**
* Get the repository-wide threshold. See {@link
* #setThreshold(Level)} for an explanation.
*/
public Level getThreshold();
public Logger getLogger(String name);
public Logger getLogger(String name, LoggerFactory factory);
public Logger getRootLogger();
public abstract Logger exists(String name);
public abstract void shutdown();
@SuppressWarnings("rawtypes")
public Enumeration getCurrentLoggers();
/**
* Deprecated. Please use {@link #getCurrentLoggers} instead.
*/
@SuppressWarnings("rawtypes")
public Enumeration getCurrentCategories();
public abstract void fireAddAppenderEvent(Category logger, Appender appender);
public abstract void resetConfiguration();
}
| LoggerRepository |
java | apache__camel | components/camel-jte/src/main/java/org/apache/camel/component/jte/JteCodeResolver.java | {
"start": 1203,
"end": 2676
} | class ____ implements CodeResolver {
private final CamelContext camelContext;
private final Map<String, String> headerTemplates = new ConcurrentHashMap<>();
private final Map<String, String> pathMappings = new ConcurrentHashMap<>();
public JteCodeResolver(CamelContext camelContext) {
this.camelContext = camelContext;
}
public void addTemplateFromHeader(String exchangeId, String template) {
if (exchangeId != null && template != null) {
headerTemplates.put(exchangeId, template);
}
}
public void addPathMapping(String name, String uri) {
if (name != null && uri != null) {
pathMappings.put(name, uri);
}
}
@Override
public String resolve(String name) {
String answer = headerTemplates.remove(name);
if (answer == null) {
String key = pathMappings.remove(name);
if (key != null) {
key = name + ":" + key;
} else {
key = name;
}
try {
InputStream is = ResourceHelper.resolveResourceAsInputStream(camelContext, key);
if (is != null) {
answer = IOHelper.loadText(is);
}
} catch (IOException e) {
// ignore
}
}
return answer;
}
@Override
public long getLastModified(String name) {
return 0;
}
}
| JteCodeResolver |
java | spring-projects__spring-framework | spring-web/src/test/java/org/springframework/http/converter/json/SpringHandlerInstantiatorTests.java | {
"start": 8277,
"end": 8380
} | class ____ {
public String capitalize(String text) {
return text.toUpperCase();
}
}
}
| Capitalizer |
java | apache__camel | components/camel-as2/camel-as2-api/src/main/java/org/apache/camel/component/as2/api/entity/ApplicationEDIConsentEntity.java | {
"start": 993,
"end": 1485
} | class ____ extends ApplicationEntity {
public ApplicationEDIConsentEntity(byte[] content, String charset, String contentTransferEncoding,
boolean isMainBody, String fileName) {
super(content, ContentType.create(AS2MediaType.APPLICATION_EDI_CONSENT, charset), contentTransferEncoding, isMainBody,
fileName);
}
@Override
public void close() throws IOException {
// do nothing
}
}
| ApplicationEDIConsentEntity |
java | micronaut-projects__micronaut-core | inject/src/main/java/io/micronaut/context/ConfigurableBeanContext.java | {
"start": 770,
"end": 1065
} | interface ____
extends BeanContext {
/**
* Configures the bean context loading all bean definitions
* required to perform successful startup without starting the context itself.
*
* <p>Once called the methods of the {@link BeanDefinitionRegistry} | ConfigurableBeanContext |
java | hibernate__hibernate-orm | hibernate-envers/src/test/java/org/hibernate/orm/test/envers/integration/customtype/ExtendedEnumTypeTest.java | {
"start": 2275,
"end": 2512
} | class ____ {
@Id
@GeneratedValue
private Integer id;
@Type( ExtendedEnumType.class )
@Enumerated(EnumType.STRING)
private Status status;
@Enumerated
@Type( ExtendedEnumType.class )
private Status status2;
public | Widget |
java | apache__camel | core/camel-support/src/main/java/org/apache/camel/support/LifecycleStrategySupport.java | {
"start": 1773,
"end": 1846
} | class ____ {@link LifecycleStrategy} implementations.
*/
public abstract | for |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/batch/CompositeIdAndElementCollectionBatchingTest.java | {
"start": 3556,
"end": 3697
} | class ____ {
private String name;
public EmbeddableA() {
}
public EmbeddableA(String name) {
this.name = name;
}
}
}
| EmbeddableA |
java | google__guava | android/guava/src/com/google/common/math/MathPreconditions.java | {
"start": 917,
"end": 3295
} | class ____ {
@CanIgnoreReturnValue
static int checkPositive(String role, int x) {
if (x <= 0) {
throw new IllegalArgumentException(role + " (" + x + ") must be > 0");
}
return x;
}
@CanIgnoreReturnValue
static long checkPositive(String role, long x) {
if (x <= 0) {
throw new IllegalArgumentException(role + " (" + x + ") must be > 0");
}
return x;
}
@CanIgnoreReturnValue
static BigInteger checkPositive(String role, BigInteger x) {
if (x.signum() <= 0) {
throw new IllegalArgumentException(role + " (" + x + ") must be > 0");
}
return x;
}
@CanIgnoreReturnValue
static int checkNonNegative(String role, int x) {
if (x < 0) {
throw new IllegalArgumentException(role + " (" + x + ") must be >= 0");
}
return x;
}
@CanIgnoreReturnValue
static long checkNonNegative(String role, long x) {
if (x < 0) {
throw new IllegalArgumentException(role + " (" + x + ") must be >= 0");
}
return x;
}
@CanIgnoreReturnValue
static BigInteger checkNonNegative(String role, BigInteger x) {
if (x.signum() < 0) {
throw new IllegalArgumentException(role + " (" + x + ") must be >= 0");
}
return x;
}
@CanIgnoreReturnValue
static double checkNonNegative(String role, double x) {
if (!(x >= 0)) { // not x < 0, to work with NaN.
throw new IllegalArgumentException(role + " (" + x + ") must be >= 0");
}
return x;
}
static void checkRoundingUnnecessary(boolean condition) {
if (!condition) {
throw new ArithmeticException("mode was UNNECESSARY, but rounding was necessary");
}
}
static void checkInRangeForRoundingInputs(boolean condition, double input, RoundingMode mode) {
if (!condition) {
throw new ArithmeticException(
"rounded value is out of range for input " + input + " and rounding mode " + mode);
}
}
static void checkNoOverflow(boolean condition, String methodName, int a, int b) {
if (!condition) {
throw new ArithmeticException("overflow: " + methodName + "(" + a + ", " + b + ")");
}
}
static void checkNoOverflow(boolean condition, String methodName, long a, long b) {
if (!condition) {
throw new ArithmeticException("overflow: " + methodName + "(" + a + ", " + b + ")");
}
}
private MathPreconditions() {}
}
| MathPreconditions |
java | apache__flink | flink-formats/flink-hadoop-bulk/src/test/java/org/apache/flink/formats/hadoop/bulk/HadoopPathBasedPartFileWriterITCase.java | {
"start": 2485,
"end": 6055
} | class ____ {
@Test
void testPendingFileRecoverableSerializer() throws IOException {
HadoopPathBasedPendingFileRecoverable recoverable =
new HadoopPathBasedPendingFileRecoverable(
new Path("hdfs://fake/path"), new Path("hdfs://fake/path.inprogress.uuid"));
HadoopPathBasedPendingFileRecoverableSerializer serializer =
new HadoopPathBasedPendingFileRecoverableSerializer();
byte[] serializedBytes = serializer.serialize(recoverable);
HadoopPathBasedPendingFileRecoverable deSerialized =
serializer.deserialize(serializer.getVersion(), serializedBytes);
assertThat(deSerialized.getTargetFilePath()).isEqualTo(recoverable.getTargetFilePath());
assertThat(deSerialized.getTempFilePath()).isEqualTo(recoverable.getTempFilePath());
}
@Test
void testWriteFile(@TempDir java.nio.file.Path tmpDir) throws Exception {
Path basePath = new Path(tmpDir.toUri());
List<String> data = Arrays.asList("first line", "second line", "third line");
StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
env.setParallelism(1);
env.enableCheckpointing(100);
// This data generator source will emit data elements twice with two checkpoints completed
// in between
DataStream<String> stream =
env.fromSource(
TestDataGenerators.fromDataWithSnapshotsLatch(data, Types.STRING),
WatermarkStrategy.noWatermarks(),
"Test Source");
Configuration configuration = new Configuration();
// Elements from source are going to be assigned to one bucket
HadoopPathBasedBulkFormatBuilder<String, String, ?> builder =
new HadoopPathBasedBulkFormatBuilder<>(
basePath,
new TestHadoopPathBasedBulkWriterFactory(),
configuration,
new BasePathBucketAssigner<>());
TestStreamingFileSinkFactory<String> streamingFileSinkFactory =
new TestStreamingFileSinkFactory<>();
stream.addSink(streamingFileSinkFactory.createSink(builder, 1000));
env.execute();
validateResult(data, configuration, basePath);
}
// ------------------------------------------------------------------------
private void validateResult(List<String> expected, Configuration config, Path basePath)
throws IOException {
FileSystem fileSystem = FileSystem.get(basePath.toUri(), config);
FileStatus[] partFiles = fileSystem.listStatus(basePath);
assertThat(partFiles).hasSize(2);
for (FileStatus partFile : partFiles) {
assertThat(partFile.getLen()).isGreaterThan(0);
List<String> fileContent = readHadoopPath(fileSystem, partFile.getPath());
assertThat(fileContent).isEqualTo(expected);
}
}
private List<String> readHadoopPath(FileSystem fileSystem, Path partFile) throws IOException {
try (FSDataInputStream dataInputStream = fileSystem.open(partFile)) {
List<String> lines = new ArrayList<>();
BufferedReader reader = new BufferedReader(new InputStreamReader(dataInputStream));
String line = null;
while ((line = reader.readLine()) != null) {
lines.add(line);
}
return lines;
}
}
}
| HadoopPathBasedPartFileWriterITCase |
java | apache__camel | components/camel-openapi-java/src/main/java/org/apache/camel/openapi/RestOpenApiReader.java | {
"start": 6399,
"end": 48534
} | class ____ to use @return the openApi model
* @throws ClassNotFoundException is thrown if error loading class
* @throws IOException is thrown if error loading openapi specification
* @throws UnknownHostException is thrown if error resolving local hostname
*/
public OpenAPI read(
CamelContext camelContext, List<RestDefinition> rests, BeanConfig config,
String camelContextId, ClassResolver classResolver)
throws ClassNotFoundException, IOException, UnknownHostException {
// contract first, then load the specification as-is and use as response
for (RestDefinition rest : rests) {
if (rest.getOpenApi() != null) {
Resource res
= PluginHelper.getResourceLoader(camelContext).resolveResource(rest.getOpenApi().getSpecification());
if (res != null && res.exists()) {
InputStream is = res.getInputStream();
String data = IOHelper.loadText(is);
IOHelper.close(is);
OpenAPIV3Parser parser = new OpenAPIV3Parser();
SwaggerParseResult out = parser.readContents(data);
OpenAPI answer = out.getOpenAPI();
String host = null;
RestConfiguration restConfig = camelContext.getRestConfiguration();
if (restConfig.getHostNameResolver() != RestConfiguration.RestHostNameResolver.none) {
host = camelContext.getRestConfiguration().getApiHost();
if (host == null || host.isEmpty()) {
String scheme = "http";
int port = 0;
host = RestComponentHelper.resolveRestHostName(host, restConfig);
EmbeddedHttpService server
= CamelContextHelper.findSingleByType(camelContext, EmbeddedHttpService.class);
if (server != null) {
scheme = server.getScheme();
port = server.getServerPort();
}
host = scheme + "://" + host;
if (port > 0 && port != 80) {
host = host + ":" + port;
}
}
}
if (host != null) {
String basePath = RestOpenApiSupport.getBasePathFromOasDocument(answer);
if (basePath == null) {
basePath = "/";
}
if (!basePath.startsWith("/")) {
basePath = "/" + basePath;
}
Server server = new Server();
server.setUrl(host + basePath);
answer.setServers(null);
answer.addServersItem(server);
}
return answer;
}
}
}
OpenAPI openApi = config.isOpenApi31() ? new OpenAPI(SpecVersion.V31) : new OpenAPI();
if (config.getVersion() != null) {
openApi.setOpenapi(config.getVersion());
}
for (RestDefinition rest : rests) {
Boolean disabled = CamelContextHelper.parseBoolean(camelContext, rest.getDisabled());
if (disabled == null || !disabled) {
parse(camelContext, openApi, rest, camelContextId, classResolver, config);
}
}
openApi = shortenClassNames(openApi);
/*
* Fixes the problem of not generating the "paths" section when no rest route is defined.
* A schema with no paths is considered invalid.
*/
if (openApi.getPaths() == null) {
openApi.setPaths(new Paths());
}
/*
* Fixes the problem of generating duplicated tags which is invalid per the specification
*/
if (openApi.getTags() != null) {
openApi.setTags(new ArrayList<>(
openApi.getTags()
.stream()
.collect(Collectors.toMap(
Tag::getName,
Function.identity(),
(prev, current) -> prev,
LinkedHashMap::new))
.values()));
}
// configure before returning
openApi = config.configure(openApi);
checkCompatOpenApi2(openApi, config);
return openApi;
}
private void checkCompatOpenApi2(OpenAPI openApi, BeanConfig config) {
if (config.isOpenApi2()) {
throw new IllegalArgumentException("OpenAPI 2.x is not supported");
}
}
private void parse(
CamelContext camelContext, OpenAPI openApi, RestDefinition rest, String camelContextId,
ClassResolver classResolver, BeanConfig config)
throws ClassNotFoundException {
// only include enabled verbs
List<VerbDefinition> filter = new ArrayList<>();
for (VerbDefinition verb : rest.getVerbs()) {
Boolean disabled = CamelContextHelper.parseBoolean(camelContext, verb.getDisabled());
if (disabled == null || !disabled) {
filter.add(verb);
}
}
List<VerbDefinition> verbs = new ArrayList<>(filter);
// must sort the verbs by uri so we group them together when an uri has multiple operations
verbs.sort(new VerbOrdering(camelContext));
// we need to group the operations within the same tag, so use the path as default if not configured
// Multi tag support for a comma delimeted tag
String[] pathAsTags = null != rest.getTag()
? getValue(camelContext, rest.getTag()).split(",")
: null != rest.getPath()
? new String[] { getValue(camelContext, rest.getPath()) }
: new String[0];
parseOas30(camelContext, openApi, rest, pathAsTags);
// gather all types in use
Set<String> types = new LinkedHashSet<>();
for (VerbDefinition verb : verbs) {
// check if the Verb Definition must be excluded from documentation
String apiDocs;
if (verb.getApiDocs() != null) {
apiDocs = verb.getApiDocs();
} else {
// fallback to option on rest
apiDocs = rest.getApiDocs();
}
if (apiDocs != null && !Boolean.parseBoolean(apiDocs)) {
continue;
}
String type = verb.getType();
if (org.apache.camel.util.ObjectHelper.isNotEmpty(type)) {
if (type.endsWith("[]")) {
type = type.substring(0, type.length() - 2);
}
types.add(type);
}
type = verb.getOutType();
if (org.apache.camel.util.ObjectHelper.isNotEmpty(type)) {
if (type.endsWith("[]")) {
type = type.substring(0, type.length() - 2);
}
types.add(type);
}
// there can also be types in response messages
if (verb.getResponseMsgs() != null) {
for (ResponseMessageDefinition def : verb.getResponseMsgs()) {
type = def.getResponseModel();
if (org.apache.camel.util.ObjectHelper.isNotEmpty(type)) {
if (type.endsWith("[]")) {
type = type.substring(0, type.length() - 2);
}
types.add(type);
}
}
}
}
// use annotation scanner to find models (annotated classes)
for (String type : types) {
Class<?> clazz = classResolver.resolveMandatoryClass(type);
appendModels(clazz, openApi, config.isOpenApi31());
}
doParseVerbs(camelContext, openApi, rest, camelContextId, verbs, pathAsTags, config);
// setup root security node if necessary
List<SecurityDefinition> securityRequirements = rest.getSecurityRequirements();
securityRequirements.forEach(requirement -> {
SecurityRequirement oasRequirement = new SecurityRequirement();
List<String> scopes;
if (requirement.getScopes() == null || requirement.getScopes().isBlank()) {
scopes = Collections.emptyList();
} else {
scopes = Arrays.asList(requirement.getScopes().trim().split("\\s*,\\s*"));
}
oasRequirement.addList(requirement.getKey(), scopes);
openApi.addSecurityItem(oasRequirement);
});
}
private void parseOas30(CamelContext camelContext, OpenAPI openApi, RestDefinition rest, String[] pathAsTags) {
String summary = rest.getDescriptionText();
for (String tag : pathAsTags) {
// add rest as tag
openApi.addTagsItem(new Tag().name(tag).description(summary));
}
// setup security definitions
RestSecuritiesDefinition sd = rest.getSecurityDefinitions();
if (sd != null && !sd.getSecurityDefinitions().isEmpty() && openApi.getComponents() == null) {
openApi.setComponents(new Components());
}
if (sd != null) {
for (RestSecurityDefinition def : sd.getSecurityDefinitions()) {
if (def instanceof BasicAuthDefinition) {
SecurityScheme auth = new SecurityScheme().type(SecurityScheme.Type.HTTP)
.scheme("basic").description(CamelContextHelper.parseText(camelContext, def.getDescription()));
openApi.getComponents().addSecuritySchemes(CamelContextHelper.parseText(camelContext, def.getKey()), auth);
} else if (def instanceof BearerTokenDefinition) {
SecurityScheme auth = new SecurityScheme().type(SecurityScheme.Type.HTTP)
.scheme("bearer").description(CamelContextHelper.parseText(camelContext, def.getDescription()))
.bearerFormat(
(CamelContextHelper.parseText(camelContext, ((BearerTokenDefinition) def).getFormat())));
openApi.getComponents().addSecuritySchemes(CamelContextHelper.parseText(camelContext, def.getKey()), auth);
} else if (def instanceof ApiKeyDefinition) {
ApiKeyDefinition rs = (ApiKeyDefinition) def;
SecurityScheme auth = new SecurityScheme().type(SecurityScheme.Type.APIKEY)
.name(CamelContextHelper.parseText(camelContext, rs.getName()))
.description(CamelContextHelper.parseText(camelContext, def.getDescription()));
if (Boolean.TRUE.equals(CamelContextHelper.parseBoolean(camelContext, rs.getInHeader()))) {
auth.setIn(SecurityScheme.In.HEADER);
} else if (Boolean.TRUE.equals(CamelContextHelper.parseBoolean(camelContext, rs.getInQuery()))) {
auth.setIn(SecurityScheme.In.QUERY);
} else if (Boolean.TRUE.equals(CamelContextHelper.parseBoolean(camelContext, rs.getInCookie()))) {
auth.setIn(SecurityScheme.In.COOKIE);
} else {
throw new IllegalStateException("No API Key location specified.");
}
openApi.getComponents().addSecuritySchemes(CamelContextHelper.parseText(camelContext, def.getKey()), auth);
} else if (def instanceof OAuth2Definition) {
OAuth2Definition rs = (OAuth2Definition) def;
SecurityScheme auth = new SecurityScheme().type(SecurityScheme.Type.OAUTH2)
.description(CamelContextHelper.parseText(camelContext, def.getDescription()));
String flow = CamelContextHelper.parseText(camelContext, rs.getFlow());
if (flow == null) {
flow = inferOauthFlow(camelContext, rs);
}
OAuthFlows oauthFlows = new OAuthFlows();
auth.setFlows(oauthFlows);
OAuthFlow oauthFlow = new OAuthFlow();
switch (flow) {
case "authorizationCode":
case "accessCode":
oauthFlows.setAuthorizationCode(oauthFlow);
break;
case "implicit":
oauthFlows.setImplicit(oauthFlow);
break;
case "clientCredentials":
case "application":
oauthFlows.setClientCredentials(oauthFlow);
break;
case "password":
oauthFlows.setPassword(oauthFlow);
break;
default:
throw new IllegalStateException("Invalid OAuth flow '" + flow + "' specified");
}
oauthFlow.setAuthorizationUrl(CamelContextHelper.parseText(camelContext, rs.getAuthorizationUrl()));
oauthFlow.setTokenUrl(CamelContextHelper.parseText(camelContext, rs.getTokenUrl()));
oauthFlow.setRefreshUrl(CamelContextHelper.parseText(camelContext, rs.getRefreshUrl()));
if (!rs.getScopes().isEmpty()) {
oauthFlow.setScopes(new Scopes());
for (RestPropertyDefinition scope : rs.getScopes()) {
oauthFlow.getScopes().addString(CamelContextHelper.parseText(camelContext, scope.getKey()),
CamelContextHelper.parseText(camelContext, scope.getValue()));
}
}
openApi.getComponents().addSecuritySchemes(CamelContextHelper.parseText(camelContext, def.getKey()), auth);
} else if (def instanceof MutualTLSDefinition) {
SecurityScheme auth = new SecurityScheme().type(SecurityScheme.Type.MUTUALTLS)
.description(CamelContextHelper.parseText(camelContext, def.getDescription()));
openApi.getComponents().addSecuritySchemes(CamelContextHelper.parseText(camelContext, def.getKey()), auth);
} else if (def instanceof OpenIdConnectDefinition) {
SecurityScheme auth = new SecurityScheme().type(SecurityScheme.Type.OPENIDCONNECT)
.description(CamelContextHelper.parseText(camelContext, def.getDescription()));
auth.setOpenIdConnectUrl(
CamelContextHelper.parseText(camelContext, ((OpenIdConnectDefinition) def).getUrl()));
openApi.getComponents().addSecuritySchemes(CamelContextHelper.parseText(camelContext, def.getKey()), auth);
}
}
}
}
private String buildBasePath(CamelContext camelContext, RestDefinition rest) {
// used during gathering of apis
String basePath = FileUtil.stripLeadingSeparator(getValue(camelContext, rest.getPath()));
// must start with leading slash
if (basePath != null && !basePath.startsWith("/")) {
basePath = "/" + basePath;
}
return basePath;
}
private void doParseVerbs(
CamelContext camelContext, OpenAPI openApi, RestDefinition rest, String camelContextId,
List<VerbDefinition> verbs, String[] pathAsTags, BeanConfig config) {
String basePath = buildBasePath(camelContext, rest);
for (VerbDefinition verb : verbs) {
// check if the Verb Definition must be excluded from documentation
String apiDocs;
if (verb.getApiDocs() != null) {
apiDocs = getValue(camelContext, verb.getApiDocs());
} else {
// fallback to option on rest
apiDocs = getValue(camelContext, rest.getApiDocs());
}
if (apiDocs != null && !Boolean.parseBoolean(apiDocs)) {
continue;
}
// the method must be in lower case
String method = verb.asVerb().toLowerCase(Locale.US);
// operation path is a key
String opPath = OpenApiHelper.buildUrl(basePath, getValue(camelContext, verb.getPath()));
if (openApi.getPaths() == null) {
openApi.paths(new Paths());
}
PathItem path = openApi.getPaths().get(opPath);
if (path == null) {
path = new PathItem(); //openApi.paths.createPathItem(opPath);
}
Operation op = new Operation(); //path.createOperation(method);
for (String tag : pathAsTags) {
// group in the same tag
op.addTagsItem(tag);
}
// favour ids from verb, rest, route
final String operationId;
if (verb.getId() != null) {
operationId = getValue(camelContext, verb.getId());
} else if (rest.getId() != null) {
operationId = getValue(camelContext, rest.getId());
} else {
verb.idOrCreate(camelContext.getCamelContextExtension().getContextPlugin(NodeIdFactory.class));
operationId = verb.getId();
}
op.setOperationId(operationId);
// add id as vendor extensions
op.addExtension("x-camelContextId", camelContextId);
path.operation(PathItem.HttpMethod.valueOf(method.toUpperCase()), op);
String consumes = getValue(camelContext, verb.getConsumes() != null ? verb.getConsumes() : rest.getConsumes());
if (consumes == null) {
consumes = config.defaultConsumes;
}
String produces = getValue(camelContext, verb.getProduces() != null ? verb.getProduces() : rest.getProduces());
if (produces == null) {
produces = config.defaultProduces;
}
doParseVerb(camelContext, openApi, verb, op, consumes, produces);
// enrich with configured response messages from the rest-dsl
doParseResponseMessages(camelContext, openApi, verb, op, produces);
// add path
openApi.getPaths().addPathItem(opPath, path);
}
}
private void doParseVerb(
CamelContext camelContext, OpenAPI openApi, VerbDefinition verb, Operation op, String consumes,
String produces) {
if (verb.getDescriptionText() != null) {
op.setSummary(getValue(camelContext, verb.getDescriptionText()));
}
if ("true".equals(verb.getDeprecated())) {
op.setDeprecated(Boolean.TRUE);
}
// security
for (SecurityDefinition sd : verb.getSecurity()) {
List<String> scopes = new ArrayList<>();
if (sd.getScopes() != null) {
for (String scope : ObjectHelper.createIterable(getValue(camelContext, sd.getScopes()))) {
scopes.add(scope);
}
}
SecurityRequirement securityRequirement = new SecurityRequirement(); //op.createSecurityRequirement();
securityRequirement.addList(getValue(camelContext, sd.getKey()), scopes);
op.addSecurityItem(securityRequirement);
}
for (ParamDefinition param : verb.getParams()) {
Parameter parameter = new Parameter().in(param.getType().name());
if (parameter != null) {
parameter.setName(getValue(camelContext, param.getName()));
if (org.apache.camel.util.ObjectHelper.isNotEmpty(param.getDescription())) {
parameter.setDescription(getValue(camelContext, param.getDescription()));
}
parameter.setRequired(param.getRequired());
final String dataType
= getValue(camelContext, param.getDataType() != null ? param.getDataType() : "string");
// set type on parameter
if (!"body".equals(parameter.getIn())) {
Schema schema = new Schema<>();
final boolean isArray = "array".equalsIgnoreCase(dataType);
final List<String> allowableValues = getValue(camelContext, param.getAllowableValuesAsStringList());
final boolean hasAllowableValues = allowableValues != null && !allowableValues.isEmpty();
parameter.setSchema(schema);
schema.setType(dataType);
if (openApi.getSpecVersion().equals(SpecVersion.V31)) {
schema.addType(dataType);
}
if (param.getDataFormat() != null) {
schema.setFormat(getValue(camelContext, param.getDataFormat()));
}
if (isArray) {
String arrayType = getValue(camelContext, param.getArrayType());
if (arrayType != null) {
if (arrayType.equalsIgnoreCase("string")) {
defineSchemas(parameter, allowableValues, String.class);
}
if (arrayType.equalsIgnoreCase("int") || arrayType.equalsIgnoreCase("integer")) {
defineSchemas(parameter, allowableValues, Integer.class);
}
if (arrayType.equalsIgnoreCase("long")) {
defineSchemas(parameter, allowableValues, Long.class);
}
if (arrayType.equalsIgnoreCase("float")) {
defineSchemas(parameter, allowableValues, Float.class);
}
if (arrayType.equalsIgnoreCase("double")) {
defineSchemas(parameter, allowableValues, Double.class);
}
if (arrayType.equalsIgnoreCase("boolean")) {
defineSchemas(parameter, allowableValues, Boolean.class);
}
if (arrayType.equalsIgnoreCase("byte")) {
defineSchemas(parameter, allowableValues, ByteArraySchema.class);
}
if (arrayType.equalsIgnoreCase("binary")) {
defineSchemas(parameter, allowableValues, BinarySchema.class);
}
if (arrayType.equalsIgnoreCase("date")) {
defineSchemas(parameter, allowableValues, DateSchema.class);
}
if (arrayType.equalsIgnoreCase("date-time")) {
defineSchemas(parameter, allowableValues, DateTimeSchema.class);
}
if (arrayType.equalsIgnoreCase("password")) {
defineSchemas(parameter, allowableValues, PasswordSchema.class);
}
}
}
if (param.getCollectionFormat() != null) {
parameter.setStyle(convertToOpenApiStyle(getValue(camelContext, param.getCollectionFormat().name())));
}
if (hasAllowableValues && !isArray) {
schema.setEnum(allowableValues);
}
// set default value on parameter
if (org.apache.camel.util.ObjectHelper.isNotEmpty(param.getDefaultValue())) {
schema.setDefault(getValue(camelContext, param.getDefaultValue()));
}
// add examples
if (param.getExamples() != null && !param.getExamples().isEmpty()) {
// Examples can be added with a key or a single one with no key
for (RestPropertyDefinition example : param.getExamples()) {
if (example.getKey().isEmpty()) {
if (parameter.getExample() != null) {
LOG.warn("The parameter already has an example with no key!");
}
parameter.setExample(example.getValue());
} else {
parameter.addExample(example.getKey(), new Example().value(example.getValue()));
}
}
}
op.addParametersItem(parameter);
}
// In OpenAPI 3x, body or form parameters are replaced by requestBody
if (parameter.getIn().equals("body")) {
RequestBody reqBody = new RequestBody().content(new Content());
reqBody.setRequired(param.getRequired());
reqBody.setDescription(getValue(camelContext, param.getDescription()));
op.setRequestBody(reqBody);
String type = getValue(camelContext, verb.getType());
if (type == null) {
type = dataType;
}
Schema<?> bodySchema = null;
if (type != null) {
if (type.endsWith("[]")) {
bodySchema = modelTypeAsProperty(type, openApi);
} else {
String ref = modelTypeAsRef(type, openApi);
if (ref != null) {
bodySchema = new Schema().$ref(OAS30_SCHEMA_DEFINITION_PREFIX + ref);
} else {
bodySchema = modelTypeAsProperty(type, openApi);
}
}
}
if (consumes != null) {
String[] parts = consumes.split(",");
for (String part : parts) {
MediaType mediaType = new MediaType().schema(bodySchema);
if (param.getExamples() != null) {
for (RestPropertyDefinition example : param.getExamples()) {
if (part.equals(example.getKey())) {
mediaType.setExample(example.getValue());
}
// TODO: Check for non-matched or empty key
}
}
reqBody.getContent().addMediaType(part, mediaType);
}
}
}
}
}
// clear parameters if its empty
if (op.getParameters() != null && op.getParameters().isEmpty()) {
// op.parameters.clear();
op.setParameters(null); // Is this necessary?
}
// if we have an out type then set that as response message
if (verb.getOutType() != null) {
if (op.getResponses() == null) {
op.setResponses(new ApiResponses());
}
String[] parts;
if (produces != null) {
parts = produces.split(",");
for (String produce : parts) {
ApiResponse response = new ApiResponse().description("Output type"); // ??
Content responseContent = new Content();
MediaType contentType = new MediaType();
responseContent.addMediaType(produce, contentType);
Schema<?> model = modelTypeAsProperty(getValue(camelContext, verb.getOutType()), openApi);
contentType.setSchema(model);
response.setContent(responseContent);
op.getResponses().addApiResponse("200", response);
}
}
}
}
/**
 * Maps a Camel {@code CollectionFormat} constant name onto the corresponding
 * OpenAPI 3.x parameter style, or {@code null} when no mapping exists.
 */
private StyleEnum convertToOpenApiStyle(String value) {
    // The value is expected to be a CollectionFormat enum constant name;
    // valueOf throws IllegalArgumentException for anything else.
    CollectionFormat format = CollectionFormat.valueOf(value);
    if (format == CollectionFormat.csv) {
        return StyleEnum.FORM;
    }
    if (format == CollectionFormat.ssv || format == CollectionFormat.tsv) {
        return StyleEnum.SPACEDELIMITED;
    }
    if (format == CollectionFormat.pipes) {
        return StyleEnum.PIPEDELIMITED;
    }
    if (format == CollectionFormat.multi) {
        return StyleEnum.DEEPOBJECT;
    }
    return null;
}
/**
 * Populates the schema of the given parameter: applies the allowable values as
 * an enum (converting non-string values to the target type) and, for array
 * parameters, installs an items schema matching the given type.
 */
private static void defineSchemas(
        final Parameter serializableParameter,
        final List<String> allowableValues,
        final Class<?> type) {
    final Schema schema = serializableParameter.getSchema();
    if (allowableValues != null && !allowableValues.isEmpty()) {
        // string enums can be applied verbatim; other types need conversion first
        if (String.class.equals(type)) {
            schema.setEnum(allowableValues);
        } else {
            convertAndSetItemsEnum(schema, allowableValues, type);
        }
    }
    if (!Objects.equals(schema.getType(), "array")) {
        return;
    }
    // pick the items schema matching the requested element type,
    // falling back to string for anything unrecognized
    final Schema<?> items;
    if (Integer.class.equals(type)) {
        items = new IntegerSchema();
    } else if (Long.class.equals(type)) {
        items = new IntegerSchema().format("int64");
    } else if (Float.class.equals(type)) {
        items = new NumberSchema().format("float");
    } else if (Double.class.equals(type)) {
        items = new NumberSchema().format("double");
    } else if (Boolean.class.equals(type)) {
        items = new BooleanSchema();
    } else if (ByteArraySchema.class.equals(type)) {
        items = new ByteArraySchema();
    } else if (BinarySchema.class.equals(type)) {
        items = new BinarySchema();
    } else if (DateSchema.class.equals(type)) {
        items = new DateSchema();
    } else if (DateTimeSchema.class.equals(type)) {
        items = new DateTimeSchema();
    } else if (PasswordSchema.class.equals(type)) {
        items = new PasswordSchema();
    } else {
        items = new StringSchema();
    }
    schema.setItems(items);
}
/**
 * Converts the raw string allowable values to the target type and installs them
 * as the enum of the given schema. Conversion is done reflectively: primitive
 * wrapper types go through their static {@code valueOf(String)}, while direct
 * {@code Schema} subclasses go through their protected {@code cast(Object)}.
 */
private static void convertAndSetItemsEnum(
        final Schema items, final List<String> allowableValues,
        final Class<?> type) {
    try {
        // primitive wrappers (Integer, Long, ...) expose a static valueOf(String)
        final MethodHandle valueOf = ClassUtils.isPrimitiveWrapper(type)
                ? publicLookup().findStatic(type, "valueOf", MethodType.methodType(type, String.class)) : null;
        // bound handle: invoking it calls items.setEnum(List)
        final MethodHandle setEnum = publicLookup().bind(items, "setEnum",
                MethodType.methodType(void.class, List.class));
        // direct Schema subclasses (DateSchema, ...) convert strings via cast(Object)
        final Method castSchema
                = type.getSuperclass().equals(Schema.class) ? type.getDeclaredMethod("cast", Object.class) : null;
        if (castSchema != null) {
            castSchema.setAccessible(true); // cast(Object) is protected on Schema subtypes
        }
        // a throwaway schema instance serves as the receiver for cast(Object)
        final Object schema = castSchema != null ? type.getDeclaredConstructor().newInstance() : null;
        final List<?> values = allowableValues.stream().map(v -> {
            try {
                if (valueOf != null) {
                    return valueOf.invoke(v);
                } else if (castSchema != null) {
                    return castSchema.invoke(schema, v);
                } else {
                    // neither conversion strategy applies to this type
                    throw new RuntimeException("Can not convert allowable value " + v);
                }
            } catch (RuntimeException e) {
                throw e;
            } catch (Throwable e) {
                // MethodHandle.invoke declares Throwable; wrap checked failures
                throw new IllegalStateException(e);
            }
        }).collect(Collectors.toList());
        setEnum.invoke(values);
    } catch (RuntimeException e) {
        throw e;
    } catch (Throwable e) {
        throw new IllegalStateException(e);
    }
}
/**
 * Adds the response messages declared on the verb to the operation, ensuring
 * the operation always ends up with at least one (possibly empty) response.
 */
private void doParseResponseMessages(
        CamelContext camelContext, OpenAPI openApi, VerbDefinition verb, Operation op, String produces) {
    if (op.getResponses() == null) {
        op.setResponses(new ApiResponses());
    }
    verb.getResponseMsgs().forEach(msg -> doParseResponse(camelContext, openApi, op, produces, msg));
    // OpenAPI requires at least one response entry, so fall back to a no-op default
    if (op.getResponses().isEmpty()) {
        op.getResponses().addApiResponse(ApiResponses.DEFAULT, new ApiResponse());
    }
}
/**
 * Adds (or augments) the {@code ApiResponse} for a single response-message
 * definition: sets the response model per produced media type, the
 * description, typed headers (including array headers), and examples.
 */
private void doParseResponse(
        CamelContext camelContext, OpenAPI openApi, Operation op, String produces,
        ResponseMessageDefinition msg) {
    ApiResponse response = null;
    String code = getValue(camelContext, msg.getCode());
    // reuse an existing response for this status code, creating it if absent
    response = op.getResponses().get(code);
    if (response == null) {
        response = new ApiResponse();
        op.getResponses().addApiResponse(code, response);
    }
    if (org.apache.camel.util.ObjectHelper.isNotEmpty(msg.getResponseModel())) {
        String[] parts;
        if (produces != null) {
            // one media-type entry per produced content type, all sharing the model schema
            Content respContent = new Content();
            parts = produces.split(",");
            for (String produce : parts) {
                Schema model = modelTypeAsProperty(getValue(camelContext, msg.getResponseModel()), openApi);
                respContent.addMediaType(produce, new MediaType().schema(model));
            }
            response.setContent(respContent);
        }
    }
    if (org.apache.camel.util.ObjectHelper.isNotEmpty(msg.getMessage())) {
        response.setDescription(getValue(camelContext, msg.getMessage()));
    }
    // add headers
    if (msg.getHeaders() != null) {
        for (ResponseHeaderDefinition header : msg.getHeaders()) {
            String name = getValue(camelContext, header.getName());
            String type = getValue(camelContext, header.getDataType());
            String format = getValue(camelContext, header.getDataFormat());
            if ("string".equals(type) || "long".equals(type) || "float".equals(type)
                    || "double".equals(type) || "boolean".equals(type)) {
                setResponseHeader(camelContext, response, header, name, format, type);
            } else if ("int".equals(type) || "integer".equals(type)) {
                // normalize the rest-dsl "int" alias onto the OpenAPI "integer" type
                setResponseHeader(camelContext, response, header, name, format, "integer");
            } else if ("array".equals(type)) {
                Header ap = new Header();
                response.addHeaderObject(name, ap);
                if (org.apache.camel.util.ObjectHelper.isNotEmpty(header.getDescription())) {
                    ap.setDescription(getValue(camelContext, header.getDescription()));
                }
                if (header.getArrayType() != null) {
                    String arrayType = getValue(camelContext, header.getArrayType());
                    if (arrayType.equalsIgnoreCase("string")
                            || arrayType.equalsIgnoreCase("long")
                            || arrayType.equalsIgnoreCase("float")
                            || arrayType.equalsIgnoreCase("double")
                            || arrayType.equalsIgnoreCase("boolean")) {
                        setHeaderSchemaOas30(ap, arrayType);
                    } else if (arrayType.equalsIgnoreCase("int")
                            || arrayType.equalsIgnoreCase("integer")) {
                        setHeaderSchemaOas30(ap, "integer");
                    }
                }
                // add example
                if (header.getExample() != null) {
                    ap.addExample("", new Example().value(getValue(camelContext, header.getExample())));
                }
            }
        }
    }
    // add examples
    if (msg.getExamples() != null) {
        if (response.getContent() != null) {
            for (MediaType mediaType : response.getContent().values()) {
                for (RestPropertyDefinition prop : msg.getExamples()) {
                    mediaType.addExamples(getValue(camelContext, prop.getKey()), new Example()
                            .value(getValue(camelContext, prop.getValue())));
                }
            }
        }
        // if no content, can't add examples!
    }
}
/**
 * Installs a simple typed schema on the given response header (used for the
 * item type of array headers).
 */
private void setHeaderSchemaOas30(Header ap, String arrayType) {
    ap.setSchema(new Schema().type(arrayType));
}
/**
 * Adds a typed header to the given response, carrying the optional format,
 * description, allowable values (enum) and example from the rest-dsl header
 * definition.
 */
private void setResponseHeader(
        CamelContext camelContext, ApiResponse response, ResponseHeaderDefinition header,
        String name, String format, String type) {
    Header headerObject = new Header();
    response.addHeaderObject(name, headerObject);
    Schema schema = new Schema().type(type);
    if (format != null) {
        schema.setFormat(format);
    }
    headerObject.setSchema(schema);
    headerObject.setDescription(getValue(camelContext, header.getDescription()));
    if (header.getAllowableValues() != null) {
        // resolve each allowable value (they may contain property placeholders)
        List<String> resolved = new ArrayList<>();
        for (String text : header.getAllowableValuesAsStringList()) {
            resolved.add(getValue(camelContext, text));
        }
        schema.setEnum(resolved);
    }
    // add example
    if (header.getExample() != null) {
        headerObject.addExample("", new Example().value(getValue(camelContext, header.getExample())));
    }
}
/**
 * Resolves the given type name (array suffix "[]" is ignored) to the name of a
 * schema already registered in the OpenAPI components, or {@code null} for
 * primitive/unreferenced types or unknown models.
 */
private String modelTypeAsRef(String typeName, OpenAPI openApi) {
    String name = typeName.endsWith("[]") ? typeName.substring(0, typeName.length() - 2) : typeName;
    if (NO_REFERENCE_TYPE_NAMES.contains(name)) {
        return null;
    }
    if (openApi.getComponents() == null || openApi.getComponents().getSchemas() == null) {
        return null;
    }
    // match by the x-className vendor extension recorded on registered schemas
    for (Schema model : openApi.getComponents().getSchemas().values()) {
        if (name.equals(getClassNameExtension(model))) {
            return model.getName();
        }
    }
    return null;
}
/**
 * Looks up the "x-className" vendor extension on the schema and returns its
 * "format" entry (the fully qualified class name), or {@code null} if absent.
 */
private Object getClassNameExtension(Schema model) {
    if (model.getExtensions() == null) {
        return null;
    }
    Object extension = model.getExtensions().get("x-className");
    if (extension instanceof Map) {
        return ((Map) extension).get("format");
    }
    return null;
}
/**
 * Converts a model type name (optionally with a trailing "[]" for arrays) into
 * an OpenAPI schema: either a $ref to a registered component schema, or the
 * built-in schema matching the primitive/simple type, wrapped in an array
 * schema when requested.
 */
private Schema<?> modelTypeAsProperty(String typeName, OpenAPI openApi) {
    Schema<?> prop = null;
    boolean array = typeName.endsWith("[]");
    if (array) {
        typeName = typeName.substring(0, typeName.length() - 2);
    }
    String ref = modelTypeAsRef(typeName, openApi);
    if (ref == null) {
        // No explicit schema reference so handle primitive types
        // special for byte arrays
        if (array && ("byte".equals(typeName) || "java.lang.Byte".equals(typeName))) {
            // Note built-in ByteArraySchema sets type="string" !
            prop = new Schema<byte[]>().type("number").format("byte");
            array = false;
        } else if ("string".equalsIgnoreCase(typeName) || "java.lang.String".equals(typeName)) {
            prop = new StringSchema();
        } else if ("int".equals(typeName) || "java.lang.Integer".equals(typeName)) {
            prop = new IntegerSchema();
        } else if ("long".equals(typeName) || "java.lang.Long".equals(typeName)) {
            prop = new IntegerSchema().format("int64");
        } else if ("float".equals(typeName) || "java.lang.Float".equals(typeName)) {
            prop = new NumberSchema().format("float");
        } else if ("double".equals(typeName) || "java.lang.Double".equals(typeName)) {
            prop = new NumberSchema().format("double");
        } else if ("boolean".equals(typeName) || "java.lang.Boolean".equals(typeName)) {
            // FIX: booleans were previously emitted as type "number" with a bogus
            // "boolean" format; OpenAPI has a first-class boolean type.
            prop = new BooleanSchema();
        } else if ("file".equals(typeName) || "java.io.File".equals(typeName)) {
            prop = new FileSchema();
        } else {
            // unknown simple types degrade gracefully to plain strings
            prop = new StringSchema();
        }
    }
    if (array) {
        // NOTE(review): for non-referenced element types the computed primitive
        // schema is discarded and the items schema is left untyped — looks
        // intentional (loose typing) but worth confirming against the spec.
        Schema<?> items = new Schema<>();
        if (ref != null) {
            items.set$ref(OAS30_SCHEMA_DEFINITION_PREFIX + ref);
        }
        prop = new ArraySchema().items(items);
    } else if (prop == null) {
        // non-array reference type: emit a $ref to the component schema
        prop = new Schema<>().$ref(OAS30_SCHEMA_DEFINITION_PREFIX + ref);
    }
    return prop;
}
/**
* If the | resolver |
java | elastic__elasticsearch | server/src/test/java/org/elasticsearch/common/settings/SettingTests.java | {
"start": 30996,
"end": 77777
/**
 * Simple holder used by the composite-updater tests below: captures the last
 * (a, b) pair applied by the compound updater and validates that both values
 * have the same sign, throwing "boom" otherwise.
 * FIX: the class was declared with a placeholder name while every test in this
 * file instantiates {@code Composite}; renamed so the file compiles.
 */
class Composite {

    // last values received via set(...) — read directly by the tests
    private Integer b;
    private Integer a;

    public void set(Integer a, Integer b) {
        this.a = a;
        this.b = b;
    }

    public void validate(Integer a, Integer b) {
        // both values must share the same sign (zero counts as its own sign)
        if (Integer.signum(a) != Integer.signum(b)) {
            throw new IllegalArgumentException("boom");
        }
    }
}
/**
 * Verifies that a compound updater built from two int settings applies both
 * values together, skips no-op updates (retaining the same boxed instances),
 * and resets to defaults when the settings are removed.
 */
public void testComposite() {
    Composite c = new Composite();
    Setting<Integer> a = Setting.intSetting("foo.int.bar.a", 1, Property.Dynamic, Property.NodeScope);
    Setting<Integer> b = Setting.intSetting("foo.int.bar.b", 1, Property.Dynamic, Property.NodeScope);
    ClusterSettings.SettingUpdater<Tuple<Integer, Integer>> settingUpdater = Setting.compoundUpdater(c::set, c::validate, a, b, logger);
    // no change -> consumer not invoked
    assertFalse(settingUpdater.apply(Settings.EMPTY, Settings.EMPTY));
    assertNull(c.a);
    assertNull(c.b);
    Settings build = Settings.builder().put("foo.int.bar.a", 2).build();
    assertTrue(settingUpdater.apply(build, Settings.EMPTY));
    assertEquals(2, c.a.intValue());
    assertEquals(1, c.b.intValue());
    // identical settings -> no update, same boxed instance retained
    Integer aValue = c.a;
    assertFalse(settingUpdater.apply(build, build));
    assertSame(aValue, c.a);
    Settings previous = build;
    build = Settings.builder().put("foo.int.bar.a", 2).put("foo.int.bar.b", 5).build();
    assertTrue(settingUpdater.apply(build, previous));
    assertEquals(2, c.a.intValue());
    assertEquals(5, c.b.intValue());
    // reset to default
    assertTrue(settingUpdater.apply(Settings.EMPTY, build));
    assertEquals(1, c.a.intValue());
    assertEquals(1, c.b.intValue());
}
/**
 * Same as {@link #testComposite()} but additionally verifies that the compound
 * updater's validator rejects inconsistent (different-sign) value pairs.
 */
public void testCompositeValidator() {
    Composite c = new Composite();
    Setting<Integer> a = Setting.intSetting("foo.int.bar.a", 1, Property.Dynamic, Property.NodeScope);
    Setting<Integer> b = Setting.intSetting("foo.int.bar.b", 1, Property.Dynamic, Property.NodeScope);
    ClusterSettings.SettingUpdater<Tuple<Integer, Integer>> settingUpdater = Setting.compoundUpdater(c::set, c::validate, a, b, logger);
    assertFalse(settingUpdater.apply(Settings.EMPTY, Settings.EMPTY));
    assertNull(c.a);
    assertNull(c.b);
    Settings build = Settings.builder().put("foo.int.bar.a", 2).build();
    assertTrue(settingUpdater.apply(build, Settings.EMPTY));
    assertEquals(2, c.a.intValue());
    assertEquals(1, c.b.intValue());
    // identical settings -> no update, same boxed instance retained
    Integer aValue = c.a;
    assertFalse(settingUpdater.apply(build, build));
    assertSame(aValue, c.a);
    Settings previous = build;
    build = Settings.builder().put("foo.int.bar.a", 2).put("foo.int.bar.b", 5).build();
    assertTrue(settingUpdater.apply(build, previous));
    assertEquals(2, c.a.intValue());
    assertEquals(5, c.b.intValue());
    // mixed signs are rejected by Composite#validate with message "boom"
    Settings invalid = Settings.builder().put("foo.int.bar.a", -2).put("foo.int.bar.b", 5).build();
    IllegalArgumentException exc = expectThrows(IllegalArgumentException.class, () -> settingUpdater.apply(invalid, previous));
    assertThat(exc.getMessage(), equalTo("boom"));
    // reset to default
    assertTrue(settingUpdater.apply(Settings.EMPTY, build));
    assertEquals(1, c.a.intValue());
    assertEquals(1, c.b.intValue());
}
/**
 * A list setting exists when configured either as a comma-separated value or
 * in the indexed {@code key.N} form, and never in empty settings.
 */
public void testListKeyExists() {
    final Setting<List<String>> listSetting = Setting.listSetting(
        "foo",
        Collections.singletonList("bar"),
        Function.identity(),
        Property.NodeScope
    );
    // comma-separated form
    final Settings commaSeparated = Settings.builder().put("foo", "bar1,bar2").build();
    assertFalse(listSetting.exists(Settings.EMPTY));
    assertTrue(listSetting.exists(commaSeparated));
    // indexed (foo.0, foo.1) form
    final Settings indexed = Settings.builder().put("foo.0", "foo1").put("foo.1", "foo2").build();
    assertFalse(listSetting.exists(Settings.EMPTY));
    assertTrue(listSetting.exists(indexed));
}
/**
 * Accessing a list setting marked {@code DeprecatedWarning} emits a
 * deprecation warning for both the comma-separated and the indexed key forms,
 * while a non-deprecated sibling setting stays silent.
 */
public void testListSettingsDeprecated() {
    final Setting<List<String>> deprecatedListSetting = Setting.listSetting(
        "foo.deprecated",
        Collections.singletonList("foo.deprecated"),
        Function.identity(),
        Property.DeprecatedWarning,
        Property.NodeScope
    );
    final Setting<List<String>> nonDeprecatedListSetting = Setting.listSetting(
        "foo.non_deprecated",
        Collections.singletonList("foo.non_deprecated"),
        Function.identity(),
        Property.NodeScope
    );
    // comma-separated value form
    Settings settings = Settings.builder()
        .put("foo.deprecated", "foo.deprecated1,foo.deprecated2")
        .put("foo.non_deprecated", "foo.non_deprecated1,foo.non_deprecated2")
        .build();
    deprecatedListSetting.get(settings);
    nonDeprecatedListSetting.get(settings);
    assertSettingDeprecationsAndWarnings(new Setting<?>[] { deprecatedListSetting });
    // indexed (key.N) form triggers the same warning
    settings = Settings.builder()
        .put("foo.deprecated.0", "foo.deprecated1")
        .put("foo.deprecated.1", "foo.deprecated2")
        .put("foo.non_deprecated.0", "foo.non_deprecated1")
        .put("foo.non_deprecated.1", "foo.non_deprecated2")
        .build();
    deprecatedListSetting.get(settings);
    nonDeprecatedListSetting.get(settings);
    assertSettingDeprecationsAndWarnings(new Setting<?>[] { deprecatedListSetting });
}
/**
 * End-to-end behavior of list settings: default values, comma-separated and
 * indexed parsing, updater notifications on change, reset to default, typed
 * element parsing, and fallback to another list setting.
 */
public void testListSettings() {
    Setting<List<String>> listSetting = Setting.listSetting(
        "foo.bar",
        Arrays.asList("foo,bar"),
        (s) -> s.toString(),
        Property.Dynamic,
        Property.NodeScope
    );
    // the default is returned verbatim (a single un-split element)
    List<String> value = listSetting.get(Settings.EMPTY);
    assertFalse(listSetting.exists(Settings.EMPTY));
    assertEquals(1, value.size());
    assertEquals("foo,bar", value.get(0));
    List<String> input = Arrays.asList("test", "test1, test2", "test", ",,,,");
    Settings.Builder builder = Settings.builder().putList("foo.bar", input.toArray(new String[0]));
    assertTrue(listSetting.exists(builder.build()));
    value = listSetting.get(builder.build());
    assertEquals(input.size(), value.size());
    assertArrayEquals(value.toArray(new String[0]), input.toArray(new String[0]));
    // try to parse this really annoying format
    builder = Settings.builder();
    for (int i = 0; i < input.size(); i++) {
        builder.put("foo.bar." + i, input.get(i));
    }
    value = listSetting.get(builder.build());
    assertEquals(input.size(), value.size());
    assertArrayEquals(value.toArray(new String[0]), input.toArray(new String[0]));
    assertTrue(listSetting.exists(builder.build()));
    // updater pushes every change (including reset to default) to the consumer
    AtomicReference<List<String>> ref = new AtomicReference<>();
    AbstractScopedSettings.SettingUpdater<List<String>> settingUpdater = listSetting.newUpdater(ref::set, logger);
    assertTrue(settingUpdater.hasChanged(builder.build(), Settings.EMPTY));
    settingUpdater.apply(builder.build(), Settings.EMPTY);
    assertEquals(input.size(), ref.get().size());
    assertArrayEquals(ref.get().toArray(new String[0]), input.toArray(new String[0]));
    settingUpdater.apply(Settings.builder().putList("foo.bar", "123").build(), builder.build());
    assertEquals(1, ref.get().size());
    assertArrayEquals(ref.get().toArray(new String[0]), new String[] { "123" });
    settingUpdater.apply(Settings.builder().put("foo.bar", "1,2,3").build(), Settings.builder().putList("foo.bar", "123").build());
    assertEquals(3, ref.get().size());
    assertArrayEquals(ref.get().toArray(new String[0]), new String[] { "1", "2", "3" });
    settingUpdater.apply(Settings.EMPTY, Settings.builder().put("foo.bar", "1,2,3").build());
    assertEquals(1, ref.get().size());
    assertEquals("foo,bar", ref.get().get(0));
    // typed elements are parsed per entry
    Setting<List<Integer>> otherSettings = Setting.listSetting(
        "foo.bar",
        Collections.emptyList(),
        Integer::parseInt,
        Property.Dynamic,
        Property.NodeScope
    );
    List<Integer> defaultValue = otherSettings.get(Settings.EMPTY);
    assertEquals(0, defaultValue.size());
    List<Integer> intValues = otherSettings.get(Settings.builder().put("foo.bar", "0,1,2,3").build());
    assertEquals(4, intValues.size());
    for (int i = 0; i < intValues.size(); i++) {
        assertEquals(i, intValues.get(i).intValue());
    }
    // a setting with a fallback delegates to it unless set itself
    Setting<List<String>> settingWithFallback = Setting.listSetting(
        "foo.baz",
        listSetting,
        Function.identity(),
        Property.Dynamic,
        Property.NodeScope
    );
    value = settingWithFallback.get(Settings.EMPTY);
    assertEquals(1, value.size());
    assertEquals("foo,bar", value.get(0));
    value = settingWithFallback.get(Settings.builder().putList("foo.bar", "1", "2").build());
    assertEquals(2, value.size());
    assertEquals("1", value.get(0));
    assertEquals("2", value.get(1));
    value = settingWithFallback.get(Settings.builder().putList("foo.baz", "3", "4").build());
    assertEquals(2, value.size());
    assertEquals("3", value.get(0));
    assertEquals("4", value.get(1));
    // the setting's own value wins over the fallback when both are present
    value = settingWithFallback.get(Settings.builder().putList("foo.baz", "3", "4").putList("foo.bar", "1", "2").build());
    assertEquals(2, value.size());
    assertEquals("3", value.get(0));
    assertEquals("4", value.get(1));
}
/**
 * A list setting's matcher accepts both the plain key and the indexed
 * {@code key.N} syntax, but rejects unrelated keys.
 */
public void testListSettingAcceptsNumberSyntax() {
    Setting<List<String>> listSetting = Setting.listSetting(
        "foo.bar",
        Arrays.asList("foo,bar"),
        (s) -> s.toString(),
        Property.Dynamic,
        Property.NodeScope
    );
    List<String> input = Arrays.asList("test", "test1, test2", "test", ",,,,");
    Settings.Builder builder = Settings.builder().putList("foo.bar", input.toArray(new String[0]));
    // try to parse this really annoying format
    for (String key : builder.keys()) {
        assertTrue("key: " + key + " doesn't match", listSetting.match(key));
    }
    builder = Settings.builder().put("foo.bar", "1,2,3");
    for (String key : builder.keys()) {
        assertTrue("key: " + key + " doesn't match", listSetting.match(key));
    }
    // unrelated keys must not match, any numeric suffix must
    assertFalse(listSetting.match("foo_bar"));
    assertFalse(listSetting.match("foo_bar.1"));
    assertTrue(listSetting.match("foo.bar"));
    assertTrue(listSetting.match("foo.bar." + randomIntBetween(0, 10000)));
}
/**
 * Prefix key settings match any key under the prefix and produce concrete
 * settings per key; asking for a key outside the prefix fails with a clear
 * message.
 */
public void testDynamicKeySetting() {
    Setting<Boolean> setting = Setting.prefixKeySetting("foo.", (key) -> Setting.boolSetting(key, false, Property.NodeScope));
    assertTrue(setting.hasComplexMatcher());
    assertTrue(setting.match("foo.bar"));
    assertFalse(setting.match("foo"));
    Setting<Boolean> concreteSetting = setting.getConcreteSetting("foo.bar");
    assertTrue(concreteSetting.get(Settings.builder().put("foo.bar", "true").build()));
    assertFalse(concreteSetting.get(Settings.builder().put("foo.baz", "true").build()));
    // FIX: replaced the legacy try/fail/catch pattern with expectThrows for
    // consistency with the rest of this test class
    IllegalArgumentException ex = expectThrows(IllegalArgumentException.class, () -> setting.getConcreteSetting("foo"));
    assertEquals("key [foo] must match [foo.] but didn't.", ex.getMessage());
}
/**
 * A prefix key setting with a secondary (fallback) prefix matches both
 * prefixes in getAsMap, and the primary prefix wins when both are configured.
 */
public void testPrefixKeySettingFallbackAsMap() {
    Setting.AffixSetting<Boolean> setting = Setting.prefixKeySetting(
        "foo.",
        "bar.",
        (ns, key) -> Setting.boolSetting(key, false, Property.NodeScope)
    );
    assertTrue(setting.match("foo.bar"));
    assertTrue(setting.match("bar.bar"));
    // primary prefix only
    Map<String, Boolean> map = setting.getAsMap(Settings.builder().put("foo.bar", "true").build());
    assertEquals(1, map.size());
    assertTrue(map.get("bar"));
    // fallback prefix only
    map = setting.getAsMap(Settings.builder().put("bar.bar", "true").build());
    assertEquals(1, map.size());
    assertTrue(map.get("bar"));
    // Prefer primary
    map = setting.getAsMap(Settings.builder().put("foo.bar", "false").put("bar.bar", "true").build());
    assertEquals(1, map.size());
    assertFalse(map.get("bar"));
}
/**
 * Affix settings ({@code prefix.<namespace>.suffix}) match exactly one
 * namespace element between prefix and suffix; concrete settings are derived
 * per full key, and malformed prefixes are rejected at construction time.
 */
public void testAffixKeySetting() {
    Setting<Boolean> setting = Setting.affixKeySetting("foo.", "enable", (key) -> Setting.boolSetting(key, false, Property.NodeScope));
    assertTrue(setting.hasComplexMatcher());
    // exactly one namespace element is allowed between prefix and suffix
    assertTrue(setting.match("foo.bar.enable"));
    assertTrue(setting.match("foo.baz.enable"));
    assertFalse(setting.match("foo.bar.baz.enable"));
    assertFalse(setting.match("foo.bar"));
    assertFalse(setting.match("foo.bar.baz.enabled"));
    assertFalse(setting.match("foo"));
    Setting<Boolean> concreteSetting = setting.getConcreteSetting("foo.bar.enable");
    assertTrue(concreteSetting.get(Settings.builder().put("foo.bar.enable", "true").build()));
    assertFalse(concreteSetting.get(Settings.builder().put("foo.baz.enable", "true").build()));
    IllegalArgumentException exc = expectThrows(IllegalArgumentException.class, () -> setting.getConcreteSetting("foo"))
;
    assertEquals("key [foo] must match [foo.*.enable] but didn't.", exc.getMessage());
    // prefixes must end with a '.' for both affix and prefix settings
    exc = expectThrows(
        IllegalArgumentException.class,
        () -> Setting.affixKeySetting("foo", "enable", (key) -> Setting.boolSetting(key, false, Property.NodeScope))
    );
    assertEquals("prefix must end with a '.'", exc.getMessage());
    exc = expectThrows(
        IllegalArgumentException.class,
        () -> Setting.prefixKeySetting("foo.", "bar", (ns, key) -> Setting.boolSetting(key, false, Property.NodeScope))
    );
    assertEquals("prefix must end with a '.'", exc.getMessage());
    // same matching rules hold for list-valued affix settings
    Setting<List<String>> listAffixSetting = Setting.affixKeySetting(
        "foo.",
        "bar",
        (key) -> Setting.listSetting(key, Collections.emptyList(), Function.identity(), Property.NodeScope)
    );
    assertTrue(listAffixSetting.hasComplexMatcher());
    assertTrue(listAffixSetting.match("foo.test.bar"));
    assertTrue(listAffixSetting.match("foo.test_1.bar"));
    assertFalse(listAffixSetting.match("foo.buzz.baz.bar"));
    assertFalse(listAffixSetting.match("foo.bar"));
    assertFalse(listAffixSetting.match("foo.baz"));
    assertFalse(listAffixSetting.match("foo"));
}
/**
 * Affix settings compose with secure settings: concrete secure keys are
 * resolved from the keystore and exposed both per-key and as a namespace map.
 */
public void testAffixKeySettingWithSecure() {
    Setting.AffixSetting<SecureString> secureSetting = Setting.affixKeySetting(
        "foo.",
        "secret",
        (key) -> SecureSetting.secureString(key, null)
    );
    MockSecureSettings secureSettings = new MockSecureSettings();
    secureSettings.setString("foo.a.secret", "secret1");
    secureSettings.setString("foo.b.secret", "secret2");
    Settings settings = Settings.builder().setSecureSettings(secureSettings).build();
    assertThat(secureSetting.exists(settings), is(true));
    // map is keyed by namespace ("a", "b"), not the full key
    Map<String, SecureString> secrets = secureSetting.getAsMap(settings);
    assertThat(secrets.keySet(), contains("a", "b"));
    Setting<SecureString> secureA = secureSetting.getConcreteSetting("foo.a.secret");
    assertThat(secureA.get(settings), is("secret1"));
    assertThat(secrets.get("a"), is("secret1"));
}
/**
 * An affix setting exists only when some concrete key under it is present.
 */
public void testAffixKeyExists() {
    final Setting<Boolean> setting = Setting.affixKeySetting("foo.", "enable", (key) -> Setting.boolSetting(key, false, Property.NodeScope));
    assertFalse(setting.exists(Settings.EMPTY));
    final Settings withConcreteKey = Settings.builder().put("foo.test.enable", "true").build();
    assertTrue(setting.exists(withConcreteKey));
}
/**
 * getNamespaces extracts the middle (namespace) element of every key matching
 * the affix pattern, ignoring non-matching keys.
 */
public void testAffixSettingNamespaces() {
    final Setting.AffixSetting<Boolean> setting = Setting.affixKeySetting(
        "foo.",
        "enable",
        (key) -> Setting.boolSetting(key, false, Property.NodeScope)
    );
    final Settings settings = Settings.builder()
        .put("foo.bar.enable", "true")
        .put("foo.baz.enable", "true")
        .put("foo.boom.enable", "true")
        .put("something.else", "true") // does not match the pattern and is ignored
        .build();
    final Set<String> namespaces = setting.getNamespaces(settings);
    assertEquals(3, namespaces.size());
    assertTrue(namespaces.contains("bar"));
    assertTrue(namespaces.contains("baz"));
    assertTrue(namespaces.contains("boom"));
}
/**
 * getAsMap strips the prefix and keys the result by the remainder, including
 * nested ("deep") suffixes beyond a single namespace element.
 */
public void testAffixAsMap() {
    Setting.AffixSetting<String> setting = Setting.prefixKeySetting("foo.bar.", key -> Setting.simpleString(key, Property.NodeScope));
    Settings build = Settings.builder().put("foo.bar.baz", 2).put("foo.bar.foobar", 3).build();
    Map<String, String> asMap = setting.getAsMap(build);
    assertEquals(2, asMap.size());
    assertEquals("2", asMap.get("baz"));
    assertEquals("3", asMap.get("foobar"));
    // deep suffixes ("baz.deep") are kept as-is after the prefix is stripped
    setting = Setting.prefixKeySetting("foo.bar.", key -> Setting.simpleString(key, Property.NodeScope));
    build = Settings.builder().put("foo.bar.baz", 2).put("foo.bar.foobar", 3).put("foo.bar.baz.deep", 45).build();
    asMap = setting.getAsMap(build);
    assertEquals(3, asMap.size());
    assertEquals("2", asMap.get("baz"));
    assertEquals("3", asMap.get("foobar"));
    assertEquals("45", asMap.get("baz.deep"));
}
/**
 * getAllConcreteSettings returns one concrete setting per namespaced key that
 * matches the affix pattern, ignoring keys without a namespace or with a
 * different prefix.
 */
public void testGetAllConcreteSettings() {
    Setting.AffixSetting<List<String>> listAffixSetting = Setting.affixKeySetting(
        "foo.",
        "bar",
        (key) -> Setting.listSetting(key, Collections.emptyList(), Function.identity(), Property.NodeScope)
    );
    Settings settings = Settings.builder()
        .putList("foo.1.bar", "1", "2")
        .putList("foo.2.bar", "3", "4", "5")
        .putList("foo.bar", "6") // no namespace element -> not a match
        .putList("some.other", "6") // wrong prefix -> not a match
        .putList("foo.3.bar", "6")
        .build();
    Stream<Setting<List<String>>> allConcreteSettings = listAffixSetting.getAllConcreteSettings(settings);
    Map<String, List<String>> collect = allConcreteSettings.collect(Collectors.toMap(Setting::getKey, (s) -> s.get(settings)));
    assertEquals(3, collect.size());
    assertEquals(Arrays.asList("1", "2"), collect.get("foo.1.bar"));
    assertEquals(Arrays.asList("3", "4", "5"), collect.get("foo.2.bar"));
    assertEquals(Arrays.asList("6"), collect.get("foo.3.bar"));
}
/**
 * Calling get() directly on an affix setting is unsupported — only the default
 * value (plain and raw) is accessible without a concrete key.
 */
public void testAffixSettingsFailOnGet() {
    final Setting.AffixSetting<List<String>> listAffixSetting = Setting.affixKeySetting(
        "foo.",
        "bar",
        (key) -> Setting.listSetting(key, Collections.singletonList("testelement"), Function.identity(), Property.NodeScope)
    );
    expectThrows(UnsupportedOperationException.class, () -> listAffixSetting.get(Settings.EMPTY));
    // the raw default is the JSON-ish rendering of the default list
    assertEquals("[\"testelement\"]", listAffixSetting.getDefaultRaw(Settings.EMPTY));
    assertEquals(Collections.singletonList("testelement"), listAffixSetting.getDefault(Settings.EMPTY));
}
/**
 * Exercises a validator whose dependencies include an affix setting: the
 * dependency map handed to {@code validate} contains one entry per concrete
 * affix key plus the fixed dependency, with default-valued fixed dependencies
 * filtered out before the size check.
 */
public void testAffixSettingsValidatorDependencies() {
    Setting<Integer> affix = Setting.affixKeySetting("abc.", "def", k -> Setting.intSetting(k, 10));
    Setting<Integer> fix0 = Setting.intSetting("abc.tuv", 20, 0);
    Setting<Integer> fix1 = Setting.intSetting("abc.qrx", 20, 0, new Setting.Validator<Integer>() {
        @Override
        public void validate(Integer value) {}

        // renders the dependency map as a stable, sorted "key:value" listing
        String toString(Map<Setting<?>, Object> s) {
            return s.entrySet()
                .stream()
                .map(e -> e.getKey().getKey() + ":" + e.getValue().toString())
                .sorted()
                .collect(Collectors.joining(","));
        }

        @Override
        public void validate(Integer value, Map<Setting<?>, Object> settings, boolean isPresent) {
            // drop the fixed dependency when it is still at its default value
            if (settings.get(fix0).equals(fix0.getDefault(Settings.EMPTY))) {
                settings.remove(fix0);
            }
            // NOTE(review): both branches throw the same rendering; presumably
            // intentional so the test can observe the dependency map either way
            if (settings.size() == 1) {
                throw new IllegalArgumentException(toString(settings));
            } else if (settings.size() == 2) {
                throw new IllegalArgumentException(toString(settings));
            }
        }

        @Override
        public Iterator<Setting<?>> settings() {
            List<Setting<?>> a = List.of(affix, fix0);
            return a.iterator();
        }
    });
    // one or two remaining dependencies -> validator throws with their listing
    IllegalArgumentException e = expectThrows(
        IllegalArgumentException.class,
        () -> fix1.get(Settings.builder().put("abc.1.def", 11).put("abc.2.def", 12).put("abc.qrx", 11).build())
    );
    assertThat(e.getMessage(), is("abc.1.def:11,abc.2.def:12"));
    e = expectThrows(
        IllegalArgumentException.class,
        () -> fix1.get(Settings.builder().put("abc.3.def", 13).put("abc.qrx", 20).build())
    );
    assertThat(e.getMessage(), is("abc.3.def:13"));
    e = expectThrows(
        IllegalArgumentException.class,
        () -> fix1.get(Settings.builder().put("abc.4.def", 14).put("abc.qrx", 20).put("abc.tuv", 50).build())
    );
    assertThat(e.getMessage(), is("abc.4.def:14,abc.tuv:50"));
    // three affix entries -> no exception, value is returned normally
    assertEquals(
        fix1.get(Settings.builder().put("abc.3.def", 13).put("abc.1.def", 11).put("abc.2.def", 12).put("abc.qrx", 20).build()),
        Integer.valueOf(20)
    );
    assertEquals(fix1.get(Settings.builder().put("abc.qrx", 30).build()), Integer.valueOf(30));
}
/**
 * Integer settings enforce their configured minimum and maximum bounds and
 * fall back to the default when absent.
 */
public void testMinMaxInt() {
    Setting<Integer> integerSetting = Setting.intSetting("foo.bar", 1, 0, 10, Property.NodeScope);
    // FIX: replaced the legacy try/fail/catch pattern with expectThrows for
    // consistency with the rest of this test class
    IllegalArgumentException ex = expectThrows(
        IllegalArgumentException.class,
        () -> integerSetting.get(Settings.builder().put("foo.bar", 11).build())
    );
    assertEquals("Failed to parse value [11] for setting [foo.bar] must be <= 10", ex.getMessage());
    ex = expectThrows(
        IllegalArgumentException.class,
        () -> integerSetting.get(Settings.builder().put("foo.bar", -1).build())
    );
    assertEquals("Failed to parse value [-1] for setting [foo.bar] must be >= 0", ex.getMessage());
    // in-range value and default
    assertEquals(5, integerSetting.get(Settings.builder().put("foo.bar", 5).build()).intValue());
    assertEquals(1, integerSetting.get(Settings.EMPTY).intValue());
}
/**
 * Only one single scope can be added to any setting; zero or multiple scopes
 * are tolerated at construction time and only rejected later by
 * SettingsModule.registerSetting.
 */
public void testMutuallyExclusiveScopes() {
    // Those should pass
    Setting<String> setting = Setting.simpleString("foo.bar", Property.NodeScope);
    assertThat(setting.hasNodeScope(), is(true));
    assertThat(setting.hasIndexScope(), is(false));
    setting = Setting.simpleString("foo.bar", Property.IndexScope);
    assertThat(setting.hasIndexScope(), is(true));
    assertThat(setting.hasNodeScope(), is(false));
    // We accept settings with no scope but they will be rejected when we register with SettingsModule.registerSetting
    setting = Setting.simpleString("foo.bar");
    assertThat(setting.hasIndexScope(), is(false));
    assertThat(setting.hasNodeScope(), is(false));
    // We accept settings with multiple scopes but they will be rejected when we register with SettingsModule.registerSetting
    setting = Setting.simpleString("foo.bar", Property.IndexScope, Property.NodeScope);
    assertThat(setting.hasIndexScope(), is(true));
    assertThat(setting.hasNodeScope(), is(true));
}
/**
 * A null properties array is rejected when the setting is constructed.
 */
public void testRejectNullProperties() {
    final IllegalArgumentException ex = expectThrows(
        IllegalArgumentException.class,
        () -> Setting.simpleString("foo.bar", (Property[]) null)
    );
    assertThat(ex.getMessage(), containsString("properties cannot be null for setting"));
}
public void testRejectConflictingDynamicAndFinalProperties() {
IllegalArgumentException ex = expectThrows(
IllegalArgumentException.class,
() -> Setting.simpleString("foo.bar", Property.Final, randomFrom(Property.Dynamic, Property.OperatorDynamic))
);
assertThat(ex.getMessage(), containsString("final setting [foo.bar] cannot be dynamic"));
}
public void testRejectConflictingDynamicAndOperatorDynamicProperties() {
IllegalArgumentException ex = expectThrows(
IllegalArgumentException.class,
() -> Setting.simpleString("foo.bar", Property.Dynamic, Property.OperatorDynamic)
);
assertThat(ex.getMessage(), containsString("setting [foo.bar] cannot be both dynamic and operator dynamic"));
}
public void testRejectNonIndexScopedNotCopyableOnResizeSetting() {
final IllegalArgumentException e = expectThrows(
IllegalArgumentException.class,
() -> Setting.simpleString("foo.bar", Property.NotCopyableOnResize)
);
assertThat(e, hasToString(containsString("non-index-scoped setting [foo.bar] can not have property [NotCopyableOnResize]")));
}
public void testRejectNonIndexScopedInternalIndexSetting() {
final IllegalArgumentException e = expectThrows(
IllegalArgumentException.class,
() -> Setting.simpleString("foo.bar", Property.InternalIndex)
);
assertThat(e, hasToString(containsString("non-index-scoped setting [foo.bar] can not have property [InternalIndex]")));
}
public void testRejectNonIndexScopedPrivateIndexSetting() {
final IllegalArgumentException e = expectThrows(
IllegalArgumentException.class,
() -> Setting.simpleString("foo.bar", Property.PrivateIndex)
);
assertThat(e, hasToString(containsString("non-index-scoped setting [foo.bar] can not have property [PrivateIndex]")));
}
public void testTimeValue() {
final TimeValue random = randomTimeValue();
Setting<TimeValue> setting = Setting.timeSetting("foo", random);
assertThat(setting.get(Settings.EMPTY), equalTo(random));
final int factor = randomIntBetween(1, 10);
setting = Setting.timeSetting("foo", (s) -> TimeValue.timeValueMillis(random.getMillis() * factor), TimeValue.ZERO);
assertThat(setting.get(Settings.builder().put("foo", "12h").build()), equalTo(TimeValue.timeValueHours(12)));
assertThat(setting.get(Settings.EMPTY).getMillis(), equalTo(random.getMillis() * factor));
}
public void testTimeValueBounds() {
Setting<TimeValue> settingWithLowerBound = Setting.timeSetting(
"foo",
TimeValue.timeValueSeconds(10),
TimeValue.timeValueSeconds(5)
);
assertThat(settingWithLowerBound.get(Settings.EMPTY), equalTo(TimeValue.timeValueSeconds(10)));
assertThat(settingWithLowerBound.get(Settings.builder().put("foo", "5000ms").build()), equalTo(TimeValue.timeValueSeconds(5)));
IllegalArgumentException illegalArgumentException = expectThrows(
IllegalArgumentException.class,
() -> settingWithLowerBound.get(Settings.builder().put("foo", "4999ms").build())
);
assertThat(illegalArgumentException.getMessage(), equalTo("failed to parse value [4999ms] for setting [foo], must be >= [5s]"));
Setting<TimeValue> settingWithBothBounds = Setting.timeSetting(
"bar",
TimeValue.timeValueSeconds(10),
TimeValue.timeValueSeconds(5),
TimeValue.timeValueSeconds(20)
);
assertThat(settingWithBothBounds.get(Settings.EMPTY), equalTo(TimeValue.timeValueSeconds(10)));
assertThat(settingWithBothBounds.get(Settings.builder().put("bar", "5000ms").build()), equalTo(TimeValue.timeValueSeconds(5)));
assertThat(settingWithBothBounds.get(Settings.builder().put("bar", "20000ms").build()), equalTo(TimeValue.timeValueSeconds(20)));
illegalArgumentException = expectThrows(
IllegalArgumentException.class,
() -> settingWithBothBounds.get(Settings.builder().put("bar", "4999ms").build())
);
assertThat(illegalArgumentException.getMessage(), equalTo("failed to parse value [4999ms] for setting [bar], must be >= [5s]"));
illegalArgumentException = expectThrows(
IllegalArgumentException.class,
() -> settingWithBothBounds.get(Settings.builder().put("bar", "20001ms").build())
);
assertThat(illegalArgumentException.getMessage(), equalTo("failed to parse value [20001ms] for setting [bar], must be <= [20s]"));
}
public void testSettingsGroupUpdater() {
Setting<Integer> intSetting = Setting.intSetting("prefix.foo", 1, Property.NodeScope, Property.Dynamic);
Setting<Integer> intSetting2 = Setting.intSetting("prefix.same", 1, Property.NodeScope, Property.Dynamic);
AbstractScopedSettings.SettingUpdater<Settings> updater = Setting.groupedSettingsUpdater(
s -> {},
Arrays.asList(intSetting, intSetting2)
);
Settings current = Settings.builder().put("prefix.foo", 123).put("prefix.same", 5555).build();
Settings previous = Settings.builder().put("prefix.foo", 321).put("prefix.same", 5555).build();
assertTrue(updater.apply(current, previous));
}
public void testSettingsGroupUpdaterRemoval() {
Setting<Integer> intSetting = Setting.intSetting("prefix.foo", 1, Property.NodeScope, Property.Dynamic);
Setting<Integer> intSetting2 = Setting.intSetting("prefix.same", 1, Property.NodeScope, Property.Dynamic);
AbstractScopedSettings.SettingUpdater<Settings> updater = Setting.groupedSettingsUpdater(
s -> {},
Arrays.asList(intSetting, intSetting2)
);
Settings current = Settings.builder().put("prefix.same", 5555).build();
Settings previous = Settings.builder().put("prefix.foo", 321).put("prefix.same", 5555).build();
assertTrue(updater.apply(current, previous));
}
public void testSettingsGroupUpdaterWithAffixSetting() {
Setting<Integer> intSetting = Setting.intSetting("prefix.foo", 1, Property.NodeScope, Property.Dynamic);
Setting.AffixSetting<String> prefixKeySetting = Setting.prefixKeySetting(
"prefix.foo.bar.",
key -> Setting.simpleString(key, Property.NodeScope, Property.Dynamic)
);
Setting.AffixSetting<String> affixSetting = Setting.affixKeySetting(
"prefix.foo.",
"suffix",
key -> Setting.simpleString(key, Property.NodeScope, Property.Dynamic)
);
AbstractScopedSettings.SettingUpdater<Settings> updater = Setting.groupedSettingsUpdater(
s -> {},
Arrays.asList(intSetting, prefixKeySetting, affixSetting)
);
Settings.Builder currentSettingsBuilder = Settings.builder().put("prefix.foo.bar.baz", "foo").put("prefix.foo.infix.suffix", "foo");
Settings.Builder previousSettingsBuilder = Settings.builder()
.put("prefix.foo.bar.baz", "foo")
.put("prefix.foo.infix.suffix", "foo");
boolean removePrefixKeySetting = randomBoolean();
boolean changePrefixKeySetting = randomBoolean();
boolean removeAffixKeySetting = randomBoolean();
boolean changeAffixKeySetting = randomBoolean();
boolean removeAffixNamespace = randomBoolean();
if (removePrefixKeySetting) {
previousSettingsBuilder.remove("prefix.foo.bar.baz");
}
if (changePrefixKeySetting) {
currentSettingsBuilder.put("prefix.foo.bar.baz", "bar");
}
if (removeAffixKeySetting) {
previousSettingsBuilder.remove("prefix.foo.infix.suffix");
}
if (changeAffixKeySetting) {
currentSettingsBuilder.put("prefix.foo.infix.suffix", "bar");
}
if (removeAffixKeySetting == false && changeAffixKeySetting == false && removeAffixNamespace) {
currentSettingsBuilder.remove("prefix.foo.infix.suffix");
currentSettingsBuilder.put("prefix.foo.infix2.suffix", "bar");
previousSettingsBuilder.put("prefix.foo.infix2.suffix", "bar");
}
boolean expectedChange = removeAffixKeySetting
|| removePrefixKeySetting
|| changeAffixKeySetting
|| changePrefixKeySetting
|| removeAffixNamespace;
assertThat(updater.apply(currentSettingsBuilder.build(), previousSettingsBuilder.build()), is(expectedChange));
}
public void testAffixNamespacesWithGroupSetting() {
final Setting.AffixSetting<Settings> affixSetting = Setting.affixKeySetting(
"prefix.",
"suffix",
(key) -> Setting.groupSetting(key + ".", Setting.Property.Dynamic, Setting.Property.NodeScope)
);
assertThat(affixSetting.getNamespaces(Settings.builder().put("prefix.infix.suffix", "anything").build()), hasSize(1));
assertThat(affixSetting.getNamespaces(Settings.builder().put("prefix.infix.suffix.anything", "anything").build()), hasSize(1));
}
public void testGroupSettingUpdaterValidator() {
final Setting.AffixSetting<Integer> affixSetting = Setting.affixKeySetting(
"prefix.",
"suffix",
(key) -> Setting.intSetting(key, 5, Setting.Property.Dynamic, Setting.Property.NodeScope)
);
Setting<Integer> fixSetting = Setting.intSetting("abc", 1, Property.NodeScope);
Consumer<Settings> validator = s -> {
if (affixSetting.getNamespaces(s).contains("foo")) {
if (fixSetting.get(s) == 2) {
throw new IllegalArgumentException("foo and 2 can't go together");
}
} else if (affixSetting.getNamespaces(s).contains("bar")) {
throw new IllegalArgumentException("no bar");
}
};
AbstractScopedSettings.SettingUpdater<Settings> updater = Setting.groupedSettingsUpdater(
s -> {},
Arrays.asList(affixSetting, fixSetting),
validator
);
IllegalArgumentException illegal = expectThrows(IllegalArgumentException.class, () -> {
updater.getValue(Settings.builder().put("prefix.foo.suffix", 5).put("abc", 2).build(), Settings.EMPTY);
});
assertEquals("foo and 2 can't go together", illegal.getMessage());
illegal = expectThrows(IllegalArgumentException.class, () -> {
updater.getValue(Settings.builder().put("prefix.bar.suffix", 6).put("abc", 3).build(), Settings.EMPTY);
});
assertEquals("no bar", illegal.getMessage());
Settings s = updater.getValue(
Settings.builder().put("prefix.foo.suffix", 5).put("prefix.bar.suffix", 5).put("abc", 3).build(),
Settings.EMPTY
);
assertNotNull(s);
}
public void testExists() {
final Setting<?> fooSetting = Setting.simpleString("foo", Property.NodeScope);
assertFalse(fooSetting.exists(Settings.EMPTY));
assertTrue(fooSetting.exists(Settings.builder().put("foo", "bar").build()));
}
public void testExistsWithSecure() {
final MockSecureSettings secureSettings = new MockSecureSettings();
secureSettings.setString("foo", "foo");
Setting<String> fooSetting = Setting.simpleString("foo", Property.NodeScope);
assertFalse(fooSetting.exists(Settings.builder().setSecureSettings(secureSettings).build()));
}
public void testExistsWithFallback() {
final int count = randomIntBetween(1, 16);
Setting<String> current = Setting.simpleString("fallback0", Property.NodeScope);
for (int i = 1; i < count; i++) {
final Setting<String> next = new Setting<>(
new Setting.SimpleKey("fallback" + i),
current,
Function.identity(),
Property.NodeScope
);
current = next;
}
final Setting<String> fooSetting = new Setting<>(new Setting.SimpleKey("foo"), current, Function.identity(), Property.NodeScope);
assertFalse(fooSetting.exists(Settings.EMPTY));
if (randomBoolean()) {
assertTrue(fooSetting.exists(Settings.builder().put("foo", "bar").build()));
} else {
final String setting = "fallback" + randomIntBetween(0, count - 1);
assertFalse(fooSetting.exists(Settings.builder().put(setting, "bar").build()));
assertTrue(fooSetting.existsOrFallbackExists(Settings.builder().put(setting, "bar").build()));
}
}
public void testAffixMapUpdateWithNullSettingValue() {
// GIVEN an affix setting changed from "prefix._foo"="bar" to "prefix._foo"=null
final Settings current = Settings.builder().put("prefix._foo", (String) null).build();
final Settings previous = Settings.builder().put("prefix._foo", "bar").build();
final Setting.AffixSetting<String> affixSetting = Setting.prefixKeySetting(
"prefix" + ".",
key -> Setting.simpleString(key, Property.Dynamic, Property.NodeScope)
);
final Consumer<Map<String, String>> consumer = (map) -> {};
final BiConsumer<String, String> validator = (s1, s2) -> {};
// WHEN creating an affix updater
final SettingUpdater<Map<String, String>> updater = affixSetting.newAffixMapUpdater(consumer, logger, validator);
// THEN affix updater is always expected to have changed (even when defaults are omitted)
assertTrue(updater.hasChanged(current, previous));
// THEN changes are expected when defaults aren't omitted
final Map<String, String> updatedSettings = updater.getValue(current, previous);
assertNotNull(updatedSettings);
assertEquals(1, updatedSettings.size());
// THEN changes are reported when defaults aren't omitted
final String key = updatedSettings.keySet().iterator().next();
final String value = updatedSettings.get(key);
assertEquals("_foo", key);
assertEquals("", value);
}
public void testNonSecureSettingInKeystore() {
MockSecureSettings secureSettings = new MockSecureSettings();
secureSettings.setString("foo", "bar");
final Settings settings = Settings.builder().setSecureSettings(secureSettings).build();
Setting<String> setting = Setting.simpleString("foo", Property.NodeScope);
IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> setting.get(settings));
assertThat(e.getMessage(), containsString("must be stored inside elasticsearch.yml"));
}
@TestLogging(
value = "org.elasticsearch.common.settings.IndexScopedSettings:DEBUG",
reason = "to ensure we log INFO-level messages from IndexScopedSettings"
)
public void testLogSettingUpdate() throws Exception {
final IndexMetadata metadata = newIndexMeta(
"index1",
Settings.builder().put(IndexSettings.INDEX_REFRESH_INTERVAL_SETTING.getKey(), "20s").build()
);
final IndexSettings settings = new IndexSettings(metadata, Settings.EMPTY);
try (var mockLog = MockLog.capture(IndexScopedSettings.class)) {
mockLog.addExpectation(
new MockLog.SeenEventExpectation(
"message",
"org.elasticsearch.common.settings.IndexScopedSettings",
Level.DEBUG,
"updating [index.refresh_interval] from [20s] to [10s]"
) {
@Override
public boolean innerMatch(LogEvent event) {
return event.getMarker().getName().equals(" [index1]");
}
}
);
settings.updateIndexMetadata(
newIndexMeta("index1", Settings.builder().put(IndexSettings.INDEX_REFRESH_INTERVAL_SETTING.getKey(), "10s").build())
);
mockLog.assertAllExpectationsMatched();
}
}
public void testDynamicTest() {
final Property property = randomFrom(Property.Dynamic, Property.OperatorDynamic);
final Setting<String> setting = Setting.simpleString("foo.bar", property);
assertTrue(setting.isDynamic());
assertEquals(setting.isOperatorOnly(), property == Property.OperatorDynamic);
}
public void testCheckForDeprecation() {
final String criticalSettingName = "foo.bar";
final String warningSettingName = "foo.foo";
final String settingValue = "blat";
final Setting<String> undeprecatedSetting1 = Setting.simpleString(criticalSettingName, settingValue);
final Setting<String> undeprecatedSetting2 = Setting.simpleString(warningSettingName, settingValue);
final Settings settings = Settings.builder().put(criticalSettingName, settingValue).put(warningSettingName, settingValue).build();
undeprecatedSetting1.checkDeprecation(settings);
undeprecatedSetting2.checkDeprecation(settings);
ensureNoWarnings();
final Setting<String> criticalDeprecatedSetting = Setting.simpleString(
criticalSettingName,
settingValue,
Property.DeprecatedWarning
);
criticalDeprecatedSetting.checkDeprecation(settings);
assertSettingDeprecationsAndWarnings(new Setting<?>[] { criticalDeprecatedSetting });
final Setting<String> deprecatedSettingWarningOnly = Setting.simpleString(
warningSettingName,
settingValue,
Property.DeprecatedWarning
);
deprecatedSettingWarningOnly.checkDeprecation(settings);
assertSettingDeprecationsAndWarnings(new Setting<?>[] { deprecatedSettingWarningOnly });
}
public void testCheckForDeprecationWithSkipSetting() {
final String settingName = "foo.bar.hide.this";
final String settingValue = "blat";
final Setting<String> setting = Setting.simpleString(settingName, settingValue);
final Settings settings = Settings.builder().put(settingName, settingValue).build();
setting.checkDeprecation(settings);
ensureNoWarnings();
final Setting<String> deprecatedSetting = Setting.simpleString(settingName, settingValue, Property.DeprecatedWarning);
deprecatedSetting.checkDeprecation(settings);
assertSettingDeprecationsAndWarnings(new Setting<?>[] { deprecatedSetting });
final Settings settingsWithSkipDeprecationSetting = Settings.builder()
.put(settingName, settingValue)
.putList("deprecation.skip_deprecated_settings", settingName)
.build();
DeprecationLogger.initialize(settingsWithSkipDeprecationSetting);
deprecatedSetting.checkDeprecation(settingsWithSkipDeprecationSetting);
ensureNoWarnings();
}
public void testDeprecationPropertyValidation() {
expectThrows(
IllegalArgumentException.class,
() -> Setting.boolSetting("a.bool.setting", true, Property.Deprecated, Property.DeprecatedWarning)
);
expectThrows(
IllegalArgumentException.class,
() -> Setting.boolSetting("a.bool.setting", true, Property.Deprecated, Property.IndexSettingDeprecatedInV7AndRemovedInV8)
);
expectThrows(
IllegalArgumentException.class,
() -> Setting.boolSetting("a.bool.setting", true, Property.DeprecatedWarning, Property.IndexSettingDeprecatedInV7AndRemovedInV8)
);
expectThrows(
IllegalArgumentException.class,
() -> Setting.boolSetting("a.bool.setting", true, Property.Deprecated, Property.IndexSettingDeprecatedInV8AndRemovedInV9)
);
expectThrows(
IllegalArgumentException.class,
() -> Setting.boolSetting("a.bool.setting", true, Property.DeprecatedWarning, Property.IndexSettingDeprecatedInV8AndRemovedInV9)
);
expectThrows(
IllegalArgumentException.class,
() -> Setting.boolSetting("a.bool.setting", true, Property.Deprecated, Property.IndexSettingDeprecatedInV9AndRemovedInV10)
);
expectThrows(
IllegalArgumentException.class,
() -> Setting.boolSetting(
"a.bool.setting",
true,
Property.DeprecatedWarning,
Property.IndexSettingDeprecatedInV9AndRemovedInV10
)
);
}
public void testIntSettingBounds() {
Setting<Integer> setting = Setting.intSetting("int.setting", 0, Integer.MIN_VALUE, Integer.MAX_VALUE);
var e = expectThrows(
IllegalArgumentException.class,
() -> setting.get(Settings.builder().put("int.setting", "2147483648").build())
);
assertThat(e.getMessage(), equalTo("Failed to parse value [2147483648] for setting [int.setting] must be <= 2147483647"));
var e2 = expectThrows(
IllegalArgumentException.class,
() -> setting.get(Settings.builder().put("int.setting", "-2147483649").build())
);
assertThat(e2.getMessage(), equalTo("Failed to parse value [-2147483649] for setting [int.setting] must be >= -2147483648"));
}
public void testLongSettingBounds() {
Setting<Long> setting = Setting.longSetting("long.setting", 0, Long.MIN_VALUE);
var e = expectThrows(
IllegalArgumentException.class,
() -> setting.get(Settings.builder().put("long.setting", "9223372036854775808").build())
);
assertThat(
e.getMessage(),
equalTo("Failed to parse value [9223372036854775808] for setting [long.setting] must be <= 9223372036854775807")
);
var e2 = expectThrows(
IllegalArgumentException.class,
() -> setting.get(Settings.builder().put("long.setting", "-9223372036854775809").build())
);
assertThat(
e2.getMessage(),
equalTo("Failed to parse value [-9223372036854775809] for setting [long.setting] must be >= -9223372036854775808")
);
}
}
| Composite |
java | mapstruct__mapstruct | processor/src/test/java/org/mapstruct/ap/test/updatemethods/OrganizationTypeNrEntity.java | {
"start": 237,
"end": 446
} | class ____ {
private Integer number;
public Integer getNumber() {
return number;
}
public void setNumber(Integer number) {
this.number = number;
}
}
| OrganizationTypeNrEntity |
java | quarkusio__quarkus | extensions/security-jpa/runtime/src/main/java/io/quarkus/security/jpa/runtime/JpaIdentityProvider.java | {
"start": 803,
"end": 2895
} | class ____ implements IdentityProvider<UsernamePasswordAuthenticationRequest> {
private static Logger log = Logger.getLogger(JpaIdentityProvider.class);
@Inject
SessionFactory sessionFactory;
@Override
public Class<UsernamePasswordAuthenticationRequest> getRequestType() {
return UsernamePasswordAuthenticationRequest.class;
}
@Override
public Uni<SecurityIdentity> authenticate(UsernamePasswordAuthenticationRequest request,
AuthenticationRequestContext context) {
return context.runBlocking(new Supplier<SecurityIdentity>() {
@Override
public SecurityIdentity get() {
if (requireActiveCDIRequestContext() && !Arc.container().requestContext().isActive()) {
var requestContext = Arc.container().requestContext();
requestContext.activate();
try {
return authenticate(request);
} finally {
requestContext.terminate();
}
}
return authenticate(request);
}
});
}
private SecurityIdentity authenticate(UsernamePasswordAuthenticationRequest request) {
try (Session session = sessionFactory.openSession()) {
session.setHibernateFlushMode(FlushMode.MANUAL);
session.setDefaultReadOnly(true);
return authenticate(session, request);
} catch (SecurityException e) {
log.debug("Authentication failed", e);
throw new AuthenticationFailedException(e);
}
}
protected <T> T getSingleUser(Query query) {
@SuppressWarnings("unchecked")
List<T> results = (List<T>) query.getResultList();
return JpaIdentityProviderUtil.getSingleUser(results);
}
protected boolean requireActiveCDIRequestContext() {
return false;
}
public abstract SecurityIdentity authenticate(EntityManager em,
UsernamePasswordAuthenticationRequest request);
}
| JpaIdentityProvider |
java | google__guava | guava/src/com/google/common/util/concurrent/AbstractScheduledService.java | {
"start": 27391,
"end": 28483
} | class ____ {
private final long delay;
private final TimeUnit unit;
/**
* @param delay the time from now to delay execution
* @param unit the time unit of the delay parameter
*/
public Schedule(long delay, TimeUnit unit) {
this.delay = delay;
this.unit = checkNotNull(unit);
}
/**
* @param delay the time from now to delay execution
* @since 31.1 (but only since 33.4.0 in the Android flavor)
*/
public Schedule(Duration delay) {
this(toNanosSaturated(delay), NANOSECONDS);
}
}
/**
* Calculates the time at which to next invoke the task.
*
* <p>This is guaranteed to be called immediately after the task has completed an iteration and
* on the same thread as the previous execution of {@link
* AbstractScheduledService#runOneIteration}.
*
* @return a schedule that defines the delay before the next execution.
*/
// TODO(cpovirk): @ForOverride
protected abstract Schedule getNextSchedule() throws Exception;
}
}
| Schedule |
java | processing__processing4 | core/src/processing/data/Table.java | {
"start": 1983,
"end": 2567
} | class ____ handling tabular data, typically from a CSV, TSV, or other
* sort of spreadsheet file.
* </p>
* <p>
* CSV files are
* <a href="http://en.wikipedia.org/wiki/Comma-separated_values">comma separated
* values</a>, often with the data in quotes. TSV files use tabs as separators,
* and usually don't bother with the quotes.
* </p>
* <p>
* File names should end with .csv if they're comma separated.
* </p>
* <p>
* A rough "spec" for CSV can be found
* <a href="http://tools.ietf.org/html/rfc4180">here</a>.
* </p>
*
* @webref data:composite
* @webBrief Generic | for |
java | spring-projects__spring-data-jpa | spring-data-jpa/src/test/java/org/springframework/data/jpa/domain/SpecificationUnitTests.java | {
"start": 5378,
"end": 5593
} | class ____ implements Serializable, Specification<Object> {
@Override
public Predicate toPredicate(Root<Object> root, CriteriaQuery<?> query, CriteriaBuilder cb) {
return null;
}
}
}
| SerializableSpecification |
java | netty__netty | transport-native-io_uring/src/test/java/io/netty/channel/uring/CombinationOfEpollAndIoUringTest.java | {
"start": 850,
"end": 1208
} | class ____ {
@BeforeAll
public static void loadJNI() {
// Epoll must be usable.
Epoll.ensureAvailability();
assumeTrue(IoUring.isAvailable());
}
@Test
public void testEpollAndIOUringCanBothBeLoaded() {
Epoll.ensureAvailability();
IoUring.ensureAvailability();
}
}
| CombinationOfEpollAndIoUringTest |
java | alibaba__druid | core/src/test/java/com/alibaba/druid/bvt/sql/mysql/create/MySqlCreateProcedureTest14.java | {
"start": 924,
"end": 2498
} | class ____ extends MysqlTest {
public void test_0() throws Exception {
String sql = "CREATE DEFINER = 'admin'@'localhost' PROCEDURE account_count()\n" +
"SQL SECURITY INVOKER\n" +
"BEGIN\n" +
" SELECT 'Number of accounts:', COUNT(*) FROM mysql.user;\n" +
"END;";
List<SQLStatement> statementList = SQLUtils.parseStatements(sql, JdbcConstants.MYSQL);
SQLStatement stmt = statementList.get(0);
// print(statementList);
assertEquals(1, statementList.size());
System.out.println(SQLUtils.toMySqlString(stmt));
assertEquals("CREATE PROCEDURE account_count ()\n" +
"SQL SECURITY INVOKER\n" +
"BEGIN\n" +
"\tSELECT 'Number of accounts:', COUNT(*)\n" +
"\tFROM mysql.user;\n" +
"END;", SQLUtils.toMySqlString(stmt));
SchemaStatVisitor visitor = SQLUtils.createSchemaStatVisitor(JdbcConstants.MYSQL);
stmt.accept(visitor);
System.out.println("Tables : " + visitor.getTables());
System.out.println("fields : " + visitor.getColumns());
// System.out.println("coditions : " + visitor.getConditions());
// System.out.println("orderBy : " + visitor.getOrderByColumns());
assertEquals(1, visitor.getTables().size());
assertEquals(1, visitor.getColumns().size());
assertEquals(0, visitor.getConditions().size());
assertTrue(visitor.containsColumn("mysql.user", "*"));
}
}
| MySqlCreateProcedureTest14 |
java | apache__rocketmq | container/src/main/java/org/apache/rocketmq/container/BrokerBootHook.java | {
"start": 880,
"end": 1663
} | interface ____ {
/**
* Name of the hook.
*
* @return name of the hook
*/
String hookName();
/**
* Code to execute before broker start.
*
* @param innerBrokerController inner broker to start
* @param properties broker properties
* @throws Exception when execute hook
*/
void executeBeforeStart(InnerBrokerController innerBrokerController, Properties properties) throws Exception;
/**
* Code to execute after broker start.
*
* @param innerBrokerController inner broker to start
* @param properties broker properties
* @throws Exception when execute hook
*/
void executeAfterStart(InnerBrokerController innerBrokerController, Properties properties) throws Exception;
}
| BrokerBootHook |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/policy/PriorityComparator.java | {
"start": 1041,
"end": 1449
} | class ____ implements Comparator<SchedulableEntity> {
@Override
public int compare(SchedulableEntity se1, SchedulableEntity se2) {
Priority p1 = se1.getPriority();
Priority p2 = se2.getPriority();
if (p1 == null && p2 == null) {
return 0;
} else if (p1 == null) {
return -1;
} else if (p2 == null) {
return 1;
}
return p1.compareTo(p2);
}
}
| PriorityComparator |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/client/impl/DfsClientConf.java | {
"start": 27974,
"end": 37137
} | class ____ {
private static final Logger LOG = DfsClientConf.LOG;
private final int socketCacheCapacity;
private final long socketCacheExpiry;
private final boolean useLegacyBlockReaderLocal;
private final String domainSocketPath;
private final boolean skipShortCircuitChecksums;
private final int shortCircuitBufferSize;
private final boolean shortCircuitLocalReads;
private final boolean domainSocketDataTraffic;
private final int shortCircuitStreamsCacheSize;
private final long shortCircuitStreamsCacheExpiryMs;
private final int shortCircuitSharedMemoryWatcherInterruptCheckMs;
// Short Circuit Read Metrics
private final boolean scrMetricsEnabled;
private final int scrMetricsSamplingPercentage;
private final boolean shortCircuitMmapEnabled;
private final int shortCircuitMmapCacheSize;
private final long shortCircuitMmapCacheExpiryMs;
private final long shortCircuitMmapCacheRetryTimeout;
private final long shortCircuitCacheStaleThresholdMs;
private final long domainSocketDisableIntervalSeconds;
private final long keyProviderCacheExpiryMs;
public ShortCircuitConf(Configuration conf) {
socketCacheCapacity = conf.getInt(
DFS_CLIENT_SOCKET_CACHE_CAPACITY_KEY,
DFS_CLIENT_SOCKET_CACHE_CAPACITY_DEFAULT);
socketCacheExpiry = conf.getLong(
DFS_CLIENT_SOCKET_CACHE_EXPIRY_MSEC_KEY,
DFS_CLIENT_SOCKET_CACHE_EXPIRY_MSEC_DEFAULT);
useLegacyBlockReaderLocal = conf.getBoolean(
DFS_CLIENT_USE_LEGACY_BLOCKREADERLOCAL,
DFS_CLIENT_USE_LEGACY_BLOCKREADERLOCAL_DEFAULT);
shortCircuitLocalReads = conf.getBoolean(
Read.ShortCircuit.KEY,
Read.ShortCircuit.DEFAULT);
int scrSamplingPercentage = conf.getInt(
Read.ShortCircuit.METRICS_SAMPLING_PERCENTAGE_KEY,
Read.ShortCircuit.METRICS_SAMPLING_PERCENTAGE_DEFAULT);
if (scrSamplingPercentage <= 0) {
scrMetricsSamplingPercentage = 0;
scrMetricsEnabled = false;
} else if (scrSamplingPercentage > 100) {
scrMetricsSamplingPercentage = 100;
scrMetricsEnabled = true;
} else {
scrMetricsSamplingPercentage = scrSamplingPercentage;
scrMetricsEnabled = true;
}
domainSocketDataTraffic = conf.getBoolean(
DFS_CLIENT_DOMAIN_SOCKET_DATA_TRAFFIC,
DFS_CLIENT_DOMAIN_SOCKET_DATA_TRAFFIC_DEFAULT);
domainSocketPath = conf.getTrimmed(
DFS_DOMAIN_SOCKET_PATH_KEY,
DFS_DOMAIN_SOCKET_PATH_DEFAULT);
LOG.debug(DFS_CLIENT_USE_LEGACY_BLOCKREADERLOCAL
+ " = {}", useLegacyBlockReaderLocal);
LOG.debug(Read.ShortCircuit.KEY
+ " = {}", shortCircuitLocalReads);
LOG.debug(DFS_CLIENT_DOMAIN_SOCKET_DATA_TRAFFIC
+ " = {}", domainSocketDataTraffic);
LOG.debug(DFS_DOMAIN_SOCKET_PATH_KEY
+ " = {}", domainSocketPath);
skipShortCircuitChecksums = conf.getBoolean(
Read.ShortCircuit.SKIP_CHECKSUM_KEY,
Read.ShortCircuit.SKIP_CHECKSUM_DEFAULT);
shortCircuitBufferSize = conf.getInt(
Read.ShortCircuit.BUFFER_SIZE_KEY,
Read.ShortCircuit.BUFFER_SIZE_DEFAULT);
shortCircuitStreamsCacheSize = conf.getInt(
Read.ShortCircuit.STREAMS_CACHE_SIZE_KEY,
Read.ShortCircuit.STREAMS_CACHE_SIZE_DEFAULT);
shortCircuitStreamsCacheExpiryMs = conf.getLong(
Read.ShortCircuit.STREAMS_CACHE_EXPIRY_MS_KEY,
Read.ShortCircuit.STREAMS_CACHE_EXPIRY_MS_DEFAULT);
shortCircuitMmapEnabled = conf.getBoolean(
Mmap.ENABLED_KEY,
Mmap.ENABLED_DEFAULT);
shortCircuitMmapCacheSize = conf.getInt(
Mmap.CACHE_SIZE_KEY,
Mmap.CACHE_SIZE_DEFAULT);
shortCircuitMmapCacheExpiryMs = conf.getLong(
Mmap.CACHE_TIMEOUT_MS_KEY,
Mmap.CACHE_TIMEOUT_MS_DEFAULT);
shortCircuitMmapCacheRetryTimeout = conf.getLong(
Mmap.RETRY_TIMEOUT_MS_KEY,
Mmap.RETRY_TIMEOUT_MS_DEFAULT);
shortCircuitCacheStaleThresholdMs = conf.getLong(
ShortCircuit.REPLICA_STALE_THRESHOLD_MS_KEY,
ShortCircuit.REPLICA_STALE_THRESHOLD_MS_DEFAULT);
shortCircuitSharedMemoryWatcherInterruptCheckMs = conf.getInt(
DFS_SHORT_CIRCUIT_SHARED_MEMORY_WATCHER_INTERRUPT_CHECK_MS,
DFS_SHORT_CIRCUIT_SHARED_MEMORY_WATCHER_INTERRUPT_CHECK_MS_DEFAULT);
domainSocketDisableIntervalSeconds = conf.getLong(
DFS_DOMAIN_SOCKET_DISABLE_INTERVAL_SECOND_KEY,
DFS_DOMAIN_SOCKET_DISABLE_INTERVAL_SECOND_DEFAULT);
Preconditions.checkArgument(domainSocketDisableIntervalSeconds >= 0,
DFS_DOMAIN_SOCKET_DISABLE_INTERVAL_SECOND_KEY + "can't be negative.");
keyProviderCacheExpiryMs = conf.getLong(
DFS_CLIENT_KEY_PROVIDER_CACHE_EXPIRY_MS,
DFS_CLIENT_KEY_PROVIDER_CACHE_EXPIRY_DEFAULT);
}
/**
* @return the socketCacheCapacity
*/
public int getSocketCacheCapacity() {
return socketCacheCapacity;
}
/**
* @return the socketCacheExpiry
*/
public long getSocketCacheExpiry() {
return socketCacheExpiry;
}
public boolean isUseLegacyBlockReaderLocal() {
return useLegacyBlockReaderLocal;
}
public String getDomainSocketPath() {
return domainSocketPath;
}
public boolean isShortCircuitLocalReads() {
return shortCircuitLocalReads;
}
public boolean isScrMetricsEnabled() {
return scrMetricsEnabled;
}
public int getScrMetricsSamplingPercentage() {
return scrMetricsSamplingPercentage;
}
public boolean isDomainSocketDataTraffic() {
return domainSocketDataTraffic;
}
/**
* @return the skipShortCircuitChecksums
*/
public boolean isSkipShortCircuitChecksums() {
return skipShortCircuitChecksums;
}
/**
* @return the shortCircuitBufferSize
*/
public int getShortCircuitBufferSize() {
return shortCircuitBufferSize;
}
/**
* @return the shortCircuitStreamsCacheSize
*/
public int getShortCircuitStreamsCacheSize() {
return shortCircuitStreamsCacheSize;
}
/**
* @return the shortCircuitStreamsCacheExpiryMs
*/
public long getShortCircuitStreamsCacheExpiryMs() {
return shortCircuitStreamsCacheExpiryMs;
}
/**
* @return the shortCircuitSharedMemoryWatcherInterruptCheckMs
*/
public int getShortCircuitSharedMemoryWatcherInterruptCheckMs() {
return shortCircuitSharedMemoryWatcherInterruptCheckMs;
}
    /**
     * Whether mmap-based short-circuit reads are enabled.
     *
     * @return the {@code shortCircuitMmapEnabled} flag
     */
    public boolean isShortCircuitMmapEnabled() {
      return shortCircuitMmapEnabled;
    }
    /**
     * Capacity of the short-circuit mmap cache.
     *
     * @return the configured {@code shortCircuitMmapCacheSize}
     */
    public int getShortCircuitMmapCacheSize() {
      return shortCircuitMmapCacheSize;
    }
    /**
     * Expiry interval, in milliseconds, for entries in the
     * short-circuit mmap cache.
     *
     * @return the configured {@code shortCircuitMmapCacheExpiryMs}
     */
    public long getShortCircuitMmapCacheExpiryMs() {
      return shortCircuitMmapCacheExpiryMs;
    }
    /**
     * Retry timeout for the short-circuit mmap cache (units not visible
     * here — presumably milliseconds; confirm against the config key).
     *
     * @return the configured {@code shortCircuitMmapCacheRetryTimeout}
     */
    public long getShortCircuitMmapCacheRetryTimeout() {
      return shortCircuitMmapCacheRetryTimeout;
    }
    /**
     * Threshold, in milliseconds, after which short-circuit cache
     * entries are considered stale.
     *
     * @return the configured {@code shortCircuitCacheStaleThresholdMs}
     */
    public long getShortCircuitCacheStaleThresholdMs() {
      return shortCircuitCacheStaleThresholdMs;
    }
    /**
     * Interval, in seconds, for which the domain socket path is disabled
     * after a failure. The constructor reads this from
     * {@code DFS_DOMAIN_SOCKET_DISABLE_INTERVAL_SECOND_KEY} and validates
     * that it is non-negative.
     *
     * @return the configured {@code domainSocketDisableIntervalSeconds}
     */
    public long getDomainSocketDisableIntervalSeconds() {
      return domainSocketDisableIntervalSeconds;
    }
    /**
     * Expiry interval, in milliseconds, for the key provider cache
     * (read from {@code DFS_CLIENT_KEY_PROVIDER_CACHE_EXPIRY_MS} in the
     * constructor).
     *
     * @return the configured {@code keyProviderCacheExpiryMs}
     */
    public long getKeyProviderCacheExpiryMs() {
      return keyProviderCacheExpiryMs;
    }
public String confAsString() {
return "shortCircuitStreamsCacheSize = "
+ shortCircuitStreamsCacheSize
+ ", shortCircuitStreamsCacheExpiryMs = "
+ shortCircuitStreamsCacheExpiryMs
+ ", shortCircuitMmapCacheSize = "
+ shortCircuitMmapCacheSize
+ ", shortCircuitMmapCacheExpiryMs = "
+ shortCircuitMmapCacheExpiryMs
+ ", shortCircuitMmapCacheRetryTimeout = "
+ shortCircuitMmapCacheRetryTimeout
+ ", shortCircuitCacheStaleThresholdMs = "
+ shortCircuitCacheStaleThresholdMs
+ ", socketCacheCapacity = "
+ socketCacheCapacity
+ ", socketCacheExpiry = "
+ socketCacheExpiry
+ ", shortCircuitLocalReads = "
+ shortCircuitLocalReads
+ ", useLegacyBlockReaderLocal = "
+ useLegacyBlockReaderLocal
+ ", domainSocketDataTraffic = "
+ domainSocketDataTraffic
+ ", shortCircuitSharedMemoryWatcherInterruptCheckMs = "
+ shortCircuitSharedMemoryWatcherInterruptCheckMs
+ ", keyProviderCacheExpiryMs = "
+ keyProviderCacheExpiryMs
+ ", domainSocketDisableIntervalSeconds = "
+ domainSocketDisableIntervalSeconds;
}
}
}
| ShortCircuitConf |
java | apache__camel | components/camel-weather/src/main/java/org/apache/camel/component/weather/geolocation/FreeGeoIpGeoLocationProvider.java | {
"start": 1361,
"end": 3750
} | class ____ implements GeoLocationProvider {
private final WeatherConfiguration configuration;
public FreeGeoIpGeoLocationProvider(WeatherConfiguration configuration) {
this.configuration = configuration;
}
@Override
public GeoLocation getCurrentGeoLocation() throws Exception {
HttpClient httpClient = configuration.getHttpClient();
if (isEmpty(configuration.getGeolocationAccessKey())) {
throw new IllegalStateException("The geolocation service requires a mandatory geolocationAccessKey");
}
if (isEmpty(configuration.getGeolocationRequestHostIP())) {
throw new IllegalStateException("The geolocation service requires a mandatory geolocationRequestHostIP");
}
String url = String.format("http://api.ipstack.com/%s?access_key=%s&legacy=1&output=json",
configuration.getGeolocationRequestHostIP(), configuration.getGeolocationAccessKey());
HttpGet getMethod = new HttpGet(url);
return httpClient.execute(
getMethod,
response -> {
try {
if (response.getCode() != HttpStatus.SC_OK) {
throw new IllegalStateException(
"Got the unexpected http-status '" + response.getCode()
+ "' for the geolocation");
}
String geoLocation = EntityUtils.toString(response.getEntity(), "UTF-8");
if (isEmpty(geoLocation)) {
throw new IllegalStateException(
"Got the unexpected value '" + geoLocation + "' for the geolocation");
}
ObjectMapper mapper = new ObjectMapper();
JsonNode node = mapper.readValue(geoLocation, JsonNode.class);
JsonNode latitudeNode = notNull(node.get("latitude"), "latitude");
JsonNode longitudeNode = notNull(node.get("longitude"), "longitude");
return new GeoLocation(longitudeNode.asText(), latitudeNode.asText());
} finally {
getMethod.reset();
}
});
}
}
| FreeGeoIpGeoLocationProvider |
java | spring-projects__spring-boot | core/spring-boot/src/main/java/org/springframework/boot/SpringApplication.java | {
"start": 66629,
"end": 67750
} | class ____ {
private @Nullable Duration timeTakenToStarted;
protected abstract long startTime();
protected abstract @Nullable Long processUptime();
protected abstract String action();
final Duration started() {
long now = System.currentTimeMillis();
this.timeTakenToStarted = Duration.ofMillis(now - startTime());
return this.timeTakenToStarted;
}
Duration timeTakenToStarted() {
Assert.state(this.timeTakenToStarted != null,
"timeTakenToStarted is not set. Make sure to call started() before this method");
return this.timeTakenToStarted;
}
private Duration ready() {
long now = System.currentTimeMillis();
return Duration.ofMillis(now - startTime());
}
static Startup create() {
ClassLoader classLoader = Startup.class.getClassLoader();
return (ClassUtils.isPresent("jdk.crac.management.CRaCMXBean", classLoader)
&& ClassUtils.isPresent("org.crac.management.CRaCMXBean", classLoader))
? new CoordinatedRestoreAtCheckpointStartup() : new StandardStartup();
}
}
/**
* Standard {@link Startup} implementation.
*/
private static final | Startup |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/event/EventDispatcher.java | {
"start": 1860,
"end": 2475
} | class ____<T extends Event> extends
AbstractService implements EventHandler<T> {
private final EventHandler<T> handler;
private final BlockingQueue<T> eventQueue =
new LinkedBlockingDeque<>();
private final Thread eventProcessor;
private volatile boolean stopped = false;
private boolean shouldExitOnError = true;
private EventTypeMetrics metrics;
private static final Logger LOG =
LoggerFactory.getLogger(EventDispatcher.class);
private static final Marker FATAL =
MarkerFactory.getMarker("FATAL");
private Clock clock = new MonotonicClock();
private final | EventDispatcher |
java | google__dagger | javatests/artifacts/hilt-android/simple/app/src/sharedTest/java/dagger/hilt/android/simple/SimpleActivityTest.java | {
"start": 1637,
"end": 3747
} | class ____ {
@Rule public HiltAndroidRule rule = new HiltAndroidRule(this);
@BindValue @UserName String fakeUserName = "FakeUser";
@BindValue @Model String fakeModel = "FakeModel";
@Inject @UserName String injectedUserName;
@Inject @Model String injectedModel;
@Test
public void testInjectedUserName() throws Exception {
assertThat(injectedUserName).isNull();
rule.inject();
assertThat(injectedUserName).isEqualTo("FakeUser");
}
@Test
public void testInjectedModel() throws Exception {
assertThat(injectedModel).isNull();
rule.inject();
assertThat(injectedModel).isEqualTo("FakeModel");
}
@Test
public void testActivityInject() throws Exception {
try (ActivityScenario<SimpleActivity> scenario =
ActivityScenario.launch(SimpleActivity.class)) {
onView(withId(R.id.greeting))
.check(matches(withText("Hello, FakeUser! You are on build FakeModel.")));
} catch (RuntimeException e) {
// Just skip this test if the root view never becomes active.
// This issue occurs sporadically in emulator tests and causes the test to be flaky.
// It is likely caused by a race between our activity and a dialog or lock screen but
// it's difficult to debug this since it only fails in CI builds.
// TODO(b/176111885): Remove this once this bug is fixed.
if (!e.getMessage().startsWith("Waited for the root of the view hierarchy")) {
throw e;
}
}
}
@Test
public void verifyMainActivity() {
try (ActivityScenario<SimpleActivity> scenario =
ActivityScenario.launch(SimpleActivity.class)) {
scenario.onActivity(
activity -> {
assertThat(activity.getClass().getSuperclass().getSimpleName())
.isEqualTo("Hilt_SimpleActivity");
}
);
}
}
@Test
public void verifyThing() {
try (ActivityScenario<SimpleActivity> scenario =
ActivityScenario.launch(SimpleActivity.class)) {
scenario.onActivity(activity -> assertThat(activity.thing).isInstanceOf(ThingImpl.class));
}
}
}
| SimpleActivityTest |
java | spring-projects__spring-framework | spring-test/src/test/java/org/springframework/test/web/servlet/samples/client/standalone/resultmatches/RequestAttributeAssertionTests.java | {
"start": 3301,
"end": 3430
} | class ____ {
@GetMapping(path="/{id}", produces="application/json")
String show() {
return "view";
}
}
}
| SimpleController |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/test/java/org/apache/hadoop/yarn/server/federation/store/records/TestFederationProtocolRecords.java | {
"start": 9973,
"end": 24282
} | class ____ extends BasePBImplRecordsTest {
@BeforeAll
public static void setup() throws Exception {
generateByNewInstance(ApplicationId.class);
generateByNewInstance(Version.class);
generateByNewInstance(SubClusterId.class);
generateByNewInstance(SubClusterInfo.class);
generateByNewInstance(Priority.class);
generateByNewInstance(URL.class);
generateByNewInstance(Resource.class);
generateByNewInstance(ContainerRetryContext.class);
generateByNewInstance(LocalResource.class);
generateByNewInstance(ContainerLaunchContext.class);
generateByNewInstance(LogAggregationContext.class);
generateByNewInstance(ApplicationSubmissionContext.class);
generateByNewInstance(ApplicationHomeSubCluster.class);
generateByNewInstance(SubClusterPolicyConfiguration.class);
generateByNewInstance(RouterMasterKey.class);
generateByNewInstance(YARNDelegationTokenIdentifier.class);
generateByNewInstance(RouterStoreToken.class);
generateByNewInstance(ReservationId.class);
}
@Test
public void testSubClusterId() throws Exception {
validatePBImplRecord(SubClusterIdPBImpl.class, SubClusterIdProto.class);
}
@Test
public void testSubClusterInfo() throws Exception {
validatePBImplRecord(SubClusterInfoPBImpl.class, SubClusterInfoProto.class);
}
@Test
public void testSubClusterRegisterRequest() throws Exception {
validatePBImplRecord(SubClusterRegisterRequestPBImpl.class,
SubClusterRegisterRequestProto.class);
}
@Test
public void testSubClusterRegisterResponse() throws Exception {
validatePBImplRecord(SubClusterRegisterResponsePBImpl.class,
SubClusterRegisterResponseProto.class);
}
@Test
public void testSubClusterDeregisterRequest() throws Exception {
validatePBImplRecord(SubClusterDeregisterRequestPBImpl.class,
SubClusterDeregisterRequestProto.class);
}
@Test
public void testSubClusterDeregisterResponse() throws Exception {
validatePBImplRecord(SubClusterDeregisterResponsePBImpl.class,
SubClusterDeregisterResponseProto.class);
}
@Test
public void testSubClusterHeartbeatRequest() throws Exception {
validatePBImplRecord(SubClusterHeartbeatRequestPBImpl.class,
SubClusterHeartbeatRequestProto.class);
}
@Test
public void testSubClusterHeartbeatResponse() throws Exception {
validatePBImplRecord(SubClusterHeartbeatResponsePBImpl.class,
SubClusterHeartbeatResponseProto.class);
}
@Test
public void testGetSubClusterRequest() throws Exception {
validatePBImplRecord(GetSubClusterInfoRequestPBImpl.class,
GetSubClusterInfoRequestProto.class);
}
@Test
public void testGetSubClusterResponse() throws Exception {
validatePBImplRecord(GetSubClusterInfoResponsePBImpl.class,
GetSubClusterInfoResponseProto.class);
}
@Test
public void testGetSubClustersInfoRequest() throws Exception {
validatePBImplRecord(GetSubClustersInfoRequestPBImpl.class,
GetSubClustersInfoRequestProto.class);
}
@Test
public void testGetSubClustersInfoResponse() throws Exception {
validatePBImplRecord(GetSubClustersInfoResponsePBImpl.class,
GetSubClustersInfoResponseProto.class);
}
@Test
public void testAddApplicationHomeSubClusterRequest() throws Exception {
validatePBImplRecord(AddApplicationHomeSubClusterRequestPBImpl.class,
AddApplicationHomeSubClusterRequestProto.class);
}
@Test
public void testAddApplicationHomeSubClusterResponse() throws Exception {
validatePBImplRecord(AddApplicationHomeSubClusterResponsePBImpl.class,
AddApplicationHomeSubClusterResponseProto.class);
}
@Test
public void testUpdateApplicationHomeSubClusterRequest() throws Exception {
validatePBImplRecord(UpdateApplicationHomeSubClusterRequestPBImpl.class,
UpdateApplicationHomeSubClusterRequestProto.class);
}
@Test
public void testUpdateApplicationHomeSubClusterResponse() throws Exception {
validatePBImplRecord(UpdateApplicationHomeSubClusterResponsePBImpl.class,
UpdateApplicationHomeSubClusterResponseProto.class);
}
@Test
public void testGetApplicationHomeSubClusterRequest() throws Exception {
validatePBImplRecord(GetApplicationHomeSubClusterRequestPBImpl.class,
GetApplicationHomeSubClusterRequestProto.class);
}
@Test
public void testGetApplicationHomeSubClusterResponse() throws Exception {
validatePBImplRecord(GetApplicationHomeSubClusterResponsePBImpl.class,
GetApplicationHomeSubClusterResponseProto.class);
}
@Test
public void testGetApplicationsHomeSubClusterRequest() throws Exception {
validatePBImplRecord(GetApplicationsHomeSubClusterRequestPBImpl.class,
GetApplicationsHomeSubClusterRequestProto.class);
}
@Test
public void testGetApplicationsHomeSubClusterResponse() throws Exception {
validatePBImplRecord(GetApplicationsHomeSubClusterResponsePBImpl.class,
GetApplicationsHomeSubClusterResponseProto.class);
}
@Test
public void testDeleteApplicationHomeSubClusterRequest() throws Exception {
validatePBImplRecord(DeleteApplicationHomeSubClusterRequestPBImpl.class,
DeleteApplicationHomeSubClusterRequestProto.class);
}
@Test
public void testDeleteApplicationHomeSubClusterResponse() throws Exception {
validatePBImplRecord(DeleteApplicationHomeSubClusterResponsePBImpl.class,
DeleteApplicationHomeSubClusterResponseProto.class);
}
@Test
public void testGetSubClusterPolicyConfigurationRequest() throws Exception {
validatePBImplRecord(GetSubClusterPolicyConfigurationRequestPBImpl.class,
GetSubClusterPolicyConfigurationRequestProto.class);
}
@Test
public void testGetSubClusterPolicyConfigurationResponse() throws Exception {
validatePBImplRecord(GetSubClusterPolicyConfigurationResponsePBImpl.class,
GetSubClusterPolicyConfigurationResponseProto.class);
}
@Test
public void testSetSubClusterPolicyConfigurationRequest() throws Exception {
validatePBImplRecord(SetSubClusterPolicyConfigurationRequestPBImpl.class,
SetSubClusterPolicyConfigurationRequestProto.class);
}
@Test
public void testSetSubClusterPolicyConfigurationResponse() throws Exception {
validatePBImplRecord(SetSubClusterPolicyConfigurationResponsePBImpl.class,
SetSubClusterPolicyConfigurationResponseProto.class);
}
@Test
public void testGetSubClusterPoliciesConfigurationsRequest()
throws Exception {
validatePBImplRecord(GetSubClusterPoliciesConfigurationsRequestPBImpl.class,
GetSubClusterPoliciesConfigurationsRequestProto.class);
}
@Test
public void testGetSubClusterPoliciesConfigurationsResponse()
throws Exception {
validatePBImplRecord(
GetSubClusterPoliciesConfigurationsResponsePBImpl.class,
GetSubClusterPoliciesConfigurationsResponseProto.class);
}
@Test
public void testRouterMasterKey() throws Exception {
validatePBImplRecord(RouterMasterKeyPBImpl.class, RouterMasterKeyProto.class);
}
@Test
public void testRouterMasterKeyRequest() throws Exception {
validatePBImplRecord(RouterMasterKeyRequestPBImpl.class, RouterMasterKeyRequestProto.class);
}
@Test
public void testRouterMasterKeyResponse() throws Exception {
validatePBImplRecord(RouterMasterKeyResponsePBImpl.class, RouterMasterKeyResponseProto.class);
}
@Test
public void testRouterStoreToken() throws Exception {
validatePBImplRecord(RouterStoreTokenPBImpl.class, RouterStoreTokenProto.class);
}
@Test
public void testRouterRMTokenRequest() throws Exception {
validatePBImplRecord(RouterRMTokenRequestPBImpl.class, RouterRMTokenRequestProto.class);
}
@Test
public void testRouterRMTokenResponse() throws Exception {
validatePBImplRecord(RouterRMTokenResponsePBImpl.class, RouterRMTokenResponseProto.class);
}
@Test
public void testApplicationHomeSubCluster() throws Exception {
validatePBImplRecord(ApplicationHomeSubClusterPBImpl.class,
ApplicationHomeSubClusterProto.class);
}
@Test
public void testGetReservationHomeSubClusterRequest() throws Exception {
validatePBImplRecord(GetReservationHomeSubClusterRequestPBImpl.class,
GetReservationHomeSubClusterRequestProto.class);
}
@Test
public void testValidateApplicationHomeSubClusterEqual() throws Exception {
long now = Time.now();
ApplicationId appId1 = ApplicationId.newInstance(now, 1);
SubClusterId subClusterId1 = SubClusterId.newInstance("SC-1");
ApplicationHomeSubCluster applicationHomeSubCluster1 =
ApplicationHomeSubCluster.newInstance(appId1, subClusterId1);
ApplicationId appId2 = ApplicationId.newInstance(now, 1);
SubClusterId subClusterId2 = SubClusterId.newInstance("SC-1");
ApplicationHomeSubCluster applicationHomeSubCluster2 =
ApplicationHomeSubCluster.newInstance(appId2, subClusterId2);
assertEquals(applicationHomeSubCluster1, applicationHomeSubCluster2);
}
@Test
public void testValidateReservationHomeSubClusterEqual() throws Exception {
long now = Time.now();
ReservationId reservationId1 = ReservationId.newInstance(now, 1);
SubClusterId subClusterId1 = SubClusterId.newInstance("SC-1");
ReservationHomeSubCluster reservationHomeSubCluster1 =
ReservationHomeSubCluster.newInstance(reservationId1, subClusterId1);
ReservationId reservationId2 = ReservationId.newInstance(now, 1);
SubClusterId subClusterId2 = SubClusterId.newInstance("SC-1");
ReservationHomeSubCluster reservationHomeSubCluster2 =
ReservationHomeSubCluster.newInstance(reservationId2, subClusterId2);
assertEquals(reservationHomeSubCluster1, reservationHomeSubCluster2);
}
@Test
public void testSubClusterIdEqual() throws Exception {
SubClusterId subClusterId1 = SubClusterId.newInstance("SC-1");
SubClusterId subClusterId2 = SubClusterId.newInstance("SC-1");
assertEquals(subClusterId1, subClusterId2);
}
@Test
public void testSubClusterIdInfoEqual() throws Exception {
SubClusterIdInfo subClusterIdInfo1 = new SubClusterIdInfo("SC-1");
SubClusterIdInfo subClusterIdInfo2 = new SubClusterIdInfo("SC-1");
assertEquals(subClusterIdInfo1, subClusterIdInfo2);
}
@Test
public void testSubClusterPolicyConfigurationEqual() throws Exception {
String queue1 = "queue1";
WeightedPolicyInfo policyInfo1 = mock(WeightedPolicyInfo.class);
ByteBuffer buf1 = policyInfo1.toByteBuffer();
SubClusterPolicyConfiguration configuration1 = SubClusterPolicyConfiguration
.newInstance(queue1, policyInfo1.getClass().getCanonicalName(), buf1);
String queue2 = "queue1";
WeightedPolicyInfo policyInfo2 = mock(WeightedPolicyInfo.class);
ByteBuffer buf2 = policyInfo1.toByteBuffer();
SubClusterPolicyConfiguration configuration2 = SubClusterPolicyConfiguration
.newInstance(queue2, policyInfo2.getClass().getCanonicalName(), buf2);
assertEquals(configuration1, configuration2);
}
@Test
public void testSubClusterInfoEqual() throws Exception {
String scAmRMAddress = "5.6.7.8:5";
String scClientRMAddress = "5.6.7.8:6";
String scRmAdminAddress = "5.6.7.8:7";
String scWebAppAddress = "127.0.0.1:8080";
String capabilityJson = "-";
long currentTime = Time.now();
SubClusterInfo sc1 =
SubClusterInfo.newInstance(SubClusterId.newInstance("SC-1"),
scAmRMAddress, scClientRMAddress, scRmAdminAddress, scWebAppAddress,
SubClusterState.SC_RUNNING, currentTime, capabilityJson);
SubClusterInfo sc2 =
SubClusterInfo.newInstance(SubClusterId.newInstance("SC-1"),
scAmRMAddress, scClientRMAddress, scRmAdminAddress, scWebAppAddress,
SubClusterState.SC_RUNNING, currentTime, capabilityJson);
assertEquals(sc1, sc2);
}
@Test
public void testApplicationHomeSubClusterEqual() throws Exception {
// Case1, We create 2 ApplicationHomeSubCluster,
// all properties are consistent
// We expect the result to be equal.
ApplicationId appId1 = ApplicationId.newInstance(1, 1);
SubClusterId subClusterId1 = SubClusterId.newInstance("SC");
ApplicationSubmissionContext context1 =
ApplicationSubmissionContext.newInstance(appId1, "test", "default",
Priority.newInstance(0), null, true, true,
2, Resource.newInstance(10, 2), "test");
long createTime = Time.now();
ApplicationHomeSubCluster ahsc1 =
ApplicationHomeSubCluster.newInstance(appId1, createTime, subClusterId1, context1);
ApplicationId appId2 = ApplicationId.newInstance(1, 1);
SubClusterId subClusterId2 = SubClusterId.newInstance("SC");
ApplicationSubmissionContext context2 =
ApplicationSubmissionContext.newInstance(appId1, "test", "default",
Priority.newInstance(0), null, true, true,
2, Resource.newInstance(10, 2), "test");
ApplicationHomeSubCluster ahsc2 =
ApplicationHomeSubCluster.newInstance(appId2, createTime, subClusterId2, context2);
assertEquals(ahsc1, ahsc2);
// Case2, We create 2 ApplicationHomeSubCluster, appId is different
// We expect the results to be unequal
ApplicationId appId3 = ApplicationId.newInstance(2, 1);
ApplicationSubmissionContext context3 =
ApplicationSubmissionContext.newInstance(appId3, "test", "default",
Priority.newInstance(0), null, true, true,
2, Resource.newInstance(10, 2), "test");
ApplicationHomeSubCluster ahsc3 =
ApplicationHomeSubCluster.newInstance(appId3, createTime, subClusterId2, context3);
assertNotEquals(ahsc1, ahsc3);
// Case3, We create 2 ApplicationHomeSubCluster, createTime is different
// We expect the results to be unequal
long createTime2 = Time.now() + 1000;
ApplicationHomeSubCluster ahsc4 =
ApplicationHomeSubCluster.newInstance(appId2, createTime2, subClusterId1, context2);
assertNotEquals(ahsc1, ahsc4);
// Case4, We create 2 ApplicationHomeSubCluster, submissionContext is different
// We expect the results to be unequal
ApplicationHomeSubCluster ahsc5 =
ApplicationHomeSubCluster.newInstance(appId2, createTime2, subClusterId2, context3);
assertNotEquals(ahsc1, ahsc5);
}
}
| TestFederationProtocolRecords |
java | quarkusio__quarkus | extensions/spring-data-jpa/deployment/src/main/java/io/quarkus/spring/data/deployment/generate/FragmentMethodsUtil.java | {
"start": 337,
"end": 1853
} | interface ____ throws an
* exception indicating the problem
*/
static DotName getImplementationDotName(DotName customInterfaceToImplement, IndexView index) {
Collection<ClassInfo> knownImplementors = index.getAllKnownImplementors(customInterfaceToImplement);
if (knownImplementors.size() > 1) {
DotName previouslyFound = null;
for (ClassInfo knownImplementor : knownImplementors) {
if (knownImplementor.name().toString().endsWith("Impl")) { // the default suffix that Spring Data JPA looks for is 'Impl'
if (previouslyFound != null) { // make sure we don't have multiple implementations suffixed with 'Impl'
throw new IllegalArgumentException(
"Interface " + customInterfaceToImplement
+ " must contain a single implementation whose name ends with 'Impl'. Multiple implementations were found: "
+ previouslyFound + "," + knownImplementor);
}
previouslyFound = knownImplementor.name();
}
}
return previouslyFound;
} else if (knownImplementors.size() == 1) {
return knownImplementors.iterator().next().name();
} else {
throw new IllegalArgumentException(
"No implementation of interface " + customInterfaceToImplement + " was found");
}
}
}
| or |
java | google__guice | extensions/struts2/src/com/google/inject/struts2/Struts2Factory.java | {
"start": 3473,
"end": 5195
} | class ____ raw type.
public Object buildBean(Class clazz, Map<String, Object> extraContext) {
if (strutsInjector == null) {
synchronized (this) {
if (strutsInjector == null) {
createInjector();
}
}
}
return strutsInjector.getInstance(clazz);
}
private void createInjector() {
logger.info("Loading struts2 Guice support...");
// Something is wrong, since this should be there if GuiceServletContextListener
// was present.
if (injector == null) {
logger.severe(ERROR_NO_INJECTOR);
throw new RuntimeException(ERROR_NO_INJECTOR);
}
this.strutsInjector =
injector.createChildInjector(
new AbstractModule() {
@Override
protected void configure() {
// Tell the injector about all the action classes, etc., so it
// can validate them at startup.
for (Class<?> boundClass : boundClasses) {
// TODO: Set source from Struts XML.
bind(boundClass);
}
// Validate the interceptor class.
for (ProvidedInterceptor interceptor : interceptors) {
interceptor.validate(binder());
}
}
});
// Inject interceptors.
for (ProvidedInterceptor interceptor : interceptors) {
interceptor.inject();
}
logger.info("Injector created successfully.");
}
@Override
@SuppressWarnings("unchecked")
public Interceptor buildInterceptor(
InterceptorConfig interceptorConfig, Map<String, String> interceptorRefParams)
throws ConfigurationException {
// Ensure the interceptor | uses |
java | quarkusio__quarkus | extensions/arc/deployment/src/test/java/io/quarkus/arc/test/interceptor/staticmethods/InterceptedStaticMethodTest.java | {
"start": 3297,
"end": 3537
} | class ____ {
// @InterceptMe is added by the transformer
public static String ping(String val) {
return val.toUpperCase();
}
}
@Priority(1)
@Interceptor
@InterceptMe
static | AnotherSimple |
java | quarkusio__quarkus | independent-projects/arc/processor/src/test/java/io/quarkus/arc/processor/SubclassSkipPredicateTest.java | {
"start": 2238,
"end": 2474
} | class ____<T extends Number, UNUSED> {
String echo(T payload) {
return payload.toString().toUpperCase();
}
T getName() {
return null;
}
}
@ApplicationScoped
static | Base |
java | reactor__reactor-core | reactor-core/src/main/java/reactor/core/publisher/FluxErrorOnRequest.java | {
"start": 1604,
"end": 2580
} | class ____ implements InnerProducer {
final CoreSubscriber<?> actual;
final Throwable error;
volatile int once;
static final AtomicIntegerFieldUpdater<ErrorSubscription> ONCE =
AtomicIntegerFieldUpdater.newUpdater(ErrorSubscription.class, "once");
ErrorSubscription(CoreSubscriber<?> actual, Throwable error) {
this.actual = actual;
this.error = error;
}
@Override
public void request(long n) {
if (Operators.validate(n)) {
if (ONCE.compareAndSet(this, 0, 1)) {
actual.onError(error);
}
}
}
@Override
public void cancel() {
once = 1;
}
@Override
public CoreSubscriber actual() {
return actual;
}
@Override
public @Nullable Object scanUnsafe(Attr key) {
if (key == Attr.ERROR) return error;
if (key == Attr.CANCELLED || key == Attr.TERMINATED)
return once == 1;
if (key == Attr.RUN_STYLE) return Attr.RunStyle.SYNC;
return InnerProducer.super.scanUnsafe(key);
}
}
}
| ErrorSubscription |
java | spring-projects__spring-framework | spring-test/src/test/java/org/springframework/test/context/support/AbstractContextConfigurationUtilsTests.java | {
"start": 6700,
"end": 6762
} | class ____ {
}
@MetaLocationsBarConfig
static | MetaLocationsFoo |
java | grpc__grpc-java | examples/example-orca/src/main/java/io/grpc/examples/orca/CustomBackendMetricsClient.java | {
"start": 1454,
"end": 4093
} | class ____ {
private static final Logger logger = Logger.getLogger(CustomBackendMetricsClient.class.getName());
private final GreeterGrpc.GreeterBlockingStub blockingStub;
/** Construct client for accessing HelloWorld server using the existing channel. */
public CustomBackendMetricsClient(Channel channel) {
blockingStub = GreeterGrpc.newBlockingStub(channel);
}
/** Say hello to server. */
public void greet(String name) {
logger.info("Will try to greet " + name + " ...");
HelloRequest request = HelloRequest.newBuilder().setName(name).build();
HelloReply response;
try {
response = blockingStub.sayHello(request);
} catch (StatusRuntimeException e) {
logger.log(Level.WARNING, "RPC failed: {0}", e.getStatus());
return;
}
logger.info("Greeting: " + response.getMessage());
}
/**
* Greet server. If provided, the first element of {@code args} is the name to use in the
* greeting. The second argument is the target server.
*/
public static void main(String[] args) throws Exception {
String user = "orca tester";
// The example defaults to the same behavior as the hello world example.
// To receive more periodic OOB metrics reports, use duration argument to a longer value.
String target = "localhost:50051";
long timeBeforeShutdown = 1500;
if (args.length > 0) {
if ("--help".equals(args[0])) {
System.err.println("Usage: [name [duration [target]]]");
System.err.println("");
System.err.println(" name The name you wish to be greeted by. Defaults to " + user);
System.err.println(" duration The time period in milliseconds that the client application " +
"wait until shutdown. Defaults to " + timeBeforeShutdown);
System.err.println(" target The server to connect to. Defaults to " + target);
System.exit(1);
}
user = args[0];
}
if (args.length > 1) {
timeBeforeShutdown = Long.parseLong(args[1]);
}
if (args.length > 2) {
target = args[2];
}
LoadBalancerRegistry.getDefaultRegistry().register(
new CustomBackendMetricsLoadBalancerProvider());
ManagedChannel channel = Grpc.newChannelBuilder(target, InsecureChannelCredentials.create())
.defaultLoadBalancingPolicy(EXAMPLE_LOAD_BALANCER)
.build();
try {
CustomBackendMetricsClient client = new CustomBackendMetricsClient(channel);
client.greet(user);
Thread.sleep(timeBeforeShutdown);
} finally {
channel.shutdownNow().awaitTermination(5, TimeUnit.SECONDS);
}
}
}
| CustomBackendMetricsClient |
java | micronaut-projects__micronaut-core | core/src/main/java/io/micronaut/core/type/ArgumentCoercible.java | {
"start": 840,
"end": 958
} | interface ____<T> {
/**
* @return The argument
*/
@NonNull Argument<T> asArgument();
}
| ArgumentCoercible |
java | apache__flink | flink-tests/src/test/java/org/apache/flink/test/runtime/BatchShuffleITCaseBase.java | {
"start": 2599,
"end": 6033
} | class ____ {
private static final String RECORD = "batch shuffle test";
private static final int NUM_TASK_MANAGERS = 2;
private static final int NUM_SLOTS_PER_TASK_MANAGER = 10;
private static final int PARALLELISM = NUM_SLOTS_PER_TASK_MANAGER;
private static final int[] NUM_RECEIVED_RECORDS = new int[PARALLELISM];
private static Path tmpDir;
@BeforeAll
static void setupClass(@TempDir Path path) throws Exception {
tmpDir = TempDirUtils.newFolder(path, UUID.randomUUID().toString()).toPath();
}
@BeforeEach
public void setup() {
Arrays.fill(NUM_RECEIVED_RECORDS, 0);
}
protected JobGraph createJobGraph(
int numRecordsToSend,
boolean failExecution,
Configuration configuration,
boolean enableAdaptiveAutoParallelism) {
return createJobGraph(
numRecordsToSend,
failExecution,
false,
configuration,
enableAdaptiveAutoParallelism);
}
protected JobGraph createJobGraph(
int numRecordsToSend,
boolean failExecution,
boolean deletePartitionFile,
Configuration configuration,
boolean enableAdaptiveAutoParallelism) {
configuration.set(
BatchExecutionOptions.ADAPTIVE_AUTO_PARALLELISM_ENABLED,
enableAdaptiveAutoParallelism);
StreamExecutionEnvironment env =
StreamExecutionEnvironment.getExecutionEnvironment(configuration);
RestartStrategyUtils.configureFixedDelayRestartStrategy(env, 2, 0L);
env.setParallelism(NUM_SLOTS_PER_TASK_MANAGER);
DataStream<String> source =
new DataStreamSource<>(
env,
BasicTypeInfo.STRING_TYPE_INFO,
new StreamSource<>(new StringSource(numRecordsToSend)),
true,
"source",
Boundedness.BOUNDED)
.setParallelism(PARALLELISM);
source.rebalance()
.map(value -> value)
.shuffle()
.addSink(new VerifySink(failExecution, deletePartitionFile));
StreamGraph streamGraph = env.getStreamGraph();
streamGraph.setJobType(JobType.BATCH);
return StreamingJobGraphGenerator.createJobGraph(streamGraph);
}
protected Configuration getConfiguration() {
Configuration configuration = new Configuration();
configuration.set(CoreOptions.TMP_DIRS, tmpDir.toString());
configuration.set(ExecutionOptions.RUNTIME_MODE, RuntimeExecutionMode.BATCH);
configuration.set(NettyShuffleEnvironmentOptions.NETWORK_REQUEST_BACKOFF_MAX, 100);
return configuration;
}
protected void executeJob(JobGraph jobGraph, Configuration configuration, int numRecordsToSend)
throws Exception {
JobGraphRunningUtil.execute(
jobGraph, configuration, NUM_TASK_MANAGERS, NUM_SLOTS_PER_TASK_MANAGER);
checkAllDataReceived(numRecordsToSend);
}
private void checkAllDataReceived(int numRecordsToSend) {
assertThat(Arrays.stream(NUM_RECEIVED_RECORDS).sum())
.isEqualTo(numRecordsToSend * PARALLELISM);
}
private static | BatchShuffleITCaseBase |
java | spring-projects__spring-framework | spring-r2dbc/src/main/java/org/springframework/r2dbc/connection/init/CompositeDatabasePopulator.java | {
"start": 1224,
"end": 2997
} | class ____ implements DatabasePopulator {
private final List<DatabasePopulator> populators = new ArrayList<>(4);
/**
* Create an empty {@code CompositeDatabasePopulator}.
* @see #setPopulators
* @see #addPopulators
*/
public CompositeDatabasePopulator() {
}
/**
* Create a {@code CompositeDatabasePopulator}. with the given populators.
* @param populators one or more populators to delegate to.
*/
public CompositeDatabasePopulator(Collection<DatabasePopulator> populators) {
Assert.notNull(populators, "DatabasePopulators must not be null");
this.populators.addAll(populators);
}
/**
* Create a {@code CompositeDatabasePopulator} with the given populators.
* @param populators one or more populators to delegate to.
*/
public CompositeDatabasePopulator(DatabasePopulator... populators) {
Assert.notNull(populators, "DatabasePopulators must not be null");
this.populators.addAll(Arrays.asList(populators));
}
/**
* Specify one or more populators to delegate to.
*/
public void setPopulators(DatabasePopulator... populators) {
Assert.notNull(populators, "DatabasePopulators must not be null");
this.populators.clear();
this.populators.addAll(Arrays.asList(populators));
}
/**
* Add one or more populators to the list of delegates.
*/
public void addPopulators(DatabasePopulator... populators) {
Assert.notNull(populators, "DatabasePopulators must not be null");
this.populators.addAll(Arrays.asList(populators));
}
@Override
public Mono<Void> populate(Connection connection) throws ScriptException {
Assert.notNull(connection, "Connection must not be null");
return Flux.fromIterable(this.populators).concatMap(populator -> populator.populate(connection))
.then();
}
}
| CompositeDatabasePopulator |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/query/sqm/tree/insert/SqmConflictUpdateAction.java | {
"start": 1238,
"end": 6332
} | class ____<T> implements SqmNode, JpaConflictUpdateAction<T> {
private final SqmInsertStatement<T> insertStatement;
private final SqmSetClause setClause;
private @Nullable SqmWhereClause whereClause;
public SqmConflictUpdateAction(SqmInsertStatement<T> insertStatement) {
this.insertStatement = insertStatement;
this.setClause = new SqmSetClause();
}
private SqmConflictUpdateAction(
SqmInsertStatement<T> insertStatement,
SqmSetClause setClause,
@Nullable SqmWhereClause whereClause) {
this.insertStatement = insertStatement;
this.setClause = setClause;
this.whereClause = whereClause;
}
@Override
public <Y, X extends Y> SqmConflictUpdateAction<T> set(SingularAttribute<? super T, Y> attribute, @Nullable X value) {
applyAssignment( getTarget().get( attribute ), (SqmExpression<? extends Y>) nodeBuilder().value( value ) );
return this;
}
@Override
public <Y> SqmConflictUpdateAction<T> set(SingularAttribute<? super T, Y> attribute, Expression<? extends Y> value) {
applyAssignment( getTarget().get( attribute ), (SqmExpression<? extends Y>) value );
return this;
}
@Override
public <Y, X extends Y> SqmConflictUpdateAction<T> set(Path<Y> attribute, @Nullable X value) {
applyAssignment( (SqmPath<Y>) attribute, (SqmExpression<? extends Y>) nodeBuilder().value( value ) );
return this;
}
@Override
public <Y> SqmConflictUpdateAction<T> set(Path<Y> attribute, Expression<? extends Y> value) {
applyAssignment( (SqmPath<Y>) attribute, (SqmExpression<? extends Y>) value );
return this;
}
@Override
public SqmConflictUpdateAction<T> set(String attributeName, @Nullable Object value) {
final SqmPath sqmPath = getTarget().get(attributeName);
final SqmExpression expression;
if ( value instanceof SqmExpression ) {
expression = (SqmExpression) value;
}
else {
expression = (SqmExpression) nodeBuilder().value( value );
}
assertAssignable( null, sqmPath, expression, nodeBuilder() );
applyAssignment( sqmPath, expression );
return this;
}
public void addAssignment(SqmAssignment<?> assignment) {
setClause.addAssignment( assignment );
}
private <Y> void applyAssignment(SqmPath<Y> targetPath, SqmExpression<? extends Y> value) {
setClause.addAssignment( new SqmAssignment<>( targetPath, value ) );
}
@Override
public SqmConflictUpdateAction<T> where(@Nullable Expression<Boolean> restriction) {
initAndGetWhereClause().setPredicate( (SqmPredicate) restriction );
return this;
}
@Override
public SqmConflictUpdateAction<T> where(Predicate @Nullable... restrictions) {
final SqmWhereClause whereClause = initAndGetWhereClause();
// Clear the current predicate if one is present
whereClause.setPredicate( null );
if ( restrictions != null ) {
for ( Predicate restriction : restrictions ) {
whereClause.applyPredicate( (SqmPredicate) restriction );
}
}
return this;
}
@Override
public @Nullable SqmPredicate getRestriction() {
return whereClause == null ? null : whereClause.getPredicate();
}
protected SqmWhereClause initAndGetWhereClause() {
if ( whereClause == null ) {
whereClause = new SqmWhereClause( nodeBuilder() );
}
return whereClause;
}
@Override
public NodeBuilder nodeBuilder() {
return insertStatement.nodeBuilder();
}
@Override
public SqmConflictUpdateAction<T> copy(SqmCopyContext context) {
final SqmConflictUpdateAction<T> existing = context.getCopy( this );
if ( existing != null ) {
return existing;
}
return context.registerCopy(
this,
new SqmConflictUpdateAction<>(
insertStatement.copy( context ),
setClause.copy( context ),
whereClause == null ? null : whereClause.copy( context )
)
);
}
public SqmSetClause getSetClause() {
return setClause;
}
public @Nullable SqmWhereClause getWhereClause() {
return whereClause;
}
private SqmRoot<T> getTarget() {
return insertStatement.getTarget();
}
public void appendHqlString(StringBuilder sb, SqmRenderContext context) {
sb.append( " do update" );
setClause.appendHqlString( sb, context );
final SqmPredicate predicate = whereClause == null ? null : whereClause.getPredicate();
if ( predicate != null ) {
sb.append( " where " );
predicate.appendHqlString( sb, context );
}
}
@Override
public boolean equals(@Nullable Object object) {
return object instanceof SqmConflictUpdateAction<?> that
&& setClause.equals( that.getSetClause() )
&& Objects.equals( whereClause, that.getWhereClause() );
}
@Override
public int hashCode() {
int result = setClause.hashCode();
result = 31 * result + Objects.hashCode( whereClause );
return result;
}
@Override
public boolean isCompatible(Object object) {
return object instanceof SqmConflictUpdateAction<?> that
&& setClause.isCompatible( that.getSetClause() )
&& SqmCacheable.areCompatible( whereClause, that.getWhereClause() );
}
@Override
public int cacheHashCode() {
int result = setClause.cacheHashCode();
result = 31 * result + SqmCacheable.cacheHashCode( whereClause );
return result;
}
}
| SqmConflictUpdateAction |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/mapping/generated/delegate/MutationDelegateStatementReleaseTest.java | {
"start": 7430,
"end": 7792
} | class ____ extends BaseEntity {
@Generated( event = EventType.INSERT )
@ColumnDefault( "'default_child_name'" )
private String childName;
@UpdateTimestamp( source = SourceType.DB )
private Date childUpdateDate;
public String getChildName() {
return childName;
}
public Date getChildUpdateDate() {
return childUpdateDate;
}
}
}
| ChildEntity |
java | apache__camel | dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/AWSConfigEndpointBuilderFactory.java | {
"start": 1418,
"end": 1556
} | interface ____ {
/**
* Builder for endpoint for the AWS Config Service component.
*/
public | AWSConfigEndpointBuilderFactory |
java | apache__flink | flink-runtime/src/test/java/org/apache/flink/runtime/checkpoint/filemerging/AcrossCheckpointFileMergingSnapshotManagerTest.java | {
"start": 1341,
"end": 16311
} | class ____
extends FileMergingSnapshotManagerTestBase {
@Override
FileMergingType getFileMergingType() {
return FileMergingType.MERGE_ACROSS_CHECKPOINT;
}
@Test
void testCreateAndReuseFiles() throws IOException {
try (FileMergingSnapshotManagerBase fmsm =
(FileMergingSnapshotManagerBase)
createFileMergingSnapshotManager(checkpointBaseDir)) {
fmsm.registerSubtaskForSharedStates(subtaskKey1);
fmsm.registerSubtaskForSharedStates(subtaskKey2);
// firstly, we try shared state.
PhysicalFile file1 =
fmsm.getOrCreatePhysicalFileForCheckpoint(
subtaskKey1, 0, CheckpointedStateScope.SHARED);
assertThat(file1.getFilePath().getParent())
.isEqualTo(fmsm.getManagedDir(subtaskKey1, CheckpointedStateScope.SHARED));
// allocate another
PhysicalFile file2 =
fmsm.getOrCreatePhysicalFileForCheckpoint(
subtaskKey1, 0, CheckpointedStateScope.SHARED);
assertThat(file2.getFilePath().getParent())
.isEqualTo(fmsm.getManagedDir(subtaskKey1, CheckpointedStateScope.SHARED));
assertThat(file2).isNotEqualTo(file1);
assertThat(fmsm.spaceStat.physicalFileCount.get()).isEqualTo(2);
// return for reuse
fmsm.returnPhysicalFileForNextReuse(subtaskKey1, 0, file1);
assertThat(fmsm.spaceStat.physicalFileCount.get()).isEqualTo(2);
// allocate for another subtask
PhysicalFile file3 =
fmsm.getOrCreatePhysicalFileForCheckpoint(
subtaskKey2, 0, CheckpointedStateScope.SHARED);
assertThat(file3.getFilePath().getParent())
.isEqualTo(fmsm.getManagedDir(subtaskKey2, CheckpointedStateScope.SHARED));
assertThat(file3).isNotEqualTo(file1);
assertThat(fmsm.spaceStat.physicalFileCount.get()).isEqualTo(3);
// allocate for another checkpoint
PhysicalFile file4 =
fmsm.getOrCreatePhysicalFileForCheckpoint(
subtaskKey1, 1, CheckpointedStateScope.SHARED);
assertThat(file4.getFilePath().getParent())
.isEqualTo(fmsm.getManagedDir(subtaskKey1, CheckpointedStateScope.SHARED));
assertThat(file4).isEqualTo(file1);
assertThat(fmsm.spaceStat.physicalFileCount.get()).isEqualTo(3);
// a physical file whose size is bigger than maxPhysicalFileSize cannot be reused
file4.incSize(fmsm.maxPhysicalFileSize);
fmsm.returnPhysicalFileForNextReuse(subtaskKey1, 1, file4);
// file4 is discarded because it's size is bigger than maxPhysicalFileSize
assertThat(fmsm.spaceStat.physicalFileCount.get()).isEqualTo(2);
PhysicalFile file5 =
fmsm.getOrCreatePhysicalFileForCheckpoint(
subtaskKey1, 1, CheckpointedStateScope.SHARED);
assertThat(file5.getFilePath().getParent())
.isEqualTo(fmsm.getManagedDir(subtaskKey1, CheckpointedStateScope.SHARED));
assertThat(file5).isNotEqualTo(file4);
assertThat(fmsm.spaceStat.physicalFileCount.get()).isEqualTo(3);
// Secondly, we try private state
PhysicalFile file6 =
fmsm.getOrCreatePhysicalFileForCheckpoint(
subtaskKey1, 1, CheckpointedStateScope.EXCLUSIVE);
assertThat(file6.getFilePath().getParent())
.isEqualTo(fmsm.getManagedDir(subtaskKey1, CheckpointedStateScope.EXCLUSIVE));
assertThat(fmsm.spaceStat.physicalFileCount.get()).isEqualTo(4);
// allocate another
PhysicalFile file7 =
fmsm.getOrCreatePhysicalFileForCheckpoint(
subtaskKey1, 1, CheckpointedStateScope.EXCLUSIVE);
assertThat(file7.getFilePath().getParent())
.isEqualTo(fmsm.getManagedDir(subtaskKey1, CheckpointedStateScope.EXCLUSIVE));
assertThat(file7).isNotEqualTo(file5);
assertThat(fmsm.spaceStat.physicalFileCount.get()).isEqualTo(5);
// return for reuse
fmsm.returnPhysicalFileForNextReuse(subtaskKey1, 0, file6);
assertThat(fmsm.spaceStat.physicalFileCount.get()).isEqualTo(5);
// allocate for another checkpoint
PhysicalFile file8 =
fmsm.getOrCreatePhysicalFileForCheckpoint(
subtaskKey1, 2, CheckpointedStateScope.EXCLUSIVE);
assertThat(file8.getFilePath().getParent())
.isEqualTo(fmsm.getManagedDir(subtaskKey1, CheckpointedStateScope.EXCLUSIVE));
assertThat(file8).isEqualTo(file6);
assertThat(fmsm.spaceStat.physicalFileCount.get()).isEqualTo(5);
// return for reuse
fmsm.returnPhysicalFileForNextReuse(subtaskKey1, 0, file8);
assertThat(fmsm.spaceStat.physicalFileCount.get()).isEqualTo(5);
// allocate for this checkpoint but another subtask
PhysicalFile file9 =
fmsm.getOrCreatePhysicalFileForCheckpoint(
subtaskKey2, 2, CheckpointedStateScope.EXCLUSIVE);
assertThat(file9.getFilePath().getParent())
.isEqualTo(fmsm.getManagedDir(subtaskKey2, CheckpointedStateScope.EXCLUSIVE));
assertThat(file9).isEqualTo(file6);
assertThat(fmsm.spaceStat.physicalFileCount.get()).isEqualTo(5);
// a physical file whose size is bigger than maxPhysicalFileSize cannot be reused
file9.incSize(fmsm.maxPhysicalFileSize);
fmsm.returnPhysicalFileForNextReuse(subtaskKey1, 2, file9);
assertThat(fmsm.spaceStat.physicalFileCount.get()).isEqualTo(4);
PhysicalFile file10 =
fmsm.getOrCreatePhysicalFileForCheckpoint(
subtaskKey1, 2, CheckpointedStateScope.SHARED);
assertThat(file10.getFilePath().getParent())
.isEqualTo(fmsm.getManagedDir(subtaskKey1, CheckpointedStateScope.SHARED));
assertThat(file10).isNotEqualTo(file9);
assertThat(fmsm.spaceStat.physicalFileCount.get()).isEqualTo(5);
assertThat(fmsm.getManagedDir(subtaskKey2, CheckpointedStateScope.EXCLUSIVE))
.isEqualTo(fmsm.getManagedDir(subtaskKey1, CheckpointedStateScope.EXCLUSIVE));
}
}
@Test
public void testCheckpointNotification() throws Exception {
try (FileMergingSnapshotManagerBase fmsm =
(FileMergingSnapshotManagerBase)
createFileMergingSnapshotManager(checkpointBaseDir);
CloseableRegistry closeableRegistry = new CloseableRegistry()) {
fmsm.registerSubtaskForSharedStates(subtaskKey1);
fmsm.registerSubtaskForSharedStates(subtaskKey2);
BiFunctionWithException<
FileMergingSnapshotManager.SubtaskKey,
Long,
SegmentFileStateHandle,
Exception>
writer =
((subtaskKey, checkpointId) -> {
return writeCheckpointAndGetStream(
subtaskKey,
checkpointId,
CheckpointedStateScope.SHARED,
fmsm,
closeableRegistry)
.closeAndGetHandle();
});
SegmentFileStateHandle cp1StateHandle1 = writer.apply(subtaskKey1, 1L);
SegmentFileStateHandle cp1StateHandle2 = writer.apply(subtaskKey2, 1L);
fmsm.notifyCheckpointComplete(subtaskKey1, 1);
assertFileInManagedDir(fmsm, cp1StateHandle1);
assertFileInManagedDir(fmsm, cp1StateHandle2);
assertThat(fmsm.spaceStat.physicalFileCount.get()).isEqualTo(2);
assertThat(fmsm.spaceStat.logicalFileCount.get()).isEqualTo(2);
// complete checkpoint-2
SegmentFileStateHandle cp2StateHandle1 = writer.apply(subtaskKey1, 2L);
SegmentFileStateHandle cp2StateHandle2 = writer.apply(subtaskKey2, 2L);
fmsm.notifyCheckpointComplete(subtaskKey1, 2);
fmsm.notifyCheckpointComplete(subtaskKey2, 2);
assertFileInManagedDir(fmsm, cp2StateHandle1);
assertFileInManagedDir(fmsm, cp2StateHandle2);
assertThat(fmsm.spaceStat.physicalFileCount.get()).isEqualTo(2);
assertThat(fmsm.spaceStat.logicalFileCount.get()).isEqualTo(4);
assertThat(fmsm.isCheckpointDiscard(1)).isFalse();
// subsume checkpoint-1
assertThat(fileExists(cp1StateHandle1)).isTrue();
assertThat(fileExists(cp1StateHandle2)).isTrue();
fmsm.notifyCheckpointSubsumed(subtaskKey1, 1);
assertThat(fileExists(cp1StateHandle1)).isTrue();
assertThat(fileExists(cp1StateHandle2)).isTrue();
assertThat(fmsm.spaceStat.physicalFileCount.get()).isEqualTo(2);
assertThat(fmsm.spaceStat.logicalFileCount.get()).isEqualTo(3);
assertThat(fmsm.isCheckpointDiscard(1)).isFalse();
fmsm.notifyCheckpointSubsumed(subtaskKey2, 1);
assertThat(fmsm.isCheckpointDiscard(1)).isTrue();
assertThat(fmsm.spaceStat.physicalFileCount.get()).isEqualTo(2);
assertThat(fmsm.spaceStat.logicalFileCount.get()).isEqualTo(2);
// abort checkpoint-3
SegmentFileStateHandle cp3StateHandle1 = writer.apply(subtaskKey1, 3L);
SegmentFileStateHandle cp3StateHandle2 = writer.apply(subtaskKey2, 3L);
assertThat(fmsm.spaceStat.physicalFileCount.get()).isEqualTo(2);
assertThat(fmsm.spaceStat.logicalFileCount.get()).isEqualTo(4);
assertFileInManagedDir(fmsm, cp3StateHandle1);
assertFileInManagedDir(fmsm, cp3StateHandle2);
fmsm.notifyCheckpointAborted(subtaskKey1, 3);
assertThat(fileExists(cp3StateHandle1)).isTrue();
assertThat(fmsm.spaceStat.physicalFileCount.get()).isEqualTo(2);
assertThat(fmsm.spaceStat.logicalFileCount.get()).isEqualTo(3);
assertThat(fmsm.isCheckpointDiscard(3)).isFalse();
fmsm.notifyCheckpointAborted(subtaskKey2, 3);
assertThat(fmsm.isCheckpointDiscard(3)).isTrue();
assertThat(fmsm.spaceStat.physicalFileCount.get()).isEqualTo(2);
assertThat(fmsm.spaceStat.logicalFileCount.get()).isEqualTo(2);
}
}
@Test
public void testSpaceControl() throws Exception {
try (FileMergingSnapshotManagerBase fmsm =
(FileMergingSnapshotManagerBase)
createFileMergingSnapshotManager(checkpointBaseDir);
CloseableRegistry closeableRegistry = new CloseableRegistry()) {
fmsm.registerSubtaskForSharedStates(subtaskKey1);
BiFunctionWithException<Long, Integer, SegmentFileStateHandle, Exception> writer =
((checkpointId, size) -> {
return writeCheckpointAndGetStream(
subtaskKey1,
checkpointId,
CheckpointedStateScope.SHARED,
fmsm,
closeableRegistry,
size)
.closeAndGetHandle();
});
Integer eighthOfFile = 4 * 1024 * 1024;
// Doing checkpoint-1 with 6 files
SegmentFileStateHandle stateHandle1 = writer.apply(1L, eighthOfFile);
SegmentFileStateHandle stateHandle2 = writer.apply(1L, eighthOfFile);
SegmentFileStateHandle stateHandle3 = writer.apply(1L, eighthOfFile);
SegmentFileStateHandle stateHandle4 = writer.apply(1L, eighthOfFile);
SegmentFileStateHandle stateHandle5 = writer.apply(1L, eighthOfFile);
SegmentFileStateHandle stateHandle6 = writer.apply(1L, eighthOfFile);
fmsm.notifyCheckpointComplete(subtaskKey1, 1);
assertThat(fmsm.spaceStat.physicalFileCount.get()).isEqualTo(1);
assertThat(fmsm.spaceStat.logicalFileCount.get()).isEqualTo(6);
// complete checkpoint-2 with 3 files written and 1 file reused from checkpoint 1
assertThat(fmsm.couldReusePreviousStateHandle(stateHandle1)).isTrue();
SegmentFileStateHandle stateHandle7 = writer.apply(2L, eighthOfFile);
SegmentFileStateHandle stateHandle8 = writer.apply(2L, eighthOfFile);
SegmentFileStateHandle stateHandle9 = writer.apply(2L, eighthOfFile);
fmsm.reusePreviousStateHandle(2, Collections.singletonList(stateHandle1));
fmsm.notifyCheckpointComplete(subtaskKey1, 2);
assertThat(fmsm.spaceStat.physicalFileCount.get()).isEqualTo(2);
assertThat(fmsm.spaceStat.logicalFileCount.get()).isEqualTo(9);
// subsume checkpoint-1
fmsm.notifyCheckpointSubsumed(subtaskKey1, 1);
assertThat(fmsm.spaceStat.physicalFileCount.get()).isEqualTo(2);
assertThat(fmsm.spaceStat.logicalFileCount.get()).isEqualTo(4);
// complete checkpoint-3 with 1 files reuse from checkpoint 1 and 2.
assertThat(fmsm.couldReusePreviousStateHandle(stateHandle1)).isFalse();
assertThat(fmsm.couldReusePreviousStateHandle(stateHandle7)).isFalse();
assertThat(fmsm.couldReusePreviousStateHandle(stateHandle9)).isTrue();
SegmentFileStateHandle stateHandle10 = writer.apply(3L, eighthOfFile);
SegmentFileStateHandle stateHandle11 = writer.apply(3L, eighthOfFile);
SegmentFileStateHandle stateHandle12 = writer.apply(3L, eighthOfFile);
fmsm.notifyCheckpointComplete(subtaskKey1, 3);
assertThat(fmsm.spaceStat.physicalFileCount.get()).isEqualTo(2);
assertThat(fmsm.spaceStat.logicalFileCount.get()).isEqualTo(7);
// subsume checkpoint-2
fmsm.notifyCheckpointSubsumed(subtaskKey1, 2);
assertThat(fmsm.spaceStat.physicalFileCount.get()).isEqualTo(1);
assertThat(fmsm.spaceStat.logicalFileCount.get()).isEqualTo(3);
}
}
}
| AcrossCheckpointFileMergingSnapshotManagerTest |
java | assertj__assertj-core | assertj-core/src/test/java/org/assertj/core/api/assumptions/Predicate_final_method_assertions_in_assumptions_Test.java | {
"start": 1175,
"end": 2237
} | class ____ extends BaseAssumptionsRunnerTest {
public static Stream<AssumptionRunner<?>> provideAssumptionsRunners() {
Predicate<MapEntry<String, String>> ballSportPredicate = sport -> sport.value.contains("ball");
return Stream.of(assumptionRunner(ballSportPredicate,
value -> assumeThat(value).accepts(entry("sport", "football"),
entry("sport", "basketball")),
value -> assumeThat(value).accepts(entry("sport", "boxing"), entry("sport", "marathon"))),
assumptionRunner(ballSportPredicate,
value -> assumeThat(value).rejects(entry("sport", "boxing"), entry("sport", "marathon")),
value -> assumeThat(value).rejects(entry("sport", "football"),
entry("sport", "basketball"))));
}
}
| Predicate_final_method_assertions_in_assumptions_Test |
java | assertj__assertj-core | assertj-core/src/test/java/org/assertj/core/internal/bigintegers/BigIntegers_assertIsNotPositive_Test.java | {
"start": 1153,
"end": 2869
} | class ____ extends BigIntegersBaseTest {
@Test
void should_succeed_since_actual_is_not_positive() {
numbers.assertIsNotPositive(someInfo(), new BigInteger("-6"));
}
@Test
void should_succeed_since_actual_is_zero() {
numbers.assertIsNotPositive(someInfo(), BigInteger.ZERO);
}
@Test
void should_fail_since_actual_is_positive() {
// WHEN
var assertionError = expectAssertionError(() -> numbers.assertIsNotPositive(someInfo(), new BigInteger("6")));
// THEN
then(assertionError).hasMessage(shouldBeLessOrEqual(new BigInteger("6"), BigInteger.ZERO).create());
}
@Test
void should_fail_since_actual_can_be_positive_according_to_custom_comparison_strategy() {
// WHEN
var assertionError = expectAssertionError(() -> numbersWithAbsValueComparisonStrategy.assertIsNotPositive(someInfo(),
new BigInteger("-1")));
// THEN
then(assertionError).hasMessage(shouldBeLessOrEqual(new BigInteger("-1"), BigInteger.ZERO,
absValueComparisonStrategy).create());
}
@Test
void should_fail_since_actual_is_positive_according_to_custom_comparison_strategy() {
// WHEN
var assertionError = expectAssertionError(() -> numbersWithAbsValueComparisonStrategy.assertIsNotPositive(someInfo(),
BigInteger.ONE));
// THEN
then(assertionError).hasMessage(shouldBeLessOrEqual(BigInteger.ONE, BigInteger.ZERO, absValueComparisonStrategy).create());
}
}
| BigIntegers_assertIsNotPositive_Test |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/streaming/runtime/watermarkstatus/HeapPriorityQueue.java | {
"start": 1075,
"end": 2118
} | class ____ similar functions with {@link
* org.apache.flink.runtime.state.heap.HeapPriorityQueue}. It is introduced as the replacement of
* {@link org.apache.flink.runtime.state.heap.HeapPriorityQueue} to be used in {@link
* StatusWatermarkValve}, to avoid affecting the performance of memory state backend.
*
* <p>The reason why the performance of memory state backend will be affected if we reuse the {@link
* org.apache.flink.runtime.state.heap.HeapPriorityQueue}: In some scenarios, the {@link
* org.apache.flink.runtime.state.heap.HeapPriorityQueueElement} will only have one
* implementation(used by memory state backend), which allows the jvm to inline its
* methods(getInternalIndex, setInternalIndex). If we reuse it in {@link StatusWatermarkValve}, it
* will cause it to have multiple implementations. Once there are multiple implementations, its
* methods will be difficult to be inlined by jvm, which will result in poor performance of memory
* state backend.
*
* @param <T> type of the contained elements.
*/
public | has |
java | junit-team__junit5 | junit-platform-launcher/src/main/java/org/junit/platform/launcher/EngineDiscoveryResult.java | {
"start": 912,
"end": 1045
} | class ____ {
/**
* Status of test discovery by a
* {@link org.junit.platform.engine.TestEngine}.
*/
public | EngineDiscoveryResult |
java | spring-projects__spring-framework | spring-test/src/main/java/org/springframework/test/util/ReflectionTestUtils.java | {
"start": 11066,
"end": 13423
} | class ____ which to get the field; may
* be {@code null} if the field is an instance field
* @param name the name of the field to get; never {@code null}
* @return the field's current value
* @since 4.2
* @see #getField(Object, String)
* @see #getField(Class, String)
* @see ReflectionUtils#findField(Class, String, Class)
* @see ReflectionUtils#makeAccessible(Field)
* @see ReflectionUtils#getField(Field, Object)
* @see AopTestUtils#getUltimateTargetObject(Object)
*/
@SuppressWarnings("NullAway") // Dataflow analysis limitation
public static @Nullable Object getField(@Nullable Object targetObject, @Nullable Class<?> targetClass, String name) {
Assert.isTrue(targetObject != null || targetClass != null,
"Either targetObject or targetClass for the field must be specified");
if (targetObject != null && SPRING_AOP_PRESENT) {
targetObject = AopTestUtils.getUltimateTargetObject(targetObject);
}
if (targetClass == null) {
targetClass = targetObject.getClass();
}
Field field = ReflectionUtils.findField(targetClass, name);
if (field == null) {
throw new IllegalArgumentException(String.format("Could not find field '%s' on %s or target class [%s]",
name, safeToString(targetObject), targetClass));
}
if (logger.isDebugEnabled()) {
logger.debug(String.format("Getting field '%s' from %s or target class [%s]", name,
safeToString(targetObject), targetClass));
}
ReflectionUtils.makeAccessible(field);
return ReflectionUtils.getField(field, targetObject);
}
/**
* Invoke the setter method with the given {@code name} on the supplied
* target object with the supplied {@code value}.
* <p>This method delegates to
* {@link #invokeSetterMethod(Object, String, Object, Class)}, supplying
* {@code null} for the parameter type.
* @param target the target object on which to invoke the specified setter
* method
* @param name the name of the setter method to invoke or the corresponding
* property name
* @param value the value to provide to the setter method
*/
public static void invokeSetterMethod(Object target, String name, Object value) {
invokeSetterMethod(target, name, value, null);
}
/**
* Invoke the setter method with the given {@code name} on the supplied
* target object with the supplied {@code value}.
* <p>This method traverses the | from |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.