language
stringclasses 1
value | repo
stringclasses 60
values | path
stringlengths 22
294
| class_span
dict | source
stringlengths 13
1.16M
| target
stringlengths 1
113
|
|---|---|---|---|---|---|
java
|
spring-projects__spring-security
|
config/src/test/java/org/springframework/security/config/annotation/web/configurers/NamespaceRememberMeTests.java
|
{
"start": 16204,
"end": 16565
}
|
class ____ extends UsersConfig {
@Bean
SecurityFilterChain filterChain(HttpSecurity http) throws Exception {
// @formatter:off
http
.formLogin(withDefaults())
.rememberMe((me) -> me
.rememberMeParameter("rememberMe"));
return http.build();
// @formatter:on
}
}
@Configuration
@EnableWebSecurity
static
|
RememberMeParameterConfig
|
java
|
apache__camel
|
components/camel-openstack/src/test/java/org/apache/camel/component/openstack/swift/ObjectProducerTest.java
|
{
"start": 2105,
"end": 7035
}
|
class ____ extends SwiftProducerTestSupport {
private static final String CONTAINER_NAME = "containerName";
private static final String OBJECT_NAME = "objectName";
private static final String ETAG = UUID.randomUUID().toString();
@Mock
private SwiftObject mockOsObject;
@Mock
private ObjectStorageObjectService objectService;
@Captor
private ArgumentCaptor<String> containerNameCaptor;
@Captor
private ArgumentCaptor<String> objectNameCaptor;
@Captor
private ArgumentCaptor<Payload<?>> payloadArgumentCaptor;
@Captor
private ArgumentCaptor<ObjectLocation> locationCaptor;
@Captor
private ArgumentCaptor<Map<String, String>> dataCaptor;
@BeforeEach
public void setUp() {
when(objectStorageService.objects()).thenReturn(objectService);
producer = new ObjectProducer(endpoint, client);
when(mockOsObject.getETag()).thenReturn(ETAG);
}
@Test
public void createTest() throws Exception {
when(objectService.put(anyString(), anyString(), any())).thenReturn(ETAG);
msg.setHeader(OpenstackConstants.OPERATION, OpenstackConstants.CREATE);
msg.setHeader(SwiftConstants.CONTAINER_NAME, CONTAINER_NAME);
msg.setHeader(SwiftConstants.OBJECT_NAME, OBJECT_NAME);
final Payload<?> payload = getTmpPayload();
msg.setBody(payload);
producer.process(exchange);
verify(objectService).put(containerNameCaptor.capture(), objectNameCaptor.capture(), payloadArgumentCaptor.capture());
assertEquals(CONTAINER_NAME, containerNameCaptor.getValue());
assertEquals(OBJECT_NAME, objectNameCaptor.getValue());
assertEquals(payload, payloadArgumentCaptor.getValue());
assertEquals(ETAG, msg.getBody(String.class));
}
@Test
public void getTest() throws Exception {
when(objectService.get(CONTAINER_NAME, OBJECT_NAME)).thenReturn(mockOsObject);
when(endpoint.getOperation()).thenReturn(OpenstackConstants.GET);
msg.setHeader(SwiftConstants.CONTAINER_NAME, CONTAINER_NAME);
msg.setHeader(SwiftConstants.OBJECT_NAME, OBJECT_NAME);
producer.process(exchange);
assertEquals(ETAG, msg.getBody(SwiftObject.class).getETag());
}
@Test
public void getAllFromContainerTest() throws Exception {
List<SwiftObject> objectsList = new ArrayList<>();
objectsList.add(mockOsObject);
doReturn(objectsList).when(objectService).list(CONTAINER_NAME);
when(endpoint.getOperation()).thenReturn(OpenstackConstants.GET_ALL);
msg.setHeader(SwiftConstants.CONTAINER_NAME, CONTAINER_NAME);
producer.process(exchange);
assertEquals(mockOsObject, msg.getBody(List.class).get(0));
}
@Test
public void deleteObjectTest() throws Exception {
when(objectService.delete(anyString(), anyString())).thenReturn(ActionResponse.actionSuccess());
msg.setHeader(OpenstackConstants.OPERATION, OpenstackConstants.DELETE);
msg.setHeader(SwiftConstants.CONTAINER_NAME, CONTAINER_NAME);
msg.setHeader(SwiftConstants.OBJECT_NAME, OBJECT_NAME);
producer.process(exchange);
verify(objectService).delete(containerNameCaptor.capture(), objectNameCaptor.capture());
assertEquals(CONTAINER_NAME, containerNameCaptor.getValue());
assertEquals(OBJECT_NAME, objectNameCaptor.getValue());
}
@Test
public void updateMetadataTest() throws Exception {
final Map<String, String> md = new HashMap<>();
md.put("key", "val");
msg.setHeader(OpenstackConstants.OPERATION, SwiftConstants.CREATE_UPDATE_METADATA);
msg.setHeader(SwiftConstants.CONTAINER_NAME, CONTAINER_NAME);
msg.setHeader(SwiftConstants.OBJECT_NAME, OBJECT_NAME);
msg.setBody(md);
producer.process(exchange);
verify(objectService).updateMetadata(locationCaptor.capture(), dataCaptor.capture());
ObjectLocation location = locationCaptor.getValue();
assertEquals(CONTAINER_NAME, location.getContainerName());
assertEquals(OBJECT_NAME, location.getObjectName());
assertEquals(md, dataCaptor.getValue());
}
@Test
public void getMetadataTest() throws Exception {
final Map<String, String> md = new HashMap<>();
md.put("key", "val");
when(objectService.getMetadata(CONTAINER_NAME, OBJECT_NAME)).thenReturn(md);
msg.setHeader(OpenstackConstants.OPERATION, SwiftConstants.GET_METADATA);
msg.setHeader(SwiftConstants.CONTAINER_NAME, CONTAINER_NAME);
msg.setHeader(SwiftConstants.OBJECT_NAME, OBJECT_NAME);
producer.process(exchange);
assertEquals(md, msg.getBody(Map.class));
}
private Payload<File> getTmpPayload() throws IOException {
return Payloads.create(File.createTempFile("payloadPreffix", ".txt"));
}
}
|
ObjectProducerTest
|
java
|
apache__camel
|
core/camel-core-model/src/main/java/org/apache/camel/model/BeanFactoryDefinition.java
|
{
"start": 11737,
"end": 17515
}
|
class ____ use for creating and configuring the bean. The builder will use
* the properties values to configure the bean.
*/
public BeanFactoryDefinition<P> builderClass(String builderClass) {
setBuilderClass(builderClass);
return this;
}
/**
* Name of method when using builder class. This method is invoked after configuring to create the actual bean. This
* method is often named build (used by default).
*/
public BeanFactoryDefinition<P> builderMethod(String builderMethod) {
setBuilderMethod(builderMethod);
return this;
}
/**
* Calls a groovy script for creating the local bean
*
* If the script use the prefix <tt>resource:</tt> such as <tt>resource:classpath:com/foo/myscript.groovy</tt>,
* <tt>resource:file:/var/myscript.groovy</tt>, then its loaded from the external resource.
*
* @param script the script
*/
public P groovy(String script) {
setScriptLanguage("groovy");
setScript(script);
return parent;
}
/**
* Calls joor script (Java source that is runtime compiled to Java bytecode) for creating the local bean
*
* If the script use the prefix <tt>resource:</tt> such as <tt>resource:classpath:com/foo/myscript.groovy</tt>,
* <tt>resource:file:/var/myscript.groovy</tt>, then its loaded from the external resource.
*
* @param script the script
*/
public P joor(String script) {
setScriptLanguage("joor");
setScript(script);
return parent;
}
/**
* Calls java (Java source that is runtime compiled to Java bytecode) for creating the local bean
*
* If the script use the prefix <tt>resource:</tt> such as <tt>resource:classpath:com/foo/myscript.groovy</tt>,
* <tt>resource:file:/var/myscript.groovy</tt>, then its loaded from the external resource.
*
* @param script the script
*/
public P java(String script) {
return joor(script);
}
/**
* Calls a custom language for creating the local bean
*
* If the script use the prefix <tt>resource:</tt> such as <tt>resource:classpath:com/foo/myscript.groovy</tt>,
* <tt>resource:file:/var/myscript.groovy</tt>, then its loaded from the external resource.
*
* @param language the language
* @param script the script
*/
public P language(String language, String script) {
setScriptLanguage(language);
setScript(script);
return parent;
}
/**
* Calls a MvEL script for creating the local bean
*
* If the script use the prefix <tt>resource:</tt> such as <tt>resource:classpath:com/foo/myscript.groovy</tt>,
* <tt>resource:file:/var/myscript.groovy</tt>, then its loaded from the external resource.
*
* @param script the script
*/
public P mvel(String script) {
setScriptLanguage("mvel");
setScript(script);
return parent;
}
/**
* Calls a OGNL script for creating the local bean
*
* If the script use the prefix <tt>resource:</tt> such as <tt>resource:classpath:com/foo/myscript.groovy</tt>,
* <tt>resource:file:/var/myscript.groovy</tt>, then its loaded from the external resource.
*
* @param script the script
*/
public P ognl(String script) {
setScriptLanguage("ognl");
setScript(script);
return parent;
}
/**
* Sets a constructor for creating the bean. Arguments correspond to specific index of the constructor argument
* list, starting from zero.
*
* @param index the constructor index (starting from zero)
* @param value the constructor value
*/
public BeanFactoryDefinition<P> constructor(Integer index, String value) {
if (constructors == null) {
constructors = new LinkedHashMap<>();
}
constructors.put(index, value);
return this;
}
/**
* Optional constructor arguments for creating the bean. Arguments correspond to specific index of the constructor
* argument list, starting from zero.
*/
public BeanFactoryDefinition<P> constructors(Map<Integer, Object> constructors) {
this.constructors = constructors;
return this;
}
/**
* Sets a property to set on the created local bean
*
* @param key the property name
* @param value the property value
*/
public BeanFactoryDefinition<P> property(String key, String value) {
if (properties == null) {
properties = new LinkedHashMap<>();
}
properties.put(key, value);
return this;
}
/**
* Sets properties to set on the created local bean
*/
public BeanFactoryDefinition<P> properties(Map<String, Object> properties) {
this.properties = properties;
return this;
}
/**
* Whether the script should support using Camel property placeholder syntax {{ }}.
*/
public BeanFactoryDefinition<P> scriptPropertyPlaceholders(boolean scriptPropertyPlaceholders) {
this.scriptPropertyPlaceholders = scriptPropertyPlaceholders ? "true" : "false";
return this;
}
/**
* Whether the script should support using Camel property placeholder syntax {{ }}.
*/
public BeanFactoryDefinition<P> scriptPropertyPlaceholders(String scriptPropertyPlaceholders) {
this.scriptPropertyPlaceholders = scriptPropertyPlaceholders;
return this;
}
public P end() {
return parent;
}
@Override
public Resource getResource() {
return resource;
}
@Override
public void setResource(Resource resource) {
this.resource = resource;
}
}
|
to
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/FieldCanBeFinalTest.java
|
{
"start": 1522,
"end": 1960
}
|
class ____ {
// BUG: Diagnostic contains: private final int x
private int x;
Test() {
x = 42;
}
}
""")
.doTest();
}
@Test
public void keepAnnotatedFields_ignored() {
compilationHelper
.addSourceLines(
"Test.java",
"""
import com.google.errorprone.annotations.Keep;
|
Test
|
java
|
quarkusio__quarkus
|
extensions/oidc/runtime/src/test/java/io/quarkus/oidc/runtime/UserInfoTest.java
|
{
"start": 417,
"end": 3097
}
|
class ____ {
UserInfo userInfo = new UserInfo(
"{"
+ "\"sub\": \"alice123456\","
+ "\"name\": \"alice\","
+ "\"first_name\": \"Alice\","
+ "\"family_name\": \"Brown\","
+ "\"preferred_username\": \"Alice Alice\","
+ "\"display_name\": \"Alice Brown\","
+ "\"email\": \"alice@email.com\","
+ "\"admin\": true,"
+ "\"custom\": null,"
+ "\"id\": 1234,"
+ "\"permissions\": [\"read\", \"write\"],"
+ "\"scopes\": {\"scope\": \"see\"}"
+ "}");
@Test
public void testGetName() {
assertEquals("alice", userInfo.getName());
}
@Test
public void testGetFirstName() {
assertEquals("Alice", userInfo.getFirstName());
}
@Test
public void testGetFamilyName() {
assertEquals("Brown", userInfo.getFamilyName());
}
@Test
public void testPreferredName() {
assertEquals("Alice Alice", userInfo.getPreferredUserName());
}
@Test
public void testDisplayName() {
assertEquals("Alice Brown", userInfo.getDisplayName());
}
@Test
public void testGetEmail() {
assertEquals("alice@email.com", userInfo.getEmail());
}
@Test
public void testGetSubject() {
assertEquals("alice123456", userInfo.getSubject());
}
@Test
public void testGetString() {
assertEquals("alice", userInfo.getString("name"));
assertNull(userInfo.getString("names"));
}
@Test
public void testGetBoolean() {
assertTrue(userInfo.getBoolean("admin"));
assertNull(userInfo.getBoolean("admins"));
}
@Test
public void testGetLong() {
assertEquals(1234, userInfo.getLong("id"));
assertNull(userInfo.getLong("ids"));
}
@Test
public void testGetArray() {
JsonArray array = userInfo.getArray("permissions");
assertNotNull(array);
assertEquals(2, array.size());
assertEquals("read", array.getString(0));
assertEquals("write", array.getString(1));
assertNull(userInfo.getArray("permit"));
}
@Test
public void testGetObject() {
JsonObject map = userInfo.getObject("scopes");
assertNotNull(map);
assertEquals(1, map.size());
assertEquals("see", map.getString("scope"));
assertNull(userInfo.getObject("scope"));
}
@Test
public void testGetNullProperty() {
assertNull(userInfo.getString("custom"));
}
}
|
UserInfoTest
|
java
|
apache__avro
|
lang/java/avro/src/main/java/org/apache/avro/SchemaCompatibility.java
|
{
"start": 1635,
"end": 5221
}
|
class ____ be instantiated. */
private SchemaCompatibility() {
}
/** Message to annotate reader/writer schema pairs that are compatible. */
public static final String READER_WRITER_COMPATIBLE_MESSAGE = "Reader schema can always successfully decode data written using the writer schema.";
/**
* Validates that the provided reader schema can be used to decode avro data
* written with the provided writer schema.
*
* @param reader schema to check.
* @param writer schema to check.
* @return a result object identifying any compatibility errors.
*/
public static SchemaPairCompatibility checkReaderWriterCompatibility(final Schema reader, final Schema writer) {
final SchemaCompatibilityResult compatibility = new ReaderWriterCompatibilityChecker().getCompatibility(reader,
writer);
final String message;
switch (compatibility.getCompatibility()) {
case INCOMPATIBLE: {
message = String.format(
"Data encoded using writer schema:%n%s%n" + "will or may fail to decode using reader schema:%n%s%n",
writer.toString(true), reader.toString(true));
break;
}
case COMPATIBLE: {
message = READER_WRITER_COMPATIBLE_MESSAGE;
break;
}
default:
throw new AvroRuntimeException("Unknown compatibility: " + compatibility);
}
return new SchemaPairCompatibility(compatibility, reader, writer, message);
}
// -----------------------------------------------------------------------------------------------
/**
* Tests the equality of two Avro named schemas.
*
* <p>
* Matching includes reader name aliases.
* </p>
*
* @param reader Named reader schema.
* @param writer Named writer schema.
* @return whether the names of the named schemas match or not.
*/
public static boolean schemaNameEquals(final Schema reader, final Schema writer) {
if (objectsEqual(reader.getName(), writer.getName())) {
return true;
}
// Apply reader aliases:
return reader.getAliases().contains(writer.getFullName());
}
/**
* Identifies the writer field that corresponds to the specified reader field.
*
* <p>
* Matching includes reader name aliases.
* </p>
*
* @param writerSchema Schema of the record where to look for the writer field.
* @param readerField Reader field to identify the corresponding writer field
* of.
* @return the writer field, if any does correspond, or None.
*/
public static Field lookupWriterField(final Schema writerSchema, final Field readerField) {
assert (writerSchema.getType() == Type.RECORD);
final List<Field> writerFields = new ArrayList<>();
final Field direct = writerSchema.getField(readerField.name());
if (direct != null) {
writerFields.add(direct);
}
for (final String readerFieldAliasName : readerField.aliases()) {
final Field writerField = writerSchema.getField(readerFieldAliasName);
if (writerField != null) {
writerFields.add(writerField);
}
}
switch (writerFields.size()) {
case 0:
return null;
case 1:
return writerFields.get(0);
default: {
throw new AvroRuntimeException(String.format(
"Reader record field %s matches multiple fields in writer record schema %s", readerField, writerSchema));
}
}
}
/**
* Reader/writer schema pair that can be used as a key in a hash map.
*
* This reader/writer pair differentiates Schema objects based on their system
* hash code.
*/
private static final
|
cannot
|
java
|
google__error-prone
|
test_helpers/src/main/java/com/google/errorprone/BugCheckerRefactoringTestHelper.java
|
{
"start": 13610,
"end": 14875
}
|
class ____ {
private final JavaFileObject input;
private ExpectOutput(JavaFileObject input) {
this.input = input;
}
@CanIgnoreReturnValue
public BugCheckerRefactoringTestHelper addOutputLines(String path, String... output) {
return addInputAndOutput(input, forSourceLines(path, output));
}
/**
* Adds an output file.
*
* @deprecated prefer {@link #addOutputLines}. Declaring tests in the same file using text
* blocks is more readable, as it encourages writing small, focussed tests.
*/
@Deprecated
@CanIgnoreReturnValue
public BugCheckerRefactoringTestHelper addOutput(String outputFilename) {
return addInputAndOutput(input, forResource(clazz, outputFilename));
}
@CanIgnoreReturnValue
public BugCheckerRefactoringTestHelper expectUnchanged() {
return addInputAndOutput(input, input);
}
}
private static void closeCompiler(Context context) {
JavaCompiler compiler = context.get(JavaCompiler.compilerKey);
if (compiler != null) {
compiler.close();
}
}
/**
* Wraps a {@code InstanceReturningScannerSupplier}, but silently skips {@link #applyOverrides}
* instead of throwing {@code UOE}.
*/
private static
|
ExpectOutput
|
java
|
apache__thrift
|
lib/java/src/main/java/org/apache/thrift/TMultiplexedProcessor.java
|
{
"start": 5883,
"end": 6228
}
|
class ____ extends TProtocolDecorator {
TMessage messageBegin;
public StoredMessageProtocol(TProtocol protocol, TMessage messageBegin) {
super(protocol);
this.messageBegin = messageBegin;
}
@Override
public TMessage readMessageBegin() throws TException {
return messageBegin;
}
}
}
|
StoredMessageProtocol
|
java
|
google__guava
|
android/guava/src/com/google/common/collect/MoreCollectors.java
|
{
"start": 1344,
"end": 3636
}
|
class ____ {
/*
* TODO(lowasser): figure out if we can convert this to a concurrent AtomicReference-based
* collector without breaking j2cl?
*/
private static final Collector<Object, ?, Optional<Object>> TO_OPTIONAL =
Collector.of(
ToOptionalState::new,
ToOptionalState::add,
ToOptionalState::combine,
ToOptionalState::getOptional,
Collector.Characteristics.UNORDERED);
/**
* A collector that converts a stream of zero or one elements to an {@code Optional}.
*
* @throws IllegalArgumentException if the stream consists of two or more elements.
* @throws NullPointerException if any element in the stream is {@code null}.
* @return {@code Optional.of(onlyElement)} if the stream has exactly one element (must not be
* {@code null}) and returns {@code Optional.empty()} if it has none.
*/
@SuppressWarnings("unchecked")
public static <T> Collector<T, ?, Optional<T>> toOptional() {
return (Collector) TO_OPTIONAL;
}
private static final Object NULL_PLACEHOLDER = new Object();
private static final Collector<@Nullable Object, ?, @Nullable Object> ONLY_ELEMENT =
Collector.<@Nullable Object, ToOptionalState, @Nullable Object>of(
ToOptionalState::new,
(state, o) -> state.add((o == null) ? NULL_PLACEHOLDER : o),
ToOptionalState::combine,
state -> {
Object result = state.getElement();
return (result == NULL_PLACEHOLDER) ? null : result;
},
Collector.Characteristics.UNORDERED);
/**
* A collector that takes a stream containing exactly one element and returns that element. The
* returned collector throws an {@code IllegalArgumentException} if the stream consists of two or
* more elements, and a {@code NoSuchElementException} if the stream is empty.
*/
@SuppressWarnings("unchecked")
public static <T extends @Nullable Object> Collector<T, ?, T> onlyElement() {
return (Collector) ONLY_ELEMENT;
}
/**
* This atrocity is here to let us report several of the elements in the stream if there were more
* than one, not just two.
*/
@SuppressWarnings("EmptyList") // ImmutableList doesn't support nullable element types
private static final
|
MoreCollectors
|
java
|
hibernate__hibernate-orm
|
hibernate-envers/src/test/java/org/hibernate/orm/test/envers/integration/modifiedflags/HasChangedStringMap.java
|
{
"start": 1095,
"end": 3879
}
|
class ____ extends AbstractModifiedFlagsEntityTest {
private Integer sme1_id;
private Integer sme2_id;
@BeforeClassTemplate
public void initData(EntityManagerFactoryScope scope) {
StringMapEntity sme1 = new StringMapEntity();
StringMapEntity sme2 = new StringMapEntity();
// Revision 1 (sme1: initialy empty, sme2: initialy 1 mapping)
scope.inEntityManager( em -> {
em.getTransaction().begin();
sme2.getStrings().put( "1", "a" );
em.persist( sme1 );
em.persist( sme2 );
sme1_id = sme1.getId();
sme2_id = sme2.getId();
em.getTransaction().commit();
} );
// Revision 2 (sme1: adding 2 mappings, sme2: no changes)
scope.inEntityManager( em -> {
em.getTransaction().begin();
StringMapEntity sme1Loaded = em.find( StringMapEntity.class, sme1_id );
sme1Loaded.getStrings().put( "1", "a" );
sme1Loaded.getStrings().put( "2", "b" );
em.getTransaction().commit();
} );
// Revision 3 (sme1: removing an existing mapping, sme2: replacing a value)
scope.inEntityManager( em -> {
em.getTransaction().begin();
StringMapEntity sme1Loaded = em.find( StringMapEntity.class, sme1_id );
StringMapEntity sme2Loaded = em.find( StringMapEntity.class, sme2_id );
sme1Loaded.getStrings().remove( "1" );
sme2Loaded.getStrings().put( "1", "b" );
em.getTransaction().commit();
} );
// No revision (sme1: removing a non-existing mapping, sme2: replacing with the same value)
scope.inEntityManager( em -> {
em.getTransaction().begin();
StringMapEntity sme1Loaded = em.find( StringMapEntity.class, sme1_id );
StringMapEntity sme2Loaded = em.find( StringMapEntity.class, sme2_id );
sme1Loaded.getStrings().remove( "3" );
sme2Loaded.getStrings().put( "1", "b" );
em.getTransaction().commit();
} );
}
@Test
public void testHasChanged(EntityManagerFactoryScope scope) {
scope.inEntityManager( em -> {
final var auditReader = AuditReaderFactory.get( em );
List list = queryForPropertyHasChanged(
auditReader,
StringMapEntity.class, sme1_id,
"strings"
);
assertEquals( 3, list.size() );
assertEquals( makeList( 1, 2, 3 ), extractRevisionNumbers( list ) );
list = queryForPropertyHasChanged(
auditReader,
StringMapEntity.class, sme2_id,
"strings"
);
assertEquals( 2, list.size() );
assertEquals( makeList( 1, 3 ), extractRevisionNumbers( list ) );
list = queryForPropertyHasNotChanged(
auditReader,
StringMapEntity.class, sme1_id,
"strings"
);
assertEquals( 0, list.size() );
list = queryForPropertyHasNotChanged(
auditReader,
StringMapEntity.class, sme2_id,
"strings"
);
assertEquals( 0, list.size() ); // in rev 2 there was no version generated for sme2_id
} );
}
}
|
HasChangedStringMap
|
java
|
alibaba__nacos
|
core/src/main/java/com/alibaba/nacos/core/remote/RpcAckCallbackSynchronizer.java
|
{
"start": 1185,
"end": 5301
}
|
class ____ {
@SuppressWarnings("checkstyle:linelength")
public static final Map<String, Map<String, DefaultRequestFuture>> CALLBACK_CONTEXT = new ConcurrentLinkedHashMap.Builder<String, Map<String, DefaultRequestFuture>>()
.maximumWeightedCapacity(1000000)
.listener((s, pushCallBack) -> pushCallBack.entrySet().forEach(
stringDefaultPushFutureEntry -> stringDefaultPushFutureEntry.getValue().setFailResult(new TimeoutException()))).build();
/**
* notify ack.
*
* @param connectionId connectionId
* @param response response
*/
public static void ackNotify(String connectionId, Response response) {
Map<String, DefaultRequestFuture> stringDefaultPushFutureMap = CALLBACK_CONTEXT.get(connectionId);
if (stringDefaultPushFutureMap == null) {
Loggers.REMOTE_DIGEST
.warn("Ack receive on a outdated connection ,connection id={},requestId={} ", connectionId,
response.getRequestId());
return;
}
DefaultRequestFuture currentCallback = stringDefaultPushFutureMap.remove(response.getRequestId());
if (currentCallback == null) {
Loggers.REMOTE_DIGEST
.warn("Ack receive on a outdated request ,connection id={},requestId={} ", connectionId,
response.getRequestId());
return;
}
if (response.isSuccess()) {
currentCallback.setResponse(response);
} else {
currentCallback.setFailResult(new NacosException(response.getErrorCode(), response.getMessage()));
}
}
/**
* sync callback.
*
* @param connectionId connectionId
* @param requestId requestId
* @param defaultPushFuture defaultPushFuture
* @throws NacosException NacosException
*/
public static void syncCallback(String connectionId, String requestId, DefaultRequestFuture defaultPushFuture)
throws NacosException {
Map<String, DefaultRequestFuture> stringDefaultPushFutureMap = initContextIfNecessary(connectionId);
if (!stringDefaultPushFutureMap.containsKey(requestId)) {
DefaultRequestFuture pushCallBackPrev = stringDefaultPushFutureMap
.putIfAbsent(requestId, defaultPushFuture);
if (pushCallBackPrev == null) {
return;
}
}
throw new NacosException(NacosException.INVALID_PARAM, "request id conflict");
}
/**
* clear context of connectionId.
*
* @param connectionId connectionId
*/
public static void clearContext(String connectionId) {
CALLBACK_CONTEXT.remove(connectionId);
}
/**
* init context of connectionId if necessary.
*
* @param connectionId connectionId
*/
public static Map<String, DefaultRequestFuture> initContextIfNecessary(String connectionId) {
if (!CALLBACK_CONTEXT.containsKey(connectionId)) {
Map<String, DefaultRequestFuture> context = new HashMap<>(128);
Map<String, DefaultRequestFuture> stringDefaultRequestFutureMap = CALLBACK_CONTEXT
.putIfAbsent(connectionId, context);
return stringDefaultRequestFutureMap == null ? context : stringDefaultRequestFutureMap;
} else {
return CALLBACK_CONTEXT.get(connectionId);
}
}
/**
* clear context of requestId.
*
* @param connectionId connectionId
* @param requestId requestId
*/
public static void clearFuture(String connectionId, String requestId) {
Map<String, DefaultRequestFuture> stringDefaultPushFutureMap = CALLBACK_CONTEXT.get(connectionId);
if (stringDefaultPushFutureMap == null || !stringDefaultPushFutureMap.containsKey(requestId)) {
return;
}
stringDefaultPushFutureMap.remove(requestId);
}
}
|
RpcAckCallbackSynchronizer
|
java
|
elastic__elasticsearch
|
libs/lz4/src/test/java/org/elasticsearch/lz4/AbstractLZ4TestCase.java
|
{
"start": 2654,
"end": 4241
}
|
class ____ implements TesterBase<ByteBuffer> {
@Override
public ByteBuffer allocate(int length) {
ByteBuffer bb;
int slice = randomInt(5);
// Modified to only test heap ByteBuffers
bb = ByteBuffer.allocate(length + slice);
bb.position(slice);
bb = bb.slice();
if (randomBoolean()) {
bb.order(ByteOrder.LITTLE_ENDIAN);
} else {
bb.order(ByteOrder.BIG_ENDIAN);
}
return bb;
}
@Override
public ByteBuffer copyOf(byte[] array) {
ByteBuffer bb = allocate(array.length).put(array);
// Modified to not test read only buffers as they do not make the array accessible
bb.position(0);
return bb;
}
@Override
public byte[] copyOf(ByteBuffer data, int off, int len) {
byte[] copy = new byte[len];
data.position(off);
data.get(copy);
return copy;
}
@Override
public int maxCompressedLength(int len) {
return LZ4Utils.maxCompressedLength(len);
}
@Override
public void fill(ByteBuffer instance, byte b) {
for (int i = 0; i < instance.capacity(); ++i) {
instance.put(i, b);
}
}
}
}
public
|
ByteBufferTesterBase
|
java
|
quarkusio__quarkus
|
extensions/websockets-next/deployment/src/test/java/io/quarkus/websockets/next/test/client/UnhandledOpenFailureCloseStrategyTest.java
|
{
"start": 639,
"end": 1702
}
|
class ____ {
@RegisterExtension
public static final QuarkusUnitTest test = new QuarkusUnitTest()
.withApplicationRoot(root -> {
root.addClasses(ServerEndpoint.class, ClientOpenErrorEndpoint.class);
}).overrideConfigKey("quarkus.websockets-next.client.unhandled-failure-strategy", "close");
@Inject
WebSocketConnector<ClientOpenErrorEndpoint> connector;
@TestHTTPResource("/")
URI testUri;
@Test
void testError() throws InterruptedException {
WebSocketClientConnection connection = connector
.baseUri(testUri)
.connectAndAwait();
assertTrue(ServerEndpoint.CLOSED_LATCH.await(5, TimeUnit.SECONDS));
assertTrue(ClientOpenErrorEndpoint.CLOSED_LATCH.await(5, TimeUnit.SECONDS));
assertTrue(connection.isClosed());
assertEquals(WebSocketCloseStatus.INVALID_MESSAGE_TYPE.code(), connection.closeReason().getCode());
assertTrue(ClientOpenErrorEndpoint.MESSAGES.isEmpty());
}
}
|
UnhandledOpenFailureCloseStrategyTest
|
java
|
elastic__elasticsearch
|
x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/interceptor/FieldAndDocumentLevelSecurityRequestInterceptor.java
|
{
"start": 1430,
"end": 1578
}
|
class ____ interceptors that disables features when field level security is configured for indices a request
* is going to execute on.
*/
abstract
|
for
|
java
|
google__guava
|
guava/src/com/google/common/util/concurrent/AbstractExecutionThreadService.java
|
{
"start": 1571,
"end": 7683
}
|
class ____ implements Service {
/* use AbstractService for state management */
private final Service delegate =
new AbstractService() {
@Override
protected final void doStart() {
Executor executor = renamingDecorator(executor(), () -> serviceName());
executor.execute(
() -> {
try {
startUp();
notifyStarted();
// If stopAsync() is called while starting we may be in the STOPPING state in
// which case we should skip right down to shutdown.
if (isRunning()) {
try {
AbstractExecutionThreadService.this.run();
} catch (Throwable t) {
restoreInterruptIfIsInterruptedException(t);
try {
shutDown();
} catch (Exception ignored) {
restoreInterruptIfIsInterruptedException(ignored);
t.addSuppressed(ignored);
}
notifyFailed(t);
return;
}
}
shutDown();
notifyStopped();
} catch (Throwable t) {
restoreInterruptIfIsInterruptedException(t);
notifyFailed(t);
}
});
}
@Override
protected void doStop() {
triggerShutdown();
}
@Override
public String toString() {
return AbstractExecutionThreadService.this.toString();
}
};
/** Constructor for use by subclasses. */
protected AbstractExecutionThreadService() {}
/**
* Start the service. This method is invoked on the execution thread.
*
* <p>By default this method does nothing.
*/
protected void startUp() throws Exception {}
/**
* Run the service. This method is invoked on the execution thread. Implementations must respond
* to stop requests. You could poll for lifecycle changes in a work loop:
*
* <pre>
* public void run() {
* while ({@link #isRunning()}) {
* // perform a unit of work
* }
* }
* </pre>
*
* <p>...or you could respond to stop requests by implementing {@link #triggerShutdown()}, which
* should cause {@link #run()} to return.
*/
protected abstract void run() throws Exception;
/**
* Stop the service. This method is invoked on the execution thread.
*
* <p>By default this method does nothing.
*/
// TODO: consider supporting a TearDownTestCase-like API
protected void shutDown() throws Exception {}
/**
* Invoked to request the service to stop.
*
* <p>By default this method does nothing.
*
* <p>Currently, this method is invoked while holding a lock. If an implementation of this method
* blocks, it can prevent this service from changing state. If you need to performing a blocking
* operation in order to trigger shutdown, consider instead registering a listener and
* implementing {@code stopping}. Note, however, that {@code stopping} does not run at exactly the
* same times as {@code triggerShutdown}.
*/
protected void triggerShutdown() {}
/**
* Returns the {@link Executor} that will be used to run this service. Subclasses may override
* this method to use a custom {@link Executor}, which may configure its worker thread with a
* specific name, thread group or priority. The returned executor's {@link
* Executor#execute(Runnable) execute()} method is called when this service is started, and should
* return promptly.
*
* <p>The default implementation returns a new {@link Executor} that sets the name of its threads
* to the string returned by {@link #serviceName}
*/
protected Executor executor() {
return command -> newThread(serviceName(), command).start();
}
@Override
public String toString() {
return serviceName() + " [" + state() + "]";
}
@Override
public final boolean isRunning() {
return delegate.isRunning();
}
@Override
public final State state() {
return delegate.state();
}
/**
* @since 13.0
*/
@Override
public final void addListener(Listener listener, Executor executor) {
delegate.addListener(listener, executor);
}
/**
* @since 14.0
*/
@Override
public final Throwable failureCause() {
return delegate.failureCause();
}
/**
* @since 15.0
*/
@CanIgnoreReturnValue
@Override
public final Service startAsync() {
delegate.startAsync();
return this;
}
/**
* @since 15.0
*/
@CanIgnoreReturnValue
@Override
public final Service stopAsync() {
delegate.stopAsync();
return this;
}
/**
* @since 15.0
*/
@Override
public final void awaitRunning() {
delegate.awaitRunning();
}
/**
* @since 28.0
*/
@Override
public final void awaitRunning(Duration timeout) throws TimeoutException {
Service.super.awaitRunning(timeout);
}
/**
* @since 15.0
*/
@Override
public final void awaitRunning(long timeout, TimeUnit unit) throws TimeoutException {
delegate.awaitRunning(timeout, unit);
}
/**
* @since 15.0
*/
@Override
public final void awaitTerminated() {
delegate.awaitTerminated();
}
/**
* @since 28.0
*/
@Override
public final void awaitTerminated(Duration timeout) throws TimeoutException {
Service.super.awaitTerminated(timeout);
}
/**
* @since 15.0
*/
@Override
public final void awaitTerminated(long timeout, TimeUnit unit) throws TimeoutException {
delegate.awaitTerminated(timeout, unit);
}
/**
* Returns the name of this service. {@link AbstractExecutionThreadService} may include the name
* in debugging output.
*
* <p>Subclasses may override this method.
*
* @since 14.0 (present in 10.0 as getServiceName)
*/
protected String serviceName() {
return getClass().getSimpleName();
}
}
|
AbstractExecutionThreadService
|
java
|
redisson__redisson
|
redisson-quarkus/redisson-quarkus-30/cache/integration-tests/src/test/java/org/redisson/quarkus/client/it/QuarkusRedissonClientResourceTest.java
|
{
"start": 518,
"end": 1002
}
|
class ____ {
@Container
public static final GenericContainer REDIS = new FixedHostPortGenericContainer("redis:latest")
.withFixedExposedPort(6379, 6379);
@Test
public void testCacheResult() {
given()
.when().get("/quarkus-redisson-client/cacheResult")
.then()
.statusCode(200)
.body(is("true"));
}
}
|
QuarkusRedissonClientResourceTest
|
java
|
apache__kafka
|
streams/src/main/java/org/apache/kafka/streams/processor/internals/assignment/DefaultKafkaStreamsState.java
|
{
"start": 1628,
"end": 6698
}
|
class ____ implements KafkaStreamsState {
private static final Logger LOG = LoggerFactory.getLogger(DefaultKafkaStreamsState.class);
private final ProcessId processId;
private final int numProcessingThreads;
private final Map<String, String> clientTags;
private final SortedSet<TaskId> previousActiveTasks;
private final SortedSet<TaskId> previousStandbyTasks;
private final SortedMap<String, Set<TaskId>> taskIdsByConsumer;
private final Optional<HostInfo> hostInfo;
private final Optional<Map<TaskId, Long>> taskLagTotals; // contains lag for all stateful tasks in the app topology
private final Optional<String> rackId;
public DefaultKafkaStreamsState(final ProcessId processId,
final int numProcessingThreads,
final Map<String, String> clientTags,
final SortedSet<TaskId> previousActiveTasks,
final SortedSet<TaskId> previousStandbyTasks,
final SortedMap<String, Set<TaskId>> taskIdsByConsumer,
final Optional<HostInfo> hostInfo,
final Optional<Map<TaskId, Long>> taskLagTotals,
final Optional<String> rackId) {
this.processId = processId;
this.numProcessingThreads = numProcessingThreads;
this.clientTags = unmodifiableMap(clientTags);
this.previousActiveTasks = unmodifiableSortedSet(previousActiveTasks);
this.previousStandbyTasks = unmodifiableSortedSet(previousStandbyTasks);
this.taskIdsByConsumer = unmodifiableSortedMap(taskIdsByConsumer);
this.hostInfo = hostInfo;
this.taskLagTotals = taskLagTotals;
this.rackId = rackId;
}
@Override
public ProcessId processId() {
return processId;
}
@Override
public int numProcessingThreads() {
return numProcessingThreads;
}
@Override
public SortedSet<String> consumerClientIds() {
return new TreeSet<>(taskIdsByConsumer.keySet());
}
@Override
public SortedSet<TaskId> previousActiveTasks() {
return previousActiveTasks;
}
@Override
public SortedSet<TaskId> previousStandbyTasks() {
return previousStandbyTasks;
}
@Override
public long lagFor(final TaskId task) {
if (taskLagTotals.isEmpty()) {
LOG.error("lagFor was called on a KafkaStreamsState {} that does not support lag computations.", processId);
throw new UnsupportedOperationException("Lag computation was not requested for KafkaStreamsState with process " + processId);
}
final Long totalLag = taskLagTotals.get().get(task);
if (totalLag == null) {
LOG.error("Task lag lookup failed: {} not in {}", task,
Arrays.toString(taskLagTotals.get().keySet().toArray()));
throw new IllegalStateException("Tried to lookup lag for unknown task " + task);
}
return totalLag;
}
@Override
public SortedSet<TaskId> prevTasksByLag(final String consumerClientId) {
if (taskLagTotals.isEmpty()) {
LOG.error("prevTasksByLag was called on a KafkaStreamsState {} that does not support lag computations.", processId);
throw new UnsupportedOperationException("Lag computation was not requested for KafkaStreamsState with process " + processId);
}
final SortedSet<TaskId> prevTasksByLag =
new TreeSet<>(comparingLong(this::lagFor).thenComparing(TaskId::compareTo));
final Set<TaskId> prevOwnedStatefulTasks = taskIdsByConsumer.containsKey(consumerClientId)
? taskIdsByConsumer.get(consumerClientId) : new HashSet<>();
for (final TaskId task : prevOwnedStatefulTasks) {
if (taskLagTotals.get().containsKey(task)) {
prevTasksByLag.add(task);
} else {
LOG.debug(
"Skipping previous task {} since it's not part of the current assignment",
task
);
}
}
return prevTasksByLag;
}
@Override
public Map<TaskId, Long> statefulTasksToLagSums() {
if (taskLagTotals.isEmpty()) {
LOG.error("statefulTasksToLagSums was called on a KafkaStreamsState {} that does not support lag computations.", processId);
throw new UnsupportedOperationException("Lag computation was not requested for KafkaStreamsState with process " + processId);
}
return taskLagTotals.get().keySet()
.stream()
.collect(Collectors.toMap(taskId -> taskId, this::lagFor));
}
@Override
public Optional<HostInfo> hostInfo() {
return hostInfo;
}
@Override
public Map<String, String> clientTags() {
return clientTags;
}
@Override
public Optional<String> rackId() {
return rackId;
}
}
|
DefaultKafkaStreamsState
|
java
|
assertj__assertj-core
|
assertj-core/src/test/java/org/assertj/core/api/assumptions/BDDAssumptionsTest.java
|
{
"start": 11398,
"end": 11805
}
|
class ____ {
private final double[] actual = { 1.0, 2.0 };
@Test
void should_run_test_when_assumption_passes() {
thenCode(() -> given(actual).contains(1.0)).doesNotThrowAnyException();
}
@Test
void should_ignore_test_when_assumption_fails() {
expectAssumptionNotMetException(() -> given(actual).contains(0.0f));
}
}
@Nested
|
BDDAssumptions_given_double_array_Test
|
java
|
apache__hadoop
|
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/scheduler/OpportunisticContainerAllocator.java
|
{
"start": 2547,
"end": 2654
}
|
class ____ {
private int maxAllocationsPerAMHeartbeat = -1;
/**
* This
|
OpportunisticContainerAllocator
|
java
|
spring-projects__spring-framework
|
spring-context/src/test/java/org/springframework/cache/annotation/AnnotationCacheOperationSourceTests.java
|
{
"start": 14332,
"end": 14475
}
|
interface ____ {
@Cacheable
void interfaceCacheConfig();
@CachePut
void interfaceCacheableOverride();
}
private static
|
CacheConfigIfc
|
java
|
spring-projects__spring-boot
|
module/spring-boot-restclient-test/src/test/java/org/springframework/boot/restclient/test/autoconfigure/RestClientTestRestTemplateAndRestClientTogetherIntegrationTests.java
|
{
"start": 1629,
"end": 2886
}
|
class ____ {
@Autowired
private ExampleRestTemplateService restTemplateClient;
@Autowired
private ExampleRestClientService restClientClient;
@Autowired
private MockServerRestTemplateCustomizer templateCustomizer;
@Autowired
private MockServerRestClientCustomizer clientCustomizer;
@Autowired
private MockRestServiceServer server;
@Test
void serverShouldNotWork() {
assertThatIllegalStateException().isThrownBy(
() -> this.server.expect(requestTo(uri("/test"))).andRespond(withSuccess("hello", MediaType.TEXT_HTML)))
.withMessageContaining("Unable to use auto-configured");
}
@Test
void restTemplateClientRestCallViaCustomizer() {
this.templateCustomizer.getServer()
.expect(requestTo("/test"))
.andRespond(withSuccess("hello", MediaType.TEXT_HTML));
assertThat(this.restTemplateClient.test()).isEqualTo("hello");
}
@Test
void restClientClientRestCallViaCustomizer() {
this.clientCustomizer.getServer()
.expect(requestTo(uri("/test")))
.andRespond(withSuccess("there", MediaType.TEXT_HTML));
assertThat(this.restClientClient.test()).isEqualTo("there");
}
private static String uri(String path) {
return "https://example.com" + path;
}
}
|
RestClientTestRestTemplateAndRestClientTogetherIntegrationTests
|
java
|
elastic__elasticsearch
|
x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/xpack/searchablesnapshots/AbstractSearchableSnapshotsRestTestCase.java
|
{
"start": 2452,
"end": 33294
}
|
class ____ extends ESRestTestCase {
private static final String WRITE_REPOSITORY_NAME = "repository";
private static final String READ_REPOSITORY_NAME = "read-repository";
private static final String SNAPSHOT_NAME = "searchable-snapshot";
protected abstract String writeRepositoryType();
protected abstract Settings writeRepositorySettings();
protected boolean useReadRepository() {
return false;
}
protected String readRepositoryType() {
return writeRepositoryType();
}
protected Settings readRepositorySettings() {
return writeRepositorySettings();
}
private void runSearchableSnapshotsTest(SearchableSnapshotsTestCaseBody testCaseBody) throws Exception {
runSearchableSnapshotsTest(testCaseBody, false);
}
private void runSearchableSnapshotsTest(SearchableSnapshotsTestCaseBody testCaseBody, boolean sourceOnly) throws Exception {
runSearchableSnapshotsTest(testCaseBody, sourceOnly, randomIntBetween(1, 500), null);
}
private void runSearchableSnapshotsTest(
final SearchableSnapshotsTestCaseBody testCaseBody,
final boolean sourceOnly,
final int numDocs,
@Nullable Settings indexSettings
) throws Exception {
final String repositoryType = writeRepositoryType();
Settings repositorySettings = writeRepositorySettings();
if (sourceOnly) {
repositorySettings = Settings.builder().put("delegate_type", repositoryType).put(repositorySettings).build();
}
logger.info("creating repository [{}] of type [{}]", WRITE_REPOSITORY_NAME, repositoryType);
registerRepository(WRITE_REPOSITORY_NAME, sourceOnly ? "source" : repositoryType, true, repositorySettings);
final String readRepository;
if (useReadRepository()) {
final String readRepositoryType = readRepositoryType();
Settings readRepositorySettings = readRepositorySettings();
if (sourceOnly) {
readRepositorySettings = Settings.builder().put("delegate_type", readRepositoryType).put(readRepositorySettings).build();
}
logger.info("creating read repository [{}] of type [{}]", READ_REPOSITORY_NAME, readRepositoryType);
registerRepository(READ_REPOSITORY_NAME, sourceOnly ? "source" : readRepositoryType, true, readRepositorySettings);
readRepository = READ_REPOSITORY_NAME;
} else {
readRepository = WRITE_REPOSITORY_NAME;
}
final String indexName = randomAlphaOfLength(10).toLowerCase(Locale.ROOT);
logger.info("creating index [{}]", indexName);
createIndex(indexName, indexSettings != null ? indexSettings : indexSettings(randomIntBetween(1, 5), 0).build(), """
"properties": {
"field": {
"type": "integer"
},
"text": {
"type": "text",
"fields": {
"raw": {
"type": "keyword"
}
}
}
}
""");
ensureGreen(indexName);
logger.info("indexing [{}] documents", numDocs);
final int indexingThreads = 2;
final CountDownLatch indexingLatch = new CountDownLatch(indexingThreads);
final AtomicLong remainingDocs = new AtomicLong(numDocs);
for (int i = 0; i < indexingThreads; i++) {
var thread = new Thread(() -> {
try {
do {
final StringBuilder bulkBody = new StringBuilder();
int bulkSize = 0;
long n;
while ((n = remainingDocs.decrementAndGet()) >= 0) {
bulkBody.append(Strings.format("""
{"index": {"_id":"%d"} }
{"field": %d, "text": "Document number %d"}
""", n, n, n));
bulkSize += 1;
if (bulkSize >= 500) {
break;
}
}
if (bulkSize > 0) {
Request documents = new Request(HttpPost.METHOD_NAME, '/' + indexName + "/_bulk");
documents.addParameter("refresh", Boolean.TRUE.toString());
documents.setJsonEntity(bulkBody.toString());
assertOK(client().performRequest(documents));
}
} while (remainingDocs.get() > 0);
} catch (Exception e) {
throw new AssertionError(e);
} finally {
indexingLatch.countDown();
}
});
thread.start();
}
indexingLatch.await();
if (randomBoolean()) {
final StringBuilder bulkUpdateBody = new StringBuilder();
for (int i = 0; i < randomIntBetween(1, numDocs); i++) {
bulkUpdateBody.append("{\"update\":{\"_id\":\"").append(i).append("\"}}\n");
bulkUpdateBody.append("{\"doc\":{").append("\"text\":\"Updated document number ").append(i).append("\"}}\n");
}
final Request bulkUpdate = new Request(HttpPost.METHOD_NAME, '/' + indexName + "/_bulk");
bulkUpdate.addParameter("refresh", Boolean.TRUE.toString());
bulkUpdate.setJsonEntity(bulkUpdateBody.toString());
assertOK(client().performRequest(bulkUpdate));
}
logger.info("force merging index [{}]", indexName);
forceMerge(indexName, randomBoolean(), randomBoolean());
// Remove the snapshots, if a previous test failed to delete them. This is
// useful for third party tests that runs the test against a real external service.
deleteSnapshot(SNAPSHOT_NAME, true);
logger.info("creating snapshot [{}]", SNAPSHOT_NAME);
createSnapshot(WRITE_REPOSITORY_NAME, SNAPSHOT_NAME, true);
logger.info("deleting index [{}]", indexName);
deleteIndex(indexName);
final String restoredIndexName = randomBoolean() ? indexName : randomAlphaOfLength(10).toLowerCase(Locale.ROOT);
logger.info("restoring index [{}] from snapshot [{}] as [{}]", indexName, SNAPSHOT_NAME, restoredIndexName);
mountSnapshot(indexName, restoredIndexName, readRepository);
ensureGreen(restoredIndexName);
final Number count = count(restoredIndexName);
assertThat("Wrong index count for index " + restoredIndexName, count.intValue(), equalTo(numDocs));
testCaseBody.runTest(restoredIndexName, numDocs);
logger.info("deleting mounted index [{}]", indexName);
deleteIndex(restoredIndexName);
logger.info("deleting snapshot [{}]", SNAPSHOT_NAME);
deleteSnapshot(SNAPSHOT_NAME, false);
}
public void testSearchResults() throws Exception {
runSearchableSnapshotsTest((restoredIndexName, numDocs) -> {
for (int i = 0; i < 10; i++) {
assertSearchResults(restoredIndexName, numDocs, randomFrom(Boolean.TRUE, Boolean.FALSE, null));
}
});
}
public void testSourceOnlyRepository() throws Exception {
runSearchableSnapshotsTest((indexName, numDocs) -> {
for (int i = 0; i < 10; i++) {
if (randomBoolean()) {
logger.info("clearing searchable snapshots cache for [{}] before search", indexName);
clearCache(indexName);
}
Map<String, Object> searchResults = search(
indexName,
QueryBuilders.matchAllQuery(),
randomFrom(Boolean.TRUE, Boolean.FALSE, null)
);
assertThat(extractValue(searchResults, "hits.total.value"), equalTo(numDocs));
// takes a snapshot of the searchable snapshot index into the source-only repository should fail
String sourceOnlySnapshot = "source-only-snap-" + randomAlphaOfLength(10).toLowerCase(Locale.ROOT);
final Request request = new Request(HttpPut.METHOD_NAME, "_snapshot/" + WRITE_REPOSITORY_NAME + '/' + sourceOnlySnapshot);
request.addParameter("wait_for_completion", "true");
request.setJsonEntity(Strings.format("""
{
"include_global_state": false,
"indices" : "%s"
}
""", indexName));
final Response response = adminClient().performRequest(request);
assertThat(response.getStatusLine().getStatusCode(), equalTo(RestStatus.OK.getStatus()));
final List<Map<String, Object>> failures = extractValue(responseAsMap(response), "snapshot.failures");
assertThat(failures, notNullValue());
assertThat(failures.size(), greaterThan(0));
for (Map<String, Object> failure : failures) {
assertThat(extractValue(failure, "status"), equalTo(RestStatus.INTERNAL_SERVER_ERROR.toString()));
assertThat(
extractValue(failure, "reason"),
allOf(
containsString("is not a regular index"),
containsString("cannot be snapshotted into a source-only repository")
)
);
}
}
}, true);
}
public void testCloseAndReopen() throws Exception {
runSearchableSnapshotsTest((restoredIndexName, numDocs) -> {
closeIndex(restoredIndexName);
ensureGreen(restoredIndexName);
final Request openRequest = new Request(HttpPost.METHOD_NAME, restoredIndexName + "/_open");
assertOK(client().performRequest(openRequest));
ensureGreen(restoredIndexName);
for (int i = 0; i < 10; i++) {
assertSearchResults(restoredIndexName, numDocs, randomFrom(Boolean.TRUE, Boolean.FALSE, null));
}
});
}
public void testStats() throws Exception {
runSearchableSnapshotsTest((restoredIndexName, numDocs) -> {
final Map<String, Object> stats = searchableSnapshotStats(restoredIndexName);
assertThat("Expected searchable snapshots stats for [" + restoredIndexName + ']', stats.size(), greaterThan(0));
final int nbShards = Integer.valueOf(extractValue(indexSettings(restoredIndexName), IndexMetadata.SETTING_NUMBER_OF_SHARDS));
assertThat("Expected searchable snapshots stats for " + nbShards + " shards but got " + stats, stats.size(), equalTo(nbShards));
});
}
public void testClearCache() throws Exception {
@SuppressWarnings("unchecked")
final Function<Map<?, ?>, Long> sumCachedBytesWritten = stats -> stats.values()
.stream()
.filter(o -> o instanceof List)
.flatMap(o -> ((List) o).stream())
.filter(o -> o instanceof Map)
.map(o -> ((Map<?, ?>) o).get("files"))
.filter(o -> o instanceof List)
.flatMap(o -> ((List) o).stream())
.filter(o -> o instanceof Map)
.map(o -> ((Map<?, ?>) o).get("cached_bytes_written"))
.filter(o -> o instanceof Map)
.map(o -> ((Map<?, ?>) o).get("sum"))
.mapToLong(o -> ((Number) o).longValue())
.sum();
runSearchableSnapshotsTest((restoredIndexName, numDocs) -> {
Map<String, Object> searchResults = search(restoredIndexName, QueryBuilders.matchAllQuery(), Boolean.TRUE);
assertThat(extractValue(searchResults, "hits.total.value"), equalTo(numDocs));
waitForIdlingSearchableSnapshotsThreadPools();
final long bytesInCacheBeforeClear = sumCachedBytesWritten.apply(searchableSnapshotStats(restoredIndexName));
assertThat(bytesInCacheBeforeClear, greaterThan(0L));
clearCache(restoredIndexName);
final long bytesInCacheAfterClear = sumCachedBytesWritten.apply(searchableSnapshotStats(restoredIndexName));
assertThat("Searchable snapshot cache wasn't cleared", bytesInCacheAfterClear, equalTo(bytesInCacheBeforeClear));
searchResults = search(restoredIndexName, QueryBuilders.matchAllQuery(), Boolean.TRUE);
assertThat(extractValue(searchResults, "hits.total.value"), equalTo(numDocs));
waitForIdlingSearchableSnapshotsThreadPools();
assertBusy(() -> {
final long bytesInCacheAfterSearch = sumCachedBytesWritten.apply(searchableSnapshotStats(restoredIndexName));
assertThat(bytesInCacheAfterSearch, greaterThanOrEqualTo(bytesInCacheBeforeClear));
});
});
}
public void testSnapshotOfSearchableSnapshot() throws Exception {
runSearchableSnapshotsTest((restoredIndexName, numDocs) -> {
if (randomBoolean()) {
logger.info("--> closing index [{}]", restoredIndexName);
final Request closeRequest = new Request(HttpPost.METHOD_NAME, restoredIndexName + "/_close");
assertOK(client().performRequest(closeRequest));
}
ensureGreen(restoredIndexName);
final String snapshot2Name = "snapshotception";
// Remove the snapshots, if a previous test failed to delete them. This is
// useful for third party tests that runs the test against a real external service.
deleteSnapshot(snapshot2Name, true);
final Request snapshotRequest = new Request(HttpPut.METHOD_NAME, "_snapshot/" + WRITE_REPOSITORY_NAME + '/' + snapshot2Name);
snapshotRequest.addParameter("wait_for_completion", "true");
try (XContentBuilder builder = jsonBuilder()) {
builder.startObject();
builder.field("indices", restoredIndexName);
builder.field("include_global_state", "false");
builder.endObject();
snapshotRequest.setEntity(new StringEntity(Strings.toString(builder), ContentType.APPLICATION_JSON));
}
assertOK(client().performRequest(snapshotRequest));
final List<Map<String, Map<String, Object>>> snapshotShardsStats = extractValue(
responseAsMap(
client().performRequest(
new Request(HttpGet.METHOD_NAME, "/_snapshot/" + WRITE_REPOSITORY_NAME + "/" + snapshot2Name + "/_status")
)
),
"snapshots.indices." + restoredIndexName + ".shards"
);
assertThat(snapshotShardsStats.size(), equalTo(1));
for (Map<String, Object> value : snapshotShardsStats.get(0).values()) {
assertThat(extractValue(value, "stats.total.file_count"), equalTo(0));
assertThat(extractValue(value, "stats.incremental.file_count"), equalTo(0));
}
deleteIndex(restoredIndexName);
restoreSnapshot(WRITE_REPOSITORY_NAME, snapshot2Name, true);
ensureGreen(restoredIndexName);
deleteSnapshot(snapshot2Name, false);
assertSearchResults(restoredIndexName, numDocs, randomFrom(Boolean.TRUE, Boolean.FALSE, null));
});
}
public void testQueryScript() throws Exception {
runSearchableSnapshotsTest((indexName, numDocs) -> {
final int nbThreads = 5;
final CyclicBarrier barrier = new CyclicBarrier(nbThreads);
final AtomicBoolean maybeStop = new AtomicBoolean(false);
final CountDownLatch done = new CountDownLatch(nbThreads);
logger.info("--> starting concurrent search queries");
for (int threadId = 0; threadId < nbThreads; threadId++) {
int finalThreadId = threadId;
Thread thread = new Thread(() -> {
try {
for (int runs = 0; runs < 10; runs++) {
if (maybeStop.get()) {
return;
}
barrier.await(30L, TimeUnit.SECONDS);
if (finalThreadId == 0) {
// we want the cache to be empty so that cached data will have to be fetched
clearCache(indexName);
}
barrier.await(30L, TimeUnit.SECONDS);
if (maybeStop.get()) {
return;
}
// we want the thread pools to have no active workers when the first script query will be cached in query cache
waitForIdlingSearchableSnapshotsThreadPools();
barrier.await(30L, TimeUnit.SECONDS);
if (maybeStop.get()) {
return;
}
Request searchRequest = new Request(HttpPost.METHOD_NAME, '/' + indexName + "/_search");
searchRequest.addParameter("search_type", "query_then_fetch");
searchRequest.setJsonEntity(
new SearchSourceBuilder().trackTotalHits(true)
.query(
QueryBuilders.scriptQuery(
new Script(
ScriptType.INLINE,
Script.DEFAULT_SCRIPT_LANG,
"doc['text.raw'].value.toString().length() > 0",
Collections.emptyMap()
)
)
)
.toString()
);
Response searchResponse = client().performRequest(searchRequest);
assertThat(extractValue(responseAsMap(searchResponse), "hits.total.value"), equalTo(numDocs));
assertOK(searchResponse);
}
} catch (Exception e) {
maybeStop.set(true);
throw new AssertionError(e);
} finally {
done.countDown();
}
});
thread.start();
}
logger.info("--> waiting for searches to complete");
done.await();
},
false,
10_000,
indexSettings(1, 0).put(SearchableSnapshots.SNAPSHOT_CACHE_PREWARM_ENABLED_SETTING.getKey(), true)
.put(IndicesRequestCache.INDEX_CACHE_REQUEST_ENABLED_SETTING.getKey(), true)
.put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), true)
.build()
);
}
private void clearCache(String restoredIndexName) throws IOException {
final Request request = new Request(HttpPost.METHOD_NAME, restoredIndexName + "/_searchable_snapshots/cache/clear");
assertOK(client().performRequest(request));
}
public void assertSearchResults(String indexName, int numDocs, Boolean ignoreThrottled) throws IOException {
if (randomBoolean()) {
logger.info("clearing searchable snapshots cache for [{}] before search", indexName);
clearCache(indexName);
}
final int randomTieBreaker = randomIntBetween(0, numDocs - 1);
Map<String, Object> searchResults;
switch (randomInt(3)) {
case 0 -> {
searchResults = search(indexName, QueryBuilders.termQuery("field", String.valueOf(randomTieBreaker)), ignoreThrottled);
assertThat(extractValue(searchResults, "hits.total.value"), equalTo(1));
@SuppressWarnings("unchecked")
Map<String, Object> searchHit = (Map<String, Object>) ((List<?>) extractValue(searchResults, "hits.hits")).get(0);
assertThat(extractValue(searchHit, "_index"), equalTo(indexName));
assertThat(extractValue(searchHit, "_source.field"), equalTo(randomTieBreaker));
}
case 1 -> {
searchResults = search(indexName, QueryBuilders.rangeQuery("field").lt(randomTieBreaker), ignoreThrottled);
assertThat(extractValue(searchResults, "hits.total.value"), equalTo(randomTieBreaker));
}
case 2 -> {
searchResults = search(indexName, QueryBuilders.rangeQuery("field").gte(randomTieBreaker), ignoreThrottled);
assertThat(extractValue(searchResults, "hits.total.value"), equalTo(numDocs - randomTieBreaker));
}
case 3 -> {
searchResults = search(indexName, QueryBuilders.matchQuery("text", "document"), ignoreThrottled);
assertThat(extractValue(searchResults, "hits.total.value"), equalTo(numDocs));
}
default -> fail("Unsupported randomized search query");
}
}
protected static void deleteSnapshot(String snapshot, boolean ignoreMissing) throws IOException {
final Request request = new Request(HttpDelete.METHOD_NAME, "_snapshot/" + WRITE_REPOSITORY_NAME + '/' + snapshot);
try {
final Response response = client().performRequest(request);
assertAcked(
"Failed to delete snapshot [" + snapshot + "] in repository [" + WRITE_REPOSITORY_NAME + "]: " + response,
response
);
} catch (IOException e) {
if (ignoreMissing && e instanceof ResponseException) {
Response response = ((ResponseException) e).getResponse();
assertThat(response.getStatusLine().getStatusCode(), equalTo(RestStatus.NOT_FOUND.getStatus()));
return;
}
throw e;
}
}
protected static void mountSnapshot(String snapshotIndexName, String mountIndexName, String repositoryName) throws IOException {
final Request request = new Request(HttpPost.METHOD_NAME, "/_snapshot/" + repositoryName + "/" + SNAPSHOT_NAME + "/_mount");
request.addParameter("wait_for_completion", Boolean.toString(true));
request.addParameter("storage", randomFrom("full_copy", "shared_cache"));
final XContentBuilder builder = JsonXContent.contentBuilder().startObject().field("index", snapshotIndexName);
if (snapshotIndexName.equals(mountIndexName) == false || randomBoolean()) {
builder.field("renamed_index", mountIndexName);
}
builder.endObject();
request.setJsonEntity(Strings.toString(builder));
final Response response = client().performRequest(request);
assertThat(
"Failed to restore snapshot [" + SNAPSHOT_NAME + "] in repository [" + repositoryName + "]: " + response,
response.getStatusLine().getStatusCode(),
equalTo(RestStatus.OK.getStatus())
);
}
private static void assertAcked(String message, Response response) throws IOException {
final int responseStatusCode = response.getStatusLine().getStatusCode();
assertThat(
message + ": expecting response code [200] but got [" + responseStatusCode + ']',
responseStatusCode,
equalTo(RestStatus.OK.getStatus())
);
final Map<String, Object> responseAsMap = responseAsMap(response);
assertThat(message + ": response is not acknowledged", extractValue(responseAsMap, "acknowledged"), equalTo(Boolean.TRUE));
}
protected static void forceMerge(String index, boolean onlyExpungeDeletes, boolean flush) throws IOException {
final Request request = new Request(HttpPost.METHOD_NAME, '/' + index + "/_forcemerge");
request.addParameter("only_expunge_deletes", Boolean.toString(onlyExpungeDeletes));
request.addParameter("flush", Boolean.toString(flush));
assertOK(client().performRequest(request));
}
protected static Number count(String index) throws IOException {
final Response response = client().performRequest(new Request(HttpPost.METHOD_NAME, '/' + index + "/_count"));
assertThat(
"Failed to execute count request on index [" + index + "]: " + response,
response.getStatusLine().getStatusCode(),
equalTo(RestStatus.OK.getStatus())
);
final Map<String, Object> responseAsMap = responseAsMap(response);
assertThat(
"Shard failures when executing count request on index [" + index + "]: " + response,
extractValue(responseAsMap, "_shards.failed"),
equalTo(0)
);
return (Number) extractValue(responseAsMap, "count");
}
protected static Map<String, Object> search(String index, QueryBuilder query, Boolean ignoreThrottled) throws IOException {
final Request request = new Request(HttpPost.METHOD_NAME, '/' + index + "/_search");
request.setJsonEntity(new SearchSourceBuilder().trackTotalHits(true).query(query).toString());
// If warning are returned than these must exist in this set:
Set<String> expectedWarnings = new HashSet<>();
expectedWarnings.add(TransportSearchAction.FROZEN_INDICES_DEPRECATION_MESSAGE.replace("{}", index));
if (ignoreThrottled != null) {
request.addParameter("ignore_throttled", ignoreThrottled.toString());
expectedWarnings.add(
"[ignore_throttled] parameter is deprecated because frozen indices have been deprecated. "
+ "Consider cold or frozen tiers in place of frozen indices."
);
}
RequestOptions requestOptions = RequestOptions.DEFAULT.toBuilder().setWarningsHandler(warnings -> {
for (String warning : warnings) {
if (expectedWarnings.contains(warning) == false) {
return true;
}
}
return false;
}).build();
request.setOptions(requestOptions);
final Response response = client().performRequest(request);
assertThat(
"Failed to execute search request on index [" + index + "]: " + response,
response.getStatusLine().getStatusCode(),
equalTo(RestStatus.OK.getStatus())
);
final Map<String, Object> responseAsMap = responseAsMap(response);
assertThat(
"Shard failures when executing search request on index [" + index + "]: " + response,
extractValue(responseAsMap, "_shards.failed"),
equalTo(0)
);
return responseAsMap;
}
protected static Map<String, Object> searchableSnapshotStats(String index) throws IOException {
final Request request = new Request(HttpGet.METHOD_NAME, '/' + index + "/_searchable_snapshots/stats");
request.addParameter("level", "shards");
final Response response = client().performRequest(request);
assertThat(
"Failed to retrieve searchable snapshots stats for on index [" + index + "]: " + response,
response.getStatusLine().getStatusCode(),
equalTo(RestStatus.OK.getStatus())
);
final Map<String, Object> responseAsMap = responseAsMap(response);
assertThat(
"Shard failures when retrieving searchable snapshots stats for index [" + index + "]: " + response,
extractValue(responseAsMap, "_shards.failed"),
equalTo(0)
);
return extractValue(responseAsMap, "indices." + index + ".shards");
}
protected static Map<String, Object> indexSettings(String index) throws IOException {
final Response response = client().performRequest(new Request(HttpGet.METHOD_NAME, '/' + index));
assertThat(
"Failed to get settings on index [" + index + "]: " + response,
response.getStatusLine().getStatusCode(),
equalTo(RestStatus.OK.getStatus())
);
return extractValue(responseAsMap(response), index + ".settings");
}
@SuppressWarnings("unchecked")
protected static void waitForIdlingSearchableSnapshotsThreadPools() throws Exception {
final Set<String> searchableSnapshotsThreadPools = Set.of(
SearchableSnapshots.CACHE_FETCH_ASYNC_THREAD_POOL_NAME,
SearchableSnapshots.CACHE_PREWARMING_THREAD_POOL_NAME
);
assertBusy(() -> {
final Response response = client().performRequest(new Request(HttpGet.METHOD_NAME, "/_nodes/stats/thread_pool"));
assertThat(response.getStatusLine().getStatusCode(), equalTo(RestStatus.OK.getStatus()));
final Map<String, Object> nodes = extractValue(responseAsMap(response), "nodes");
assertThat(nodes, notNullValue());
for (String node : nodes.keySet()) {
final Map<String, Object> threadPools = extractValue((Map<String, Object>) nodes.get(node), "thread_pool");
searchableSnapshotsThreadPools.forEach(threadPoolName -> {
final Map<String, Object> threadPoolStats = (Map<String, Object>) threadPools.get(threadPoolName);
assertThat(threadPoolStats, notNullValue());
final Number active = extractValue(threadPoolStats, "active");
assertThat(threadPoolName + " has still active tasks", active.longValue(), equalTo(0L));
final Number queue = extractValue(threadPoolStats, "queue");
assertThat(threadPoolName + " has still enqueued tasks", queue.longValue(), equalTo(0L));
});
}
}, 30L, TimeUnit.SECONDS);
}
/**
 * Dotted-path lookup into a nested map, e.g. {@code "indices.idx.shards"}.
 * The unchecked cast is only safe when the caller knows the value's type.
 */
@SuppressWarnings("unchecked")
protected static <T> T extractValue(Map<String, Object> map, String path) {
    return (T) XContentMapValues.extractValue(path, map);
}
/**
* The body of a test case, which runs after the searchable snapshot has been created and restored.
*/
@FunctionalInterface
|
AbstractSearchableSnapshotsRestTestCase
|
java
|
apache__hadoop
|
hadoop-tools/hadoop-extras/src/main/java/org/apache/hadoop/tools/DistCh.java
|
{
"start": 8982,
"end": 17158
}
|
/**
 * Mapper that executes each FileOperation, keeping per-task success/failure
 * counts; whether accumulated failures fail the task is decided in close().
 */
class ____
    implements Mapper<Text, FileOperation, WritableComparable<?>, Text> {
  // job configuration captured in configure() for use by map()
  private JobConf jobconf;
  // when true, failed operations are logged/counted but do not fail the task
  private boolean ignoreFailures;
  private int failcount = 0;
  private int succeedcount = 0;
  // human-readable running totals, used for task status and error messages
  private String getCountString() {
    return "Succeeded: " + succeedcount + " Failed: " + failcount;
  }
  /** {@inheritDoc} */
  public void configure(JobConf job) {
    this.jobconf = job;
    ignoreFailures=job.getBoolean(Option.IGNORE_FAILURES.propertyname,false);
  }
  /** Run a FileOperation */
  public void map(Text key, FileOperation value,
      OutputCollector<WritableComparable<?>, Text> out, Reporter reporter
      ) throws IOException {
    try {
      value.run(jobconf);
      ++succeedcount;
      reporter.incrCounter(Counter.SUCCEED, 1);
    } catch (IOException e) {
      // record the failure in the job output/log instead of rethrowing,
      // so remaining operations still run
      ++failcount;
      reporter.incrCounter(Counter.FAIL, 1);
      String s = "FAIL: " + value + ", " + StringUtils.stringifyException(e);
      out.collect(null, new Text(s));
      LOG.info(s);
    } finally {
      reporter.setStatus(getCountString());
    }
  }
  /** {@inheritDoc} */
  public void close() throws IOException {
    // fail the task at the end unless failures are explicitly tolerated
    if (failcount == 0 || ignoreFailures) {
      return;
    }
    throw new IOException(getCountString());
  }
}
/** Validates that every operation's source path is an acceptable input. */
private static void check(Configuration conf, List<FileOperation> ops
    ) throws InvalidInputException {
  final List<Path> sources = new ArrayList<Path>();
  for (int i = 0; i < ops.size(); i++) {
    sources.add(ops.get(i).src);
  }
  DistTool.checkSource(conf, sources);
}
/** Reads the operation list file, one FileOperation per line. */
private static List<FileOperation> fetchList(Configuration conf, Path inputfile
    ) throws IOException {
  final List<FileOperation> operations = new ArrayList<FileOperation>();
  for (final String line : readFile(conf, inputfile)) {
    final FileOperation op = new FileOperation(line);
    operations.add(op);
  }
  return operations;
}
/**
 * This is the main driver for recursively changing files properties.
 * Parses the command line (-f urilist, -log dir, the ignore-failures
 * switch, or inline operations), validates the sources, then submits a
 * map-only job; the temporary log/job directories are always cleaned up.
 */
public int run(String[] args) throws Exception {
  List<FileOperation> ops = new ArrayList<FileOperation>();
  Path logpath = null;
  boolean isIgnoreFailures = false;
  try {
    for (int idx = 0; idx < args.length; idx++) {
      if ("-f".equals(args[idx])) {
        if (++idx == args.length) {
          System.out.println("urilist_uri not specified");
          System.out.println(USAGE);
          return -1;
        }
        ops.addAll(fetchList(jobconf, new Path(args[idx])));
      } else if (Option.IGNORE_FAILURES.cmd.equals(args[idx])) {
        isIgnoreFailures = true;
      } else if ("-log".equals(args[idx])) {
        if (++idx == args.length) {
          System.out.println("logdir not specified");
          System.out.println(USAGE);
          return -1;
        }
        logpath = new Path(args[idx]);
      } else if ('-' == args[idx].codePointAt(0)) {
        // unknown switch
        System.out.println("Invalid switch " + args[idx]);
        System.out.println(USAGE);
        ToolRunner.printGenericCommandUsage(System.out);
        return -1;
      } else {
        // anything else is an inline operation specification
        ops.add(new FileOperation(args[idx]));
      }
    }
    // mandatory command-line parameters
    if (ops.isEmpty()) {
      throw new IllegalStateException("Operation is empty");
    }
    LOG.info("ops=" + ops);
    LOG.info("isIgnoreFailures=" + isIgnoreFailures);
    jobconf.setBoolean(Option.IGNORE_FAILURES.propertyname, isIgnoreFailures);
    check(jobconf, ops);
    try {
      if (setup(ops, logpath)) {
        JobClient.runJob(jobconf);
      }
    } finally {
      try {
        if (logpath == null) {
          //delete log directory (only if the user did not ask to keep one)
          final Path logdir = FileOutputFormat.getOutputPath(jobconf);
          if (logdir != null) {
            logdir.getFileSystem(jobconf).delete(logdir, true);
          }
        }
      }
      finally {
        //delete job directory
        final String jobdir = jobconf.get(JOB_DIR_LABEL);
        if (jobdir != null) {
          final Path jobpath = new Path(jobdir);
          jobpath.getFileSystem(jobconf).delete(jobpath, true);
        }
      }
    }
  } catch(DuplicationException e) {
    LOG.error("Input error:", e);
    return DuplicationException.ERROR_CODE;
  } catch(Exception e) {
    LOG.error(NAME + " failed: ", e);
    System.out.println(USAGE);
    ToolRunner.printGenericCommandUsage(System.out);
    return -1;
  }
  return 0;
}
/**
 * Calculate how many maps to run: one map per OP_PER_MAP operations,
 * capped by MAX_MAPS_PER_NODE per node, and never fewer than one.
 */
private static int getMapCount(int srcCount, int numNodes) {
  final int byOperations = (int) (srcCount / OP_PER_MAP);
  final int clusterCap = numNodes * MAX_MAPS_PER_NODE;
  return Math.max(Math.min(byOperations, clusterCap), 1);
}
/**
 * Prepares the job: creates a staging job directory, walks each source tree
 * and writes every entry whose attributes differ into a SequenceFile (the
 * map input), rejects duplicated sources, and sizes the number of maps.
 * Returns true if at least one operation actually needs to run.
 */
private boolean setup(List<FileOperation> ops, Path log)
    throws IOException {
  final String randomId = getRandomId();
  JobClient jClient = new JobClient(jobconf);
  Path stagingArea;
  try {
    stagingArea = JobSubmissionFiles.getStagingDir(
        jClient.getClusterHandle(), jobconf);
  } catch (InterruptedException ie){
    throw new IOException(ie);
  }
  Path jobdir = new Path(stagingArea + NAME + "_" + randomId);
  FsPermission mapredSysPerms =
      new FsPermission(JobSubmissionFiles.JOB_DIR_PERMISSION);
  FileSystem.mkdirs(jClient.getFs(), jobdir, mapredSysPerms);
  LOG.info(JOB_DIR_LABEL + "=" + jobdir);
  if (log == null) {
    // no user-specified log dir: keep logs under the job directory
    log = new Path(jobdir, "_logs");
  }
  FileOutputFormat.setOutputPath(jobconf, log);
  LOG.info("log=" + log);
  //create operation list
  FileSystem fs = jobdir.getFileSystem(jobconf);
  Path opList = new Path(jobdir, "_" + OP_LIST_LABEL);
  jobconf.set(OP_LIST_LABEL, opList.toString());
  int opCount = 0, synCount = 0;
  try (SequenceFile.Writer opWriter = SequenceFile.createWriter(fs, jobconf, opList, Text.class,
      FileOperation.class, SequenceFile.CompressionType.NONE)) {
    for(FileOperation op : ops) {
      FileStatus srcstat = fs.getFileStatus(op.src);
      if (srcstat.isDirectory() && op.isDifferent(srcstat)) {
        ++opCount;
        opWriter.append(new Text(op.src.toString()), op);
      }
      // depth-first walk of the source tree via an explicit stack
      Stack<Path> pathstack = new Stack<Path>();
      for(pathstack.push(op.src); !pathstack.empty(); ) {
        for(FileStatus stat : fs.listStatus(pathstack.pop())) {
          if (stat.isDirectory()) {
            pathstack.push(stat.getPath());
          }
          if (op.isDifferent(stat)) {
            ++opCount;
            // emit a periodic sync marker (presumably so the file remains
            // splittable — TODO confirm against SequenceFile semantics)
            if (++synCount > SYNC_FILE_MAX) {
              opWriter.sync();
              synCount = 0;
            }
            Path f = stat.getPath();
            opWriter.append(new Text(f.toString()), new FileOperation(f, op));
          }
        }
      }
    }
  }
  checkDuplication(fs, opList, new Path(jobdir, "_sorted"), jobconf);
  jobconf.setInt(OP_COUNT_LABEL, opCount);
  LOG.info(OP_COUNT_LABEL + "=" + opCount);
  jobconf.setNumMapTasks(getMapCount(opCount,
      new JobClient(jobconf).getClusterStatus().getTaskTrackers()));
  return opCount != 0;
}
/**
 * Sorts the operation list by source path and scans adjacent records for
 * equal sources, throwing DuplicationException if any source appears twice.
 */
private static void checkDuplication(FileSystem fs, Path file, Path sorted,
    Configuration conf) throws IOException {
  SequenceFile.Sorter sorter = new SequenceFile.Sorter(fs,
      new Text.Comparator(), Text.class, FileOperation.class, conf);
  sorter.sort(file, sorted);
  try (SequenceFile.Reader in = new SequenceFile.Reader(fs, sorted, conf)) {
    FileOperation curop = new FileOperation();
    Text prevsrc = null, cursrc = new Text();
    for(; in.next(cursrc, curop); ) {
      if (prevsrc != null && cursrc.equals(prevsrc)) {
        throw new DuplicationException(
            "Invalid input, there are duplicated files in the sources: "
            + prevsrc + ", " + cursrc);
      }
      // keep the current key and allocate fresh holders so next() does not
      // overwrite the value we are comparing against
      prevsrc = cursrc;
      cursrc = new Text();
      curop = new FileOperation();
    }
  }
}
/** Command-line entry point; delegates to ToolRunner and exits with its code. */
public static void main(String[] args) throws Exception {
  final int exitCode = ToolRunner.run(new DistCh(new Configuration()), args);
  System.exit(exitCode);
}
}
|
ChangeFilesMapper
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/events/ListenerTest.java
|
{
"start": 3110,
"end": 3564
}
|
/**
 * Minimal JPA test entity: generated identifier plus a mutable name.
 * The no-arg constructor is required by JPA; the other one is a convenience
 * for tests.
 */
class ____ {
    @Id
    @GeneratedValue
    private Long id;
    private String name;
    public Customer() {
    }
    public Customer(String name) {
        this.name = name;
    }
    public Long getId() {
        return id;
    }
    public String getName() {
        return name;
    }
    public void setName(String name) {
        this.name = name;
    }
}
//tag::events-jpa-callbacks-example[]
@Entity(name = "Person")
@EntityListeners(LastUpdateListener.class)
public static
|
Customer
|
java
|
apache__flink
|
flink-runtime/src/main/java/org/apache/flink/runtime/executiongraph/ExecutionHistory.java
|
{
"start": 1035,
"end": 1355
}
|
class ____ the historical executions of an {@link ExecutionVertex} in a {@link
* LinkedHashMap} with a size bound. When the map grows beyond the size bound, elements are dropped
* from the head of the map (FIFO order). Note that the historical executions does not include the
* current execution attempt.
*/
public
|
hosts
|
java
|
spring-projects__spring-boot
|
module/spring-boot-webmvc/src/test/java/org/springframework/boot/webmvc/autoconfigure/WebMvcAutoConfigurationTests.java
|
{
"start": 60691,
"end": 60936
}
|
/**
 * Test WebMvcConfigurer that supplies a mock Validator, so the tests can
 * check which validator the MVC auto-configuration ends up using.
 */
class ____ implements WebMvcConfigurer {
    private final Validator validator = mock(Validator.class);
    @Override
    public Validator getValidator() {
        return this.validator;
    }
}
@Configuration(proxyBeanMethods = false)
static
|
MvcValidator
|
java
|
apache__camel
|
components/camel-http/src/main/java/org/apache/camel/component/http/HttpComponent.java
|
{
"start": 3691,
"end": 51208
}
|
class ____ extends HttpCommonComponent implements RestProducerFactory, SSLContextParametersAware {
private static final Logger LOG = LoggerFactory.getLogger(HttpComponent.class);
@Metadata(label = "advanced",
description = "To use the custom HttpClientConfigurer to perform configuration of the HttpClient that will be used.")
protected HttpClientConfigurer httpClientConfigurer;
@Metadata(label = "advanced", description = "To use a custom and shared HttpClientConnectionManager to manage connections."
+ " If this has been configured then this is always used for all endpoints created by this component.")
protected HttpClientConnectionManager clientConnectionManager;
@Metadata(label = "advanced",
description = "To use a custom org.apache.hc.core5.http.protocol.HttpContext when executing requests.")
protected HttpContext httpContext;
@Metadata(label = "security", description = "To configure security using SSLContextParameters."
+ " Important: Only one instance of org.apache.camel.support.jsse.SSLContextParameters is supported per HttpComponent."
+ " If you need to use 2 or more different instances, you need to define a new HttpComponent per instance you need.")
protected SSLContextParameters sslContextParameters;
@Metadata(label = "security",
description = "To use a custom X509HostnameVerifier such as DefaultHostnameVerifier or NoopHostnameVerifier.")
protected HostnameVerifier x509HostnameVerifier = new DefaultHostnameVerifier();
@Metadata(label = "advanced", defaultValue = "false",
description = "To use System Properties as fallback for configuration for configuring HTTP Client")
private boolean useSystemProperties;
@Metadata(label = "producer,advanced", description = "To use a custom org.apache.hc.client5.http.cookie.CookieStore."
+ " By default the org.apache.hc.client5.http.cookie.BasicCookieStore is used which is an in-memory only cookie store."
+ " Notice if bridgeEndpoint=true then the cookie store is forced to be a noop cookie store as cookie"
+ " shouldn't be stored as we are just bridging (eg acting as a proxy).")
protected CookieStore cookieStore;
// timeout
@Metadata(label = "timeout", defaultValue = "" + 3 * 60 * 1000,
description = "Returns the connection lease request timeout (in millis) used when requesting"
+ " a connection from the connection manager."
+ " A timeout value of zero is interpreted as a disabled timeout.")
protected long connectionRequestTimeout = 3 * 60 * 1000L;
@Metadata(label = "timeout", defaultValue = "" + 3 * 60 * 1000,
description = "Determines the timeout (in millis) until a new connection is fully established."
+ " A timeout value of zero is interpreted as an infinite timeout.")
protected long connectTimeout = 3 * 60 * 1000L;
@Metadata(label = "timeout", defaultValue = "" + 3 * 60 * 1000,
description = "Determines the default socket timeout (in millis) value for blocking I/O operations.")
protected long soTimeout = 3 * 60 * 1000L;
@Metadata(label = "timeout",
description = "Determines the timeout (in millis) until arrival of a response from the opposite"
+ " endpoint. A timeout value of zero is interpreted as an infinite timeout."
+ " Please note that response timeout may be unsupported by HTTP transports "
+ "with message multiplexing.")
protected long responseTimeout;
// proxy
@Metadata(label = "producer,proxy", description = "Proxy server host")
protected String proxyHost;
@Metadata(label = "producer,proxy", description = "Proxy server port")
protected Integer proxyPort;
@Metadata(label = "producer,proxy", enums = "http,https",
description = "Proxy server authentication protocol scheme to use")
protected String proxyAuthScheme;
@Metadata(label = "producer,proxy", enums = "Basic,Digest,NTLM",
description = "Proxy authentication method to use (NTLM is deprecated)")
protected String proxyAuthMethod;
@Metadata(label = "producer,proxy", secret = true, description = "Proxy server username")
protected String proxyAuthUsername;
@Metadata(label = "producer,proxy", secret = true, description = "Proxy server password")
protected String proxyAuthPassword;
@Deprecated
@Metadata(label = "producer,proxy", description = "Proxy server host")
protected String proxyAuthHost;
@Deprecated
@Metadata(label = "producer,proxy", description = "Proxy server port")
protected Integer proxyAuthPort;
@Deprecated
@Metadata(label = "producer,proxy", description = "Proxy authentication domain to use with NTLM")
protected String proxyAuthDomain;
@Deprecated
@Metadata(label = "producer,proxy",
description = "Proxy authentication domain (workstation name) to use with NTLM (NTLM is deprecated)")
protected String proxyAuthNtHost;
// options to the default created http connection manager
@Metadata(label = "advanced", defaultValue = "200", description = "The maximum number of connections.")
protected int maxTotalConnections = 200;
@Metadata(label = "advanced", defaultValue = "20", description = "The maximum number of connections per route.")
protected int connectionsPerRoute = 20;
// It's MILLISECONDS, the default value is always keepAlive
@Metadata(label = "advanced",
description = "The time for connection to live, the time unit is millisecond, the default value is always keepAlive.")
protected long connectionTimeToLive = -1;
@Metadata(label = "security", defaultValue = "false", description = "Enable usage of global SSL context parameters.")
protected boolean useGlobalSslContextParameters;
@Metadata(label = "producer,advanced", defaultValue = "8192",
description = "This threshold in bytes controls whether the response payload"
+ " should be stored in memory as a byte array or be streaming based. Set this to -1 to always use streaming mode.")
protected int responsePayloadStreamingThreshold = 8192;
@Metadata(label = "advanced", description = "Disables automatic redirect handling")
protected boolean redirectHandlingDisabled;
@Metadata(label = "advanced", description = "Disables automatic request recovery and re-execution")
protected boolean automaticRetriesDisabled;
@Metadata(label = "advanced", description = "Disables automatic content decompression")
protected boolean contentCompressionDisabled;
@Metadata(label = "advanced", description = "Disables state (cookie) management")
protected boolean cookieManagementDisabled;
@Metadata(label = "advanced", description = "Disables authentication scheme caching")
protected boolean authCachingDisabled;
@Metadata(label = "advanced", description = "Disables connection state tracking")
protected boolean connectionStateDisabled;
@Metadata(label = "advanced",
description = "Disables the default user agent set by this builder if none has been provided by the user")
protected boolean defaultUserAgentDisabled;
@Metadata(label = "producer",
description = "Whether to skip Camel control headers (CamelHttp... headers) to influence this endpoint. Control headers from previous HTTP components can influence"
+ " how this Camel component behaves such as CamelHttpPath, CamelHttpQuery, etc.")
private boolean skipControlHeaders;
@Metadata(label = "producer",
description = "Whether to skip mapping all the Camel headers as HTTP request headers."
+ " This is useful when you know that calling the HTTP service should not include any custom headers.")
protected boolean skipRequestHeaders;
@Metadata(label = "producer",
description = "Whether to skip mapping all the HTTP response headers to Camel headers.")
protected boolean skipResponseHeaders;
@Metadata(label = "producer,advanced",
defaultValue = "true",
description = "If this option is true then IN exchange headers will be copied to OUT exchange headers according to copy strategy."
+ " Setting this to false, allows to only include the headers from the HTTP response (not propagating IN headers).")
protected boolean copyHeaders = true;
@Metadata(label = "producer,advanced", defaultValue = "false",
description = "Whether to the HTTP request should follow redirects."
+ " By default the HTTP request does not follow redirects ")
protected boolean followRedirects;
@Metadata(label = "producer,advanced", description = "To set a custom HTTP User-Agent request header")
protected String userAgent;
@Metadata(label = "producer,advanced", autowired = true, description = "To use a custom activity listener")
protected HttpActivityListener httpActivityListener;
@Metadata(label = "producer",
description = "To enable logging HTTP request and response. You can use a custom LoggingHttpActivityListener as httpActivityListener to control logging options.")
protected boolean logHttpActivity;
@Metadata(label = "producer,advanced", defaultValue = "true",
description = "Whether the Content-Type header should automatic include charset for string based content.")
protected boolean contentTypeCharsetEnabled = true;
/** Creates a new HTTP component with default settings. */
public HttpComponent() {
}
/**
 * Creates the HttpClientConfigurer based on the given parameters
 *
 * @param  parameters the map of parameters
 * @param  secure     whether the endpoint is secure (e.g., https)
 * @return            the configurer
 * @throws Exception  is thrown if error creating configurer
 */
protected HttpClientConfigurer createHttpClientConfigurer(Map<String, Object> parameters, boolean secure) throws Exception {
    // prefer to use endpoint configured over component configured
    HttpClientConfigurer configurer
            = resolveAndRemoveReferenceParameter(parameters, "httpClientConfigurer", HttpClientConfigurer.class);
    if (configurer == null) {
        // fallback to component configured
        configurer = getHttpClientConfigurer();
    }
    HttpCredentialsHelper credentialsProvider = new HttpCredentialsHelper();
    // layer authentication and proxy support on top; each call returns either
    // the same configurer or a composite wrapping it
    configurer = configureBasicAuthentication(parameters, configurer, credentialsProvider);
    configurer = configureHttpProxy(parameters, configurer, secure, credentialsProvider);
    configurer = configureOAuth2Authentication(parameters, configurer);
    return configurer;
}
/**
 * Adds an OAuth2 client-credentials configurer when oauth2ClientId,
 * oauth2ClientSecret and oauth2TokenEndpoint are all present; otherwise the
 * configurer is returned unchanged. Token-caching options fall back to the
 * defaults of a fresh HttpConfiguration.
 */
private HttpClientConfigurer configureOAuth2Authentication(
        Map<String, Object> parameters, HttpClientConfigurer configurer) {
    String clientId = getParameter(parameters, "oauth2ClientId", String.class);
    String clientSecret = getParameter(parameters, "oauth2ClientSecret", String.class);
    String tokenEndpoint = getParameter(parameters, "oauth2TokenEndpoint", String.class);
    String scope = getParameter(parameters, "oauth2Scope", String.class);
    String resourceIndicator = getParameter(parameters, "oauth2ResourceIndicator", String.class);
    // defaults for the caching/authentication flags come from HttpConfiguration
    HttpConfiguration configDefaults = new HttpConfiguration();
    boolean cacheTokens = getParameter(
            parameters,
            "oauth2CacheTokens",
            boolean.class,
            configDefaults.isOauth2CacheTokens());
    long cachedTokensDefaultExpirySeconds = getParameter(
            parameters,
            "oauth2CachedTokensDefaultExpirySeconds",
            long.class,
            configDefaults.getOauth2CachedTokensDefaultExpirySeconds());
    long cachedTokensExpirationMarginSeconds = getParameter(
            parameters,
            "oauth2CachedTokensExpirationMarginSeconds",
            long.class,
            configDefaults.getOauth2CachedTokensExpirationMarginSeconds());
    boolean useBodyAuthentication = getParameter(
            parameters,
            "oauth2BodyAuthentication",
            boolean.class,
            configDefaults.isOauth2BodyAuthentication());
    if (clientId != null && clientSecret != null && tokenEndpoint != null) {
        return CompositeHttpConfigurer.combineConfigurers(configurer,
                new OAuth2ClientConfigurer(
                        clientId,
                        clientSecret,
                        tokenEndpoint,
                        resourceIndicator,
                        scope,
                        cacheTokens,
                        cachedTokensDefaultExpirySeconds,
                        cachedTokensExpirationMarginSeconds,
                        useBodyAuthentication));
    }
    return configurer;
}
/**
 * Adds authentication when credentials are available: endpoint-level
 * authUsername/authPassword win; otherwise a component-level
 * HttpConfiguration with authMethod basic or bearer is used. Returns the
 * configurer unchanged when neither applies.
 */
private HttpClientConfigurer configureBasicAuthentication(
        Map<String, Object> parameters, HttpClientConfigurer configurer,
        HttpCredentialsHelper credentialsProvider) {
    String authUsername = getParameter(parameters, "authUsername", String.class);
    String authPassword = getParameter(parameters, "authPassword", String.class);
    if (authUsername != null && authPassword != null) {
        String authDomain = getParameter(parameters, "authDomain", String.class);
        String authHost = getParameter(parameters, "authHost", String.class);
        return CompositeHttpConfigurer.combineConfigurers(configurer,
                new DefaultAuthenticationHttpClientConfigurer(
                        authUsername, authPassword, authDomain, authHost, null, credentialsProvider));
    } else if (this.httpConfiguration != null) {
        if ("basic".equalsIgnoreCase(this.httpConfiguration.getAuthMethod())
                || "bearer".equalsIgnoreCase(this.httpConfiguration.getAuthMethod())) {
            return CompositeHttpConfigurer.combineConfigurers(configurer,
                    new DefaultAuthenticationHttpClientConfigurer(
                            this.httpConfiguration.getAuthUsername(),
                            this.httpConfiguration.getAuthPassword(), this.httpConfiguration.getAuthDomain(),
                            this.httpConfiguration.getAuthHost(), this.httpConfiguration.getAuthBearerToken(),
                            credentialsProvider));
        }
    }
    return configurer;
}
/**
 * Adds HTTP proxy support when a proxy host and port are configured.
 * Endpoint options override component options; the deprecated
 * proxyAuthHost/proxyAuthPort names are honoured before proxyHost/proxyPort.
 * Proxy credentials are only applied when both username and password are set.
 */
private HttpClientConfigurer configureHttpProxy(
        Map<String, Object> parameters, HttpClientConfigurer configurer, boolean secure,
        HttpCredentialsHelper credentialsProvider) {
    String proxyAuthScheme = getParameter(parameters, "proxyAuthScheme", String.class, getProxyAuthScheme());
    if (proxyAuthScheme == null) {
        // fallback and use either http or https depending on secure
        proxyAuthScheme = secure ? "https" : "http";
    }
    // these are old names and are deprecated
    String proxyAuthHost = getParameter(parameters, "proxyAuthHost", String.class, getProxyAuthHost());
    Integer proxyAuthPort = getParameter(parameters, "proxyAuthPort", Integer.class, getProxyAuthPort());
    if (proxyAuthHost == null) {
        proxyAuthHost = getParameter(parameters, "proxyHost", String.class, getProxyHost());
    }
    if (proxyAuthPort == null) {
        proxyAuthPort = getParameter(parameters, "proxyPort", Integer.class, getProxyPort());
    }
    if (proxyAuthHost != null && proxyAuthPort != null) {
        String proxyAuthUsername = getParameter(parameters, "proxyAuthUsername", String.class, getProxyAuthUsername());
        String proxyAuthPassword = getParameter(parameters, "proxyAuthPassword", String.class, getProxyAuthPassword());
        String proxyAuthDomain = getParameter(parameters, "proxyAuthDomain", String.class, getProxyAuthDomain());
        String proxyAuthNtHost = getParameter(parameters, "proxyAuthNtHost", String.class, getProxyAuthNtHost());
        LOG.debug("Configuring HTTP client to use HTTP proxy {}:{}", proxyAuthHost, proxyAuthPort);
        if (proxyAuthUsername != null && proxyAuthPassword != null) {
            return CompositeHttpConfigurer.combineConfigurers(
                    configurer,
                    new ProxyHttpClientConfigurer(
                            proxyAuthHost, proxyAuthPort, proxyAuthScheme, proxyAuthUsername, proxyAuthPassword,
                            proxyAuthDomain, proxyAuthNtHost, credentialsProvider));
        } else {
            return CompositeHttpConfigurer.combineConfigurers(configurer,
                    new ProxyHttpClientConfigurer(proxyAuthHost, proxyAuthPort, proxyAuthScheme));
        }
    }
    return configurer;
}
/**
 * Creates the HTTP endpoint: resolves timeout and SSL/TLS settings
 * (endpoint options override component defaults), normalises the address
 * URI scheme from whether the call is secure, builds the client configurer,
 * connection manager and client builder, then binds the remaining URI
 * parameters onto the endpoint.
 */
@Override
protected Endpoint createEndpoint(String uri, String remaining, Map<String, Object> parameters) throws Exception {
    Map<String, Object> httpClientParameters = new HashMap<>(parameters);
    final Map<String, Object> httpClientOptions = new HashMap<>();
    // timeout values can be configured on both component and endpoint level, where endpoint takes priority
    // only non-default values are recorded in httpClientOptions
    Timeout valConnectionRequestTimeout
            = getAndRemoveParameter(parameters, "connectionRequestTimeout", Timeout.class,
                    Timeout.ofMilliseconds(connectionRequestTimeout));
    if (!Timeout.ofMinutes(3).equals(valConnectionRequestTimeout)) {
        httpClientOptions.put("connectionRequestTimeout", valConnectionRequestTimeout);
    }
    Timeout valResponseTimeout
            = getAndRemoveParameter(parameters, "responseTimeout", Timeout.class, Timeout.ofMilliseconds(responseTimeout));
    if (!Timeout.ofMilliseconds(0).equals(valResponseTimeout)) {
        httpClientOptions.put("responseTimeout", valResponseTimeout);
    }
    Timeout valConnectTimeout
            = getAndRemoveParameter(parameters, "connectTimeout", Timeout.class, Timeout.ofMilliseconds(connectTimeout));
    if (!Timeout.ofMinutes(3).equals(valConnectTimeout)) {
        httpClientOptions.put("connectTimeout", valConnectTimeout);
    }
    final Map<String, Object> httpConnectionOptions = new HashMap<>();
    Timeout valSoTimeout = getAndRemoveParameter(parameters, "soTimeout", Timeout.class, Timeout.ofMilliseconds(soTimeout));
    if (!Timeout.ofMinutes(3).equals(valSoTimeout)) {
        httpConnectionOptions.put("soTimeout", valSoTimeout);
    }
    HttpBinding httpBinding = resolveAndRemoveReferenceParameter(parameters, "httpBinding", HttpBinding.class);
    HttpContext httpContext = resolveAndRemoveReferenceParameter(parameters, "httpContext", HttpContext.class);
    // SSL resolution order: endpoint parameter, component parameter, then
    // (for https only) the global SSL context parameters
    SSLContextParameters sslContextParameters
            = resolveAndRemoveReferenceParameter(parameters, "sslContextParameters", SSLContextParameters.class);
    if (sslContextParameters == null) {
        sslContextParameters = getSslContextParameters();
    }
    if (sslContextParameters == null) {
        // only secure (https) should use global SSL
        boolean secure = HttpHelper.isSecureConnection(uri);
        if (secure) {
            sslContextParameters = retrieveGlobalSslContextParameters();
        }
    }
    String httpMethodRestrict = getAndRemoveParameter(parameters, "httpMethodRestrict", String.class);
    boolean muteException = getAndRemoveParameter(parameters, "muteException", boolean.class, isMuteException());
    HeaderFilterStrategy headerFilterStrategy
            = resolveAndRemoveReferenceParameter(parameters, "headerFilterStrategy", HeaderFilterStrategy.class);
    // the actual protocol if present in the remainder part should take precedence
    String secureProtocol = uri;
    if (remaining.startsWith("http:") || remaining.startsWith("https:")) {
        secureProtocol = remaining;
    }
    boolean secure = HttpHelper.isSecureConnection(secureProtocol) || sslContextParameters != null;
    // the remaining part should be without protocol as that was how this component was originally created
    remaining = org.apache.camel.component.http.HttpUtil.removeHttpOrHttpsProtocol(remaining);
    // need to set the scheme on address uri depending on if it's secure or not
    String addressUri = (secure ? "https://" : "http://") + remaining;
    addressUri = UnsafeUriCharactersEncoder.encodeHttpURI(addressUri);
    URI uriHttpUriAddress = new URI(addressUri);
    // the endpoint uri should use the component name as the scheme, so we need to re-create it once more
    String scheme = StringHelper.before(uri, "://");
    // uri part should be without protocol as that was how this component was originally created
    uri = org.apache.camel.component.http.HttpUtil.removeHttpOrHttpsProtocol(uri);
    // create the configurer to use for this endpoint
    HttpClientConfigurer configurer = createHttpClientConfigurer(parameters, secure);
    URI endpointUri = URISupport.createRemainingURI(uriHttpUriAddress, httpClientParameters);
    endpointUri = URISupport.createRemainingURI(
            new URI(
                    scheme,
                    endpointUri.getUserInfo(),
                    endpointUri.getHost(),
                    endpointUri.getPort(),
                    endpointUri.getPath(),
                    endpointUri.getQuery(),
                    endpointUri.getFragment()),
            httpClientParameters);
    // create the endpoint and set the http uri to be null
    String endpointUriString = endpointUri.toString();
    LOG.debug("Creating endpoint uri {}", endpointUriString);
    final HttpClientConnectionManager localConnectionManager
            = createConnectionManager(parameters, sslContextParameters, httpConnectionOptions);
    final HttpClientBuilder clientBuilder = createHttpClientBuilder(uri, parameters, httpClientOptions);
    HttpEndpoint endpoint = new HttpEndpoint(endpointUriString, this, clientBuilder, localConnectionManager, configurer);
    endpoint.setResponseTimeout(valResponseTimeout.toMilliseconds());
    endpoint.setSoTimeout(valSoTimeout.toMilliseconds());
    endpoint.setConnectTimeout(valConnectTimeout.toMilliseconds());
    endpoint.setConnectionRequestTimeout(valConnectionRequestTimeout.toMilliseconds());
    endpoint.setCopyHeaders(copyHeaders);
    endpoint.setSkipControlHeaders(skipControlHeaders);
    endpoint.setSkipRequestHeaders(skipRequestHeaders);
    endpoint.setSkipResponseHeaders(skipResponseHeaders);
    endpoint.setUserAgent(userAgent);
    endpoint.setMuteException(muteException);
    endpoint.setHttpActivityListener(httpActivityListener);
    endpoint.setLogHttpActivity(logHttpActivity);
    endpoint.setContentTypeCharsetEnabled(contentTypeCharsetEnabled);
    // configure the endpoint with the common configuration from the component
    if (getHttpConfiguration() != null) {
        Map<String, Object> properties = new HashMap<>();
        BeanIntrospection beanIntrospection = PluginHelper.getBeanIntrospection(getCamelContext());
        beanIntrospection.getProperties(getHttpConfiguration(), properties, null);
        setProperties(endpoint, properties);
    }
    // configure the endpoint
    setProperties(endpoint, parameters);
    // we cannot change the port of an URI, we must create a new one with an explicit port value
    URI httpUri = URISupport.createRemainingURI(
            new URI(
                    uriHttpUriAddress.getScheme(),
                    uriHttpUriAddress.getUserInfo(),
                    uriHttpUriAddress.getHost(),
                    uriHttpUriAddress.getPort(),
                    uriHttpUriAddress.getPath(),
                    uriHttpUriAddress.getQuery(),
                    uriHttpUriAddress.getFragment()),
            parameters);
    endpoint.setHttpUri(httpUri);
    if (headerFilterStrategy != null) {
        endpoint.setHeaderFilterStrategy(headerFilterStrategy);
    } else {
        setEndpointHeaderFilterStrategy(endpoint);
    }
    // endpoint-level binding/context override component-level ones
    endpoint.setHttpBinding(getHttpBinding());
    if (httpBinding != null) {
        endpoint.setHttpBinding(httpBinding);
    }
    if (httpMethodRestrict != null) {
        endpoint.setHttpMethodRestrict(httpMethodRestrict);
    }
    endpoint.setHttpContext(getHttpContext());
    if (httpContext != null) {
        endpoint.setHttpContext(httpContext);
    }
    if (endpoint.getCookieStore() == null) {
        endpoint.setCookieStore(getCookieStore());
    }
    endpoint.setHttpClientOptions(httpClientOptions);
    endpoint.setHttpConnectionOptions(httpConnectionOptions);
    return endpoint;
}
/**
 * Returns the shared component-level connection manager when one is
 * configured; otherwise builds a new one from the endpoint parameters
 * (hostname verifier, pool sizes, socket options bound from
 * httpConnection.* URI options).
 */
protected HttpClientConnectionManager createConnectionManager(
        final Map<String, Object> parameters,
        final SSLContextParameters sslContextParameters, Map<String, Object> httpConnectionOptions)
        throws GeneralSecurityException, IOException {
    if (clientConnectionManager != null) {
        return clientConnectionManager;
    }
    final HostnameVerifier resolvedHostnameVerifier
            = resolveAndRemoveReferenceParameter(parameters, "x509HostnameVerifier", HostnameVerifier.class);
    final HostnameVerifier hostnameVerifier = Optional.ofNullable(resolvedHostnameVerifier).orElse(x509HostnameVerifier);
    // need to check the parameters of maxTotalConnections and connectionsPerRoute
    final int maxTotalConnections = getAndRemoveParameter(parameters, "maxTotalConnections", int.class, 0);
    final int connectionsPerRoute = getAndRemoveParameter(parameters, "connectionsPerRoute", int.class, 0);
    // do not remove as we set this later again
    final boolean sysProp = getParameter(parameters, "useSystemProperties", boolean.class, useSystemProperties);
    final Registry<ConnectionSocketFactory> connectionRegistry
            = createConnectionRegistry(hostnameVerifier, sslContextParameters, sysProp);
    // allow the builder pattern
    httpConnectionOptions.putAll(PropertiesHelper.extractProperties(parameters, "httpConnection."));
    SocketConfig.Builder socketConfigBuilder = SocketConfig.custom();
    PropertyBindingSupport.bindProperties(getCamelContext(), socketConfigBuilder, httpConnectionOptions);
    return createConnectionManager(connectionRegistry, maxTotalConnections, connectionsPerRoute,
            socketConfigBuilder.build());
}
/**
 * Creates the HttpClientBuilder for an endpoint, binding any httpClient.*
 * URI options onto the builder and its default RequestConfig, then applying
 * the component-level disable* flags and the redirect strategy.
 */
protected HttpClientBuilder createHttpClientBuilder(
        final String uri, final Map<String, Object> parameters,
        final Map<String, Object> httpClientOptions) {
    // http client can be configured from URI options
    HttpClientBuilder clientBuilder = HttpClientBuilder.create();
    // allow the builder pattern
    httpClientOptions.putAll(PropertiesHelper.extractProperties(parameters, "httpClient."));
    PropertyBindingSupport.bindProperties(getCamelContext(), clientBuilder, httpClientOptions);
    // set the Request configure this way and allow the builder pattern
    RequestConfig.Builder requestConfigBuilder = RequestConfig.custom();
    PropertyBindingSupport.bindProperties(getCamelContext(), requestConfigBuilder, httpClientOptions);
    clientBuilder.setDefaultRequestConfig(requestConfigBuilder.build());
    // validate that we could resolve all httpClient. parameters as this component is lenient
    validateParameters(uri, httpClientOptions, null);
    // endpoint parameter can override component level
    boolean fr = getParameter(parameters, "followRedirects", Boolean.class, followRedirects);
    if (redirectHandlingDisabled || !fr) {
        clientBuilder.disableRedirectHandling();
    }
    if (automaticRetriesDisabled) {
        clientBuilder.disableAutomaticRetries();
    }
    if (contentCompressionDisabled) {
        clientBuilder.disableContentCompression();
    }
    if (cookieManagementDisabled) {
        clientBuilder.disableCookieManagement();
    }
    if (authCachingDisabled) {
        clientBuilder.disableAuthCaching();
    }
    if (connectionStateDisabled) {
        clientBuilder.disableConnectionState();
    }
    if (defaultUserAgentDisabled) {
        clientBuilder.disableDefaultUserAgent();
    }
    if (fr) {
        clientBuilder.setRedirectStrategy(DefaultRedirectStrategy.INSTANCE);
    }
    return clientBuilder;
}
protected Registry<ConnectionSocketFactory> createConnectionRegistry(
HostnameVerifier x509HostnameVerifier, SSLContextParameters sslContextParams,
boolean useSystemProperties)
throws GeneralSecurityException, IOException {
// create the default connection registry to use
RegistryBuilder<ConnectionSocketFactory> builder = RegistryBuilder.create();
builder.register("http", PlainConnectionSocketFactory.getSocketFactory());
if (sslContextParams != null) {
builder.register("https",
new SSLConnectionSocketFactory(sslContextParams.createSSLContext(getCamelContext()), x509HostnameVerifier));
} else {
builder.register("https", new SSLConnectionSocketFactory(
useSystemProperties ? SSLContexts.createSystemDefault() : SSLContexts.createDefault(),
x509HostnameVerifier));
}
return builder.build();
}
protected HttpClientConnectionManager createConnectionManager(
Registry<ConnectionSocketFactory> registry, int maxTotalConnections, int connectionsPerRoute,
SocketConfig defaultSocketConfig) {
// set up the connection live time
PoolingHttpClientConnectionManager answer = new PoolingHttpClientConnectionManager(
registry, PoolConcurrencyPolicy.STRICT, TimeValue.ofMilliseconds(getConnectionTimeToLive()), null);
int localMaxTotalConnections = maxTotalConnections;
if (localMaxTotalConnections == 0) {
localMaxTotalConnections = getMaxTotalConnections();
}
if (localMaxTotalConnections > 0) {
answer.setMaxTotal(localMaxTotalConnections);
}
answer.setDefaultSocketConfig(defaultSocketConfig);
int localConnectionsPerRoute = connectionsPerRoute;
if (localConnectionsPerRoute == 0) {
localConnectionsPerRoute = getConnectionsPerRoute();
}
if (localConnectionsPerRoute > 0) {
answer.setDefaultMaxPerRoute(localConnectionsPerRoute);
}
LOG.debug("Created ClientConnectionManager {}", answer);
return answer;
}
    @Override
    protected boolean useIntrospectionOnEndpoint() {
        // disable the default introspection-based endpoint configuration;
        // this component applies endpoint options explicitly (e.g. see createProducer)
        return false;
    }
    /**
     * RestProducerFactory support: creates a producer for calling a REST service over HTTP. Builds the
     * endpoint URI from host/basePath/uriTemplate, applies rest-configuration endpoint properties, and
     * installs a header filter strategy that knows the rest path and query parameters.
     */
    @SuppressWarnings("unchecked")
    @Override
    public Producer createProducer(
            CamelContext camelContext, String host,
            String verb, String basePath, String uriTemplate, String queryParameters,
            String consumes, String produces, RestConfiguration configuration, Map<String, Object> parameters)
            throws Exception {
        // avoid leading slash
        basePath = FileUtil.stripLeadingSeparator(basePath);
        uriTemplate = FileUtil.stripLeadingSeparator(uriTemplate);
        // get the endpoint
        String url = host;
        if (!ObjectHelper.isEmpty(basePath)) {
            url += "/" + basePath;
        }
        if (!ObjectHelper.isEmpty(uriTemplate)) {
            url += "/" + uriTemplate;
        }
        RestConfiguration config = configuration;
        if (config == null) {
            // fall back to the context-wide rest configuration for the http component
            config = CamelContextHelper.getRestConfiguration(getCamelContext(), null, "http");
        }
        Map<String, Object> map = new HashMap<>();
        // build query string, and append any endpoint configuration properties
        // (only when this component - "http" - is the configured rest producer component)
        if (config.getProducerComponent() == null || config.getProducerComponent().equals("http")) {
            // setup endpoint options
            map.put("httpMethod", verb);
            if (config.getEndpointProperties() != null && !config.getEndpointProperties().isEmpty()) {
                map.putAll(config.getEndpointProperties());
            }
        }
        url = HttpUtil.recreateUrl(map, url);
        // work on a defensive copy as the parameter map is mutated below
        parameters = parameters != null ? new HashMap<>(parameters) : new HashMap<>();
        // there are cases where we might end up here without component being created beforehand
        // we need to abide by the component properties specified in the parameters when creating
        // the component, one such case is when we switch from "http" to "https" component name
        RestProducerFactoryHelper.setupComponentFor(url, camelContext, (Map<String, Object>) parameters.remove("component"));
        HttpEndpoint endpoint = (HttpEndpoint) camelContext.getEndpoint(url, parameters);
        String path = uriTemplate != null ? uriTemplate : basePath;
        // a caller-supplied strategy wins; otherwise use the rest-aware default so path/query
        // parameters are filtered out of the message headers
        HeaderFilterStrategy headerFilterStrategy
                = resolveAndRemoveReferenceParameter(parameters, "headerFilterStrategy", HeaderFilterStrategy.class);
        if (headerFilterStrategy != null) {
            endpoint.setHeaderFilterStrategy(headerFilterStrategy);
        } else {
            endpoint.setHeaderFilterStrategy(new HttpRestHeaderFilterStrategy(path, queryParameters));
        }
        // the endpoint must be started before creating the producer
        ServiceHelper.startService(endpoint);
        return endpoint.createProducer();
    }
    public HttpClientConfigurer getHttpClientConfigurer() {
        return httpClientConfigurer;
    }

    /**
     * To use the custom HttpClientConfigurer to perform configuration of the HttpClient that will be used.
     */
    public void setHttpClientConfigurer(HttpClientConfigurer httpClientConfigurer) {
        this.httpClientConfigurer = httpClientConfigurer;
    }

    public HttpClientConnectionManager getClientConnectionManager() {
        return clientConnectionManager;
    }

    /**
     * To use a custom and shared HttpClientConnectionManager to manage connections. If this has been configured, then
     * this is always used for all endpoints created by this component.
     */
    public void setClientConnectionManager(HttpClientConnectionManager clientConnectionManager) {
        this.clientConnectionManager = clientConnectionManager;
    }

    public HttpContext getHttpContext() {
        return httpContext;
    }

    /**
     * To use a custom org.apache.http.protocol.HttpContext when executing requests.
     */
    public void setHttpContext(HttpContext httpContext) {
        this.httpContext = httpContext;
    }

    public SSLContextParameters getSslContextParameters() {
        return sslContextParameters;
    }

    /**
     * To configure security using SSLContextParameters. Important: Only one instance of
     * org.apache.camel.support.jsse.SSLContextParameters is supported per HttpComponent. If you need to use two or more
     * different instances, you need to define a new HttpComponent per instance.
     */
    public void setSslContextParameters(SSLContextParameters sslContextParameters) {
        this.sslContextParameters = sslContextParameters;
    }

    @Override
    public boolean isUseGlobalSslContextParameters() {
        return this.useGlobalSslContextParameters;
    }

    /**
     * Enable usage of global SSL context parameters.
     */
    @Override
    public void setUseGlobalSslContextParameters(boolean useGlobalSslContextParameters) {
        this.useGlobalSslContextParameters = useGlobalSslContextParameters;
    }

    public HostnameVerifier getX509HostnameVerifier() {
        return x509HostnameVerifier;
    }

    /**
     * To use a custom X509HostnameVerifier such as DefaultHostnameVerifier or NoopHostnameVerifier.
     */
    public void setX509HostnameVerifier(HostnameVerifier x509HostnameVerifier) {
        this.x509HostnameVerifier = x509HostnameVerifier;
    }

    public boolean isUseSystemProperties() {
        return useSystemProperties;
    }

    /**
     * Whether to fall back to JVM system properties when building the client; in particular, when no
     * SSLContextParameters are configured the default SSL context is created via
     * SSLContexts.createSystemDefault() (see createConnectionRegistry).
     */
    public void setUseSystemProperties(boolean useSystemProperties) {
        this.useSystemProperties = useSystemProperties;
    }

    public int getMaxTotalConnections() {
        return maxTotalConnections;
    }

    /**
     * The maximum number of connections.
     */
    public void setMaxTotalConnections(int maxTotalConnections) {
        this.maxTotalConnections = maxTotalConnections;
    }

    public int getConnectionsPerRoute() {
        return connectionsPerRoute;
    }

    /**
     * The maximum number of connections per route.
     */
    public void setConnectionsPerRoute(int connectionsPerRoute) {
        this.connectionsPerRoute = connectionsPerRoute;
    }

    public long getConnectionTimeToLive() {
        return connectionTimeToLive;
    }

    /**
     * The time for connection to live, the time unit is millisecond; the default value is always keepAlive.
     */
    public void setConnectionTimeToLive(long connectionTimeToLive) {
        this.connectionTimeToLive = connectionTimeToLive;
    }

    public CookieStore getCookieStore() {
        return cookieStore;
    }

    /**
     * To use a custom org.apache.http.client.CookieStore. By default, the org.apache.http.impl.client.BasicCookieStore
     * is used which is an in-memory only cookie store. Notice if bridgeEndpoint=true then the cookie store is forced to
     * be a noop cookie store as cookie shouldn't be stored as we are just bridging (e.g., acting as a proxy).
     */
    public void setCookieStore(CookieStore cookieStore) {
        this.cookieStore = cookieStore;
    }
    public long getConnectionRequestTimeout() {
        return connectionRequestTimeout;
    }

    /**
     * Sets the connection lease request timeout used when requesting a connection from the connection manager.
     * <p>
     * A timeout value of zero is interpreted as a disabled timeout.
     * </p>
     * <p>
     * Default: 3 minutes
     * </p>
     */
    public void setConnectionRequestTimeout(long connectionRequestTimeout) {
        this.connectionRequestTimeout = connectionRequestTimeout;
    }

    public long getConnectTimeout() {
        return connectTimeout;
    }

    /**
     * Determines the timeout until a new connection is fully established. This may also include transport security
     * negotiation exchanges such as {@code SSL} or {@code TLS} protocol negotiation.
     * <p>
     * A timeout value of zero is interpreted as an infinite timeout.
     * </p>
     * <p>
     * Default: 3 minutes
     * </p>
     */
    public void setConnectTimeout(long connectTimeout) {
        this.connectTimeout = connectTimeout;
    }

    public long getSoTimeout() {
        return soTimeout;
    }

    /**
     * Determines the default socket timeout value for blocking I/O operations.
     * <p>
     * Default: 3 minutes
     * </p>
     */
    public void setSoTimeout(long soTimeout) {
        this.soTimeout = soTimeout;
    }

    public long getResponseTimeout() {
        return responseTimeout;
    }

    /**
     * Determines the timeout until arrival of a response from the opposite endpoint.
     * <p>
     * A timeout value of zero is interpreted as an infinite timeout.
     * </p>
     * <p>
     * Please note that response timeout may be unsupported by HTTP transports with message multiplexing.
     * </p>
     * <p>
     * Default: {@code 0}
     * </p>
     */
    public void setResponseTimeout(long responseTimeout) {
        this.responseTimeout = responseTimeout;
    }

    public String getProxyHost() {
        return proxyHost;
    }

    /**
     * The proxy host name.
     */
    public void setProxyHost(String proxyHost) {
        this.proxyHost = proxyHost;
    }

    public Integer getProxyPort() {
        return proxyPort;
    }

    /**
     * The proxy port number.
     */
    public void setProxyPort(Integer proxyPort) {
        this.proxyPort = proxyPort;
    }

    public String getProxyAuthScheme() {
        return proxyAuthScheme;
    }

    /**
     * The scheme to use when connecting to the proxy (presumably http or https — the usage is not visible in
     * this class; confirm against the endpoint configuration).
     */
    public void setProxyAuthScheme(String proxyAuthScheme) {
        this.proxyAuthScheme = proxyAuthScheme;
    }

    public String getProxyAuthMethod() {
        return proxyAuthMethod;
    }

    /**
     * The authentication method to use against the proxy.
     */
    public void setProxyAuthMethod(String proxyAuthMethod) {
        this.proxyAuthMethod = proxyAuthMethod;
    }

    public String getProxyAuthUsername() {
        return proxyAuthUsername;
    }

    /**
     * The username for proxy authentication.
     */
    public void setProxyAuthUsername(String proxyAuthUsername) {
        this.proxyAuthUsername = proxyAuthUsername;
    }

    public String getProxyAuthPassword() {
        return proxyAuthPassword;
    }

    /**
     * The password for proxy authentication.
     */
    public void setProxyAuthPassword(String proxyAuthPassword) {
        this.proxyAuthPassword = proxyAuthPassword;
    }

    // NOTE(review): proxyAuthHost/proxyAuthPort are deprecated — presumably superseded by
    // proxyHost/proxyPort above; confirm the intended replacement and document it on @Deprecated.
    @Deprecated
    public String getProxyAuthHost() {
        return proxyAuthHost;
    }

    @Deprecated
    public void setProxyAuthHost(String proxyAuthHost) {
        this.proxyAuthHost = proxyAuthHost;
    }

    @Deprecated
    public Integer getProxyAuthPort() {
        return proxyAuthPort;
    }

    @Deprecated
    public void setProxyAuthPort(Integer proxyAuthPort) {
        this.proxyAuthPort = proxyAuthPort;
    }

    public String getProxyAuthDomain() {
        return proxyAuthDomain;
    }

    /**
     * The domain for proxy authentication (NTLM-style — assumption based on the companion proxyAuthNtHost
     * property; confirm against the authentication setup).
     */
    public void setProxyAuthDomain(String proxyAuthDomain) {
        this.proxyAuthDomain = proxyAuthDomain;
    }

    public String getProxyAuthNtHost() {
        return proxyAuthNtHost;
    }

    /**
     * The NT host for proxy authentication.
     */
    public void setProxyAuthNtHost(String proxyAuthNtHost) {
        this.proxyAuthNtHost = proxyAuthNtHost;
    }

    public int getResponsePayloadStreamingThreshold() {
        return responsePayloadStreamingThreshold;
    }

    /**
     * Threshold (presumably in bytes) above which response payloads are streamed rather than buffered —
     * usage is not visible in this class; confirm against the endpoint/producer implementation.
     */
    public void setResponsePayloadStreamingThreshold(int responsePayloadStreamingThreshold) {
        this.responsePayloadStreamingThreshold = responsePayloadStreamingThreshold;
    }
    public boolean isRedirectHandlingDisabled() {
        return redirectHandlingDisabled;
    }

    /**
     * When true, HttpClientBuilder#disableRedirectHandling() is applied to clients built by this component
     * (see createHttpClientBuilder).
     */
    public void setRedirectHandlingDisabled(boolean redirectHandlingDisabled) {
        this.redirectHandlingDisabled = redirectHandlingDisabled;
    }

    public boolean isAutomaticRetriesDisabled() {
        return automaticRetriesDisabled;
    }

    /**
     * When true, HttpClientBuilder#disableAutomaticRetries() is applied to clients built by this component.
     */
    public void setAutomaticRetriesDisabled(boolean automaticRetriesDisabled) {
        this.automaticRetriesDisabled = automaticRetriesDisabled;
    }

    public boolean isContentCompressionDisabled() {
        return contentCompressionDisabled;
    }

    /**
     * When true, HttpClientBuilder#disableContentCompression() is applied to clients built by this component.
     */
    public void setContentCompressionDisabled(boolean contentCompressionDisabled) {
        this.contentCompressionDisabled = contentCompressionDisabled;
    }

    public boolean isCookieManagementDisabled() {
        return cookieManagementDisabled;
    }

    /**
     * When true, HttpClientBuilder#disableCookieManagement() is applied to clients built by this component.
     */
    public void setCookieManagementDisabled(boolean cookieManagementDisabled) {
        this.cookieManagementDisabled = cookieManagementDisabled;
    }

    public boolean isAuthCachingDisabled() {
        return authCachingDisabled;
    }

    /**
     * When true, HttpClientBuilder#disableAuthCaching() is applied to clients built by this component.
     */
    public void setAuthCachingDisabled(boolean authCachingDisabled) {
        this.authCachingDisabled = authCachingDisabled;
    }

    public boolean isConnectionStateDisabled() {
        return connectionStateDisabled;
    }

    /**
     * When true, HttpClientBuilder#disableConnectionState() is applied to clients built by this component.
     */
    public void setConnectionStateDisabled(boolean connectionStateDisabled) {
        this.connectionStateDisabled = connectionStateDisabled;
    }

    public boolean isDefaultUserAgentDisabled() {
        return defaultUserAgentDisabled;
    }

    /**
     * When true, HttpClientBuilder#disableDefaultUserAgent() is applied to clients built by this component.
     */
    public void setDefaultUserAgentDisabled(boolean defaultUserAgentDisabled) {
        this.defaultUserAgentDisabled = defaultUserAgentDisabled;
    }

    // The header-copy/skip flags below are read by the endpoint/binding code, which is not visible in this
    // chunk; their docs intentionally only restate the property names.
    public boolean isCopyHeaders() {
        return copyHeaders;
    }

    public void setCopyHeaders(boolean copyHeaders) {
        this.copyHeaders = copyHeaders;
    }

    public boolean isSkipControlHeaders() {
        return skipControlHeaders;
    }

    public void setSkipControlHeaders(boolean skipControlHeaders) {
        this.skipControlHeaders = skipControlHeaders;
    }

    public boolean isSkipRequestHeaders() {
        return skipRequestHeaders;
    }

    public void setSkipRequestHeaders(boolean skipRequestHeaders) {
        this.skipRequestHeaders = skipRequestHeaders;
    }

    public boolean isSkipResponseHeaders() {
        return skipResponseHeaders;
    }

    public void setSkipResponseHeaders(boolean skipResponseHeaders) {
        this.skipResponseHeaders = skipResponseHeaders;
    }

    public boolean isFollowRedirects() {
        return followRedirects;
    }

    /**
     * Component-level default for following redirects; can be overridden per endpoint via the
     * "followRedirects" parameter (see createHttpClientBuilder).
     */
    public void setFollowRedirects(boolean followRedirects) {
        this.followRedirects = followRedirects;
    }

    public String getUserAgent() {
        return userAgent;
    }

    public void setUserAgent(String userAgent) {
        this.userAgent = userAgent;
    }

    public HttpActivityListener getHttpActivityListener() {
        return httpActivityListener;
    }

    /**
     * Listener notified about HTTP activity; when unset and logHttpActivity is enabled, a
     * LoggingHttpActivityListener is created on start (see doStart).
     */
    public void setHttpActivityListener(HttpActivityListener httpActivityListener) {
        this.httpActivityListener = httpActivityListener;
    }

    public boolean isLogHttpActivity() {
        return logHttpActivity;
    }

    /**
     * Whether to install a default logging HTTP activity listener on start, if no custom listener is set.
     */
    public void setLogHttpActivity(boolean logHttpActivity) {
        this.logHttpActivity = logHttpActivity;
    }

    public boolean isContentTypeCharsetEnabled() {
        return contentTypeCharsetEnabled;
    }

    // NOTE(review): usage of contentTypeCharsetEnabled is not visible in this chunk — presumably controls
    // whether a charset is appended to the Content-Type header; confirm against the binding code.
    public void setContentTypeCharsetEnabled(boolean contentTypeCharsetEnabled) {
        this.contentTypeCharsetEnabled = contentTypeCharsetEnabled;
    }
    @Override
    public void doStart() throws Exception {
        super.doStart();
        // lazily create the default logging listener when activity logging is on but no custom listener is set
        if (logHttpActivity && httpActivityListener == null) {
            httpActivityListener = new LoggingHttpActivityListener();
        }
        // trySetCamelContext / startService are null-tolerant, so this is safe when no listener is configured
        CamelContextAware.trySetCamelContext(httpActivityListener, getCamelContext());
        ServiceHelper.startService(httpActivityListener);
    }
    @Override
    public void doStop() throws Exception {
        // shutdown connection manager
        if (clientConnectionManager != null) {
            // NOTE(review): this also closes a manager injected via setClientConnectionManager, although that
            // one is documented as "shared" — confirm the component is expected to own its lifecycle
            LOG.info("Shutting down ClientConnectionManager: {}", clientConnectionManager);
            clientConnectionManager.close();
            clientConnectionManager = null;
        }
        ServiceHelper.stopService(httpActivityListener);
        super.doStop();
    }
}
|
HttpComponent
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/boot/internal/MetadataImpl.java
|
{
"start": 3108,
"end": 19670
}
|
class ____ implements MetadataImplementor, Serializable {
private final UUID uuid;
private final MetadataBuildingOptions metadataBuildingOptions;
private final BootstrapContext bootstrapContext;
private final Map<String,PersistentClass> entityBindingMap;
private final List<Component> composites;
private final Map<Class<?>, Component> genericComponentsMap;
private final Map<Class<?>, DiscriminatorType<?>> embeddableDiscriminatorTypesMap;
private final Map<Class<?>, MappedSuperclass> mappedSuperclassMap;
private final Map<String,Collection> collectionBindingMap;
private final Map<String, TypeDefinition> typeDefinitionMap;
private final Map<String, FilterDefinition> filterDefinitionMap;
private final Map<String, FetchProfile> fetchProfileMap;
private final Map<String, String> imports;
private final Map<String, IdentifierGeneratorDefinition> idGeneratorDefinitionMap;
private final Map<String, NamedHqlQueryDefinition<?>> namedQueryMap;
private final Map<String, NamedNativeQueryDefinition<?>> namedNativeQueryMap;
private final Map<String, NamedProcedureCallDefinition> namedProcedureCallMap;
private final Map<String, NamedResultSetMappingDescriptor> sqlResultSetMappingMap;
private final Map<String, NamedEntityGraphDefinition> namedEntityGraphMap;
private final Map<String, SqmFunctionDescriptor> sqlFunctionMap;
private final Database database;
public MetadataImpl(
UUID uuid,
MetadataBuildingOptions metadataBuildingOptions,
Map<String, PersistentClass> entityBindingMap,
List<Component> composites,
Map<Class<?>, Component> genericComponentsMap,
Map<Class<?>, DiscriminatorType<?>> embeddableDiscriminatorTypesMap,
Map<Class<?>, MappedSuperclass> mappedSuperclassMap,
Map<String, Collection> collectionBindingMap,
Map<String, TypeDefinition> typeDefinitionMap,
Map<String, FilterDefinition> filterDefinitionMap,
Map<String, FetchProfile> fetchProfileMap,
Map<String, String> imports,
Map<String, IdentifierGeneratorDefinition> idGeneratorDefinitionMap,
Map<String, NamedHqlQueryDefinition<?>> namedQueryMap,
Map<String, NamedNativeQueryDefinition<?>> namedNativeQueryMap,
Map<String, NamedProcedureCallDefinition> namedProcedureCallMap,
Map<String, NamedResultSetMappingDescriptor> sqlResultSetMappingMap,
Map<String, NamedEntityGraphDefinition> namedEntityGraphMap,
Map<String, SqmFunctionDescriptor> sqlFunctionMap,
Database database,
BootstrapContext bootstrapContext) {
this.uuid = uuid;
this.metadataBuildingOptions = metadataBuildingOptions;
this.entityBindingMap = entityBindingMap;
this.composites = composites;
this.genericComponentsMap = genericComponentsMap;
this.embeddableDiscriminatorTypesMap = embeddableDiscriminatorTypesMap;
this.mappedSuperclassMap = mappedSuperclassMap;
this.collectionBindingMap = collectionBindingMap;
this.typeDefinitionMap = typeDefinitionMap;
this.filterDefinitionMap = filterDefinitionMap;
this.fetchProfileMap = fetchProfileMap;
this.imports = imports;
this.idGeneratorDefinitionMap = idGeneratorDefinitionMap;
this.namedQueryMap = namedQueryMap;
this.namedNativeQueryMap = namedNativeQueryMap;
this.namedProcedureCallMap = namedProcedureCallMap;
this.sqlResultSetMappingMap = sqlResultSetMappingMap;
this.namedEntityGraphMap = namedEntityGraphMap;
this.sqlFunctionMap = sqlFunctionMap;
this.database = database;
this.bootstrapContext = bootstrapContext;
}
@Override
public MetadataBuildingOptions getMetadataBuildingOptions() {
return metadataBuildingOptions;
}
@Override
public TypeConfiguration getTypeConfiguration() {
return bootstrapContext.getTypeConfiguration();
}
@Override
public SqmFunctionRegistry getFunctionRegistry() {
return bootstrapContext.getFunctionRegistry();
}
@Override
public SessionFactoryBuilder getSessionFactoryBuilder() {
final var defaultBuilder = getFactoryBuilder();
SessionFactoryBuilder builder = null;
List<String> activeFactoryNames = null;
for ( var discoveredBuilderFactory : getSessionFactoryBuilderFactories() ) {
final SessionFactoryBuilder returnedBuilder =
discoveredBuilderFactory.getSessionFactoryBuilder( this, defaultBuilder );
if ( returnedBuilder != null ) {
if ( activeFactoryNames == null ) {
activeFactoryNames = new ArrayList<>();
}
activeFactoryNames.add( discoveredBuilderFactory.getClass().getName() );
builder = returnedBuilder;
}
}
if ( activeFactoryNames != null && activeFactoryNames.size() > 1 ) {
throw new HibernateException(
"Multiple active SessionFactoryBuilderFactory definitions were discovered: " +
join( ", ", activeFactoryNames )
);
}
return builder == null ? defaultBuilder : builder;
}
private Iterable<SessionFactoryBuilderFactory> getSessionFactoryBuilderFactories() {
return getClassLoaderService().loadJavaServices( SessionFactoryBuilderFactory.class );
}
private SessionFactoryBuilderImplementor getFactoryBuilder() {
return metadataBuildingOptions.getServiceRegistry()
.requireService( SessionFactoryBuilderService.class )
.createSessionFactoryBuilder( this, bootstrapContext );
}
private ClassLoaderService getClassLoaderService() {
return metadataBuildingOptions.getServiceRegistry().requireService( ClassLoaderService.class );
}
@Override
public SessionFactoryImplementor buildSessionFactory() {
return (SessionFactoryImplementor) getSessionFactoryBuilder().build();
}
@Override
public UUID getUUID() {
return uuid;
}
@Override
public Database getDatabase() {
return database;
}
@Override
public java.util.Collection<PersistentClass> getEntityBindings() {
return entityBindingMap.values();
}
@Override
public PersistentClass getEntityBinding(String entityName) {
return entityBindingMap.get( entityName );
}
@Override
public java.util.Collection<Collection> getCollectionBindings() {
return collectionBindingMap.values();
}
@Override
public Collection getCollectionBinding(String role) {
return collectionBindingMap.get( role );
}
@Override
public Map<String, String> getImports() {
return imports;
}
@Override
public NamedHqlQueryDefinition<?> getNamedHqlQueryMapping(String name) {
return namedQueryMap.get( name );
}
@Override
public void visitNamedHqlQueryDefinitions(Consumer<NamedHqlQueryDefinition<?>> definitionConsumer) {
namedQueryMap.values().forEach( definitionConsumer );
}
@Override
public NamedNativeQueryDefinition<?> getNamedNativeQueryMapping(String name) {
return namedNativeQueryMap.get( name );
}
@Override
public void visitNamedNativeQueryDefinitions(Consumer<NamedNativeQueryDefinition<?>> definitionConsumer) {
namedNativeQueryMap.values().forEach( definitionConsumer );
}
@Override
public NamedProcedureCallDefinition getNamedProcedureCallMapping(String name) {
return namedProcedureCallMap.get( name );
}
@Override
public void visitNamedProcedureCallDefinition(Consumer<NamedProcedureCallDefinition> definitionConsumer) {
namedProcedureCallMap.values().forEach( definitionConsumer );
}
@Override
public NamedResultSetMappingDescriptor getResultSetMapping(String name) {
return sqlResultSetMappingMap.get( name );
}
@Override
public void visitNamedResultSetMappingDefinition(Consumer<NamedResultSetMappingDescriptor> definitionConsumer) {
sqlResultSetMappingMap.values().forEach( definitionConsumer );
}
@Override
public TypeDefinition getTypeDefinition(String typeName) {
return typeDefinitionMap.get( typeName );
}
@Override
public Map<String, FilterDefinition> getFilterDefinitions() {
return filterDefinitionMap;
}
@Override
public FilterDefinition getFilterDefinition(String name) {
return filterDefinitionMap.get( name );
}
@Override
public FetchProfile getFetchProfile(String name) {
return fetchProfileMap.get( name );
}
@Override
public java.util.Collection<FetchProfile> getFetchProfiles() {
return fetchProfileMap.values();
}
@Override
public NamedEntityGraphDefinition getNamedEntityGraph(String name) {
return namedEntityGraphMap.get( name );
}
@Override
public Map<String, NamedEntityGraphDefinition> getNamedEntityGraphs() {
return namedEntityGraphMap;
}
@Override
public IdentifierGeneratorDefinition getIdentifierGenerator(String name) {
return idGeneratorDefinitionMap.get( name );
}
@Override
public Map<String, SqmFunctionDescriptor> getSqlFunctionMap() {
return sqlFunctionMap;
}
@Override
public Set<String> getContributors() {
final HashSet<String> contributors = new HashSet<>();
entityBindingMap.forEach( (s, persistentClass)
-> contributors.add( persistentClass.getContributor() ) );
for ( var namespace : database.getNamespaces() ) {
for ( var table : namespace.getTables() ) {
contributors.add( table.getContributor() );
}
for ( var sequence : namespace.getSequences() ) {
contributors.add( sequence.getContributor() );
}
}
return contributors;
}
@Override
public java.util.Collection<Table> collectTableMappings() {
final ArrayList<Table> tables = new ArrayList<>();
for ( var namespace : database.getNamespaces() ) {
tables.addAll( namespace.getTables() );
}
return tables;
}
@Override
public NamedObjectRepository buildNamedQueryRepository() {
return new NamedObjectRepositoryImpl(
mapOfSize( namedQueryMap.size() ),
mapOfSize( namedNativeQueryMap.size() ),
mapOfSize( namedProcedureCallMap.size() ),
mapOfSize( sqlResultSetMappingMap.size() )
);
}
@Override
public void orderColumns(boolean forceOrdering) {
final var columnOrderingStrategy = metadataBuildingOptions.getColumnOrderingStrategy();
// No need to order columns when using the no-op strategy
if ( columnOrderingStrategy != ColumnOrderingStrategyLegacy.INSTANCE ) {
final boolean shouldOrderTableColumns = forceOrdering || shouldOrderTableColumns();
for ( var namespace : database.getNamespaces() ) {
if ( shouldOrderTableColumns ) {
for ( var table : namespace.getTables() ) {
handleTable( table, columnOrderingStrategy );
handlePrimaryKey( table, columnOrderingStrategy );
handleForeignKeys( table, columnOrderingStrategy );
}
}
for ( var userDefinedType : namespace.getUserDefinedTypes() ) {
handleUDT( userDefinedType, columnOrderingStrategy );
}
}
}
}
private void handleTable(Table table, ColumnOrderingStrategy columnOrderingStrategy) {
final var tableColumns = columnOrderingStrategy.orderTableColumns( table, this );
if ( tableColumns != null ) {
table.reorderColumns( tableColumns );
}
}
private void handleUDT(UserDefinedType userDefinedType, ColumnOrderingStrategy columnOrderingStrategy) {
if ( userDefinedType instanceof UserDefinedObjectType objectType
&& objectType.getColumns().size() > 1 ) {
final var objectTypeColumns =
columnOrderingStrategy.orderUserDefinedTypeColumns( objectType, this );
if ( objectTypeColumns != null ) {
objectType.reorderColumns( objectTypeColumns );
}
}
}
private void handleForeignKeys(Table table, ColumnOrderingStrategy columnOrderingStrategy) {
for ( var foreignKey : table.getForeignKeyCollection() ) {
final var columns = foreignKey.getColumns();
if ( columns.size() > 1 ) {
if ( foreignKey.getReferencedColumns().isEmpty() ) {
final var targetPrimaryKey =
foreignKey.getReferencedTable().getPrimaryKey();
// Make sure we order the columns of the primary key first,
// so that foreign key ordering can rely on this
if ( targetPrimaryKey.getOriginalOrder() == null ) {
final var primaryKeyColumns =
columnOrderingStrategy.orderConstraintColumns( targetPrimaryKey, this );
if ( primaryKeyColumns != null ) {
targetPrimaryKey.reorderColumns( primaryKeyColumns );
}
}
// Patch up the order of foreign keys based on new order of the target primary key
final int[] originalPrimaryKeyOrder = targetPrimaryKey.getOriginalOrder();
if ( originalPrimaryKeyOrder != null ) {
final var foreignKeyColumnsCopy = new ArrayList<>( columns );
for ( int i = 0; i < foreignKeyColumnsCopy.size(); i++ ) {
columns.set( i, foreignKeyColumnsCopy.get( originalPrimaryKeyOrder[i] ) );
}
}
}
}
}
}
private void handlePrimaryKey(Table table, ColumnOrderingStrategy columnOrderingStrategy) {
final var primaryKey = table.getPrimaryKey();
if ( primaryKey != null
&& primaryKey.getColumns().size() > 1
&& primaryKey.getOriginalOrder() == null ) {
final var primaryKeyColumns =
columnOrderingStrategy.orderConstraintColumns( primaryKey, this );
if ( primaryKeyColumns != null ) {
primaryKey.reorderColumns( primaryKeyColumns );
}
}
}
private boolean shouldOrderTableColumns() {
final var settings =
metadataBuildingOptions.getServiceRegistry()
.requireService( ConfigurationService.class )
.getSettings();
for ( var grouping : ActionGrouping.interpret( this, settings ) ) {
if ( isColumnOrderingRelevant( grouping.scriptAction() )
|| isColumnOrderingRelevant( grouping.databaseAction() ) ) {
return true;
}
}
return false;
}
private static boolean isColumnOrderingRelevant(Action grouping) {
return switch ( grouping ) {
case CREATE, CREATE_DROP, CREATE_ONLY -> true;
default -> false;
};
}
@Override
public void validate() throws MappingException {
for ( var entityBinding : this.getEntityBindings() ) {
entityBinding.validate( this );
}
for ( var collectionBinding : this.getCollectionBindings() ) {
collectionBinding.validate( this );
}
}
@Override
public Set<MappedSuperclass> getMappedSuperclassMappingsCopy() {
return mappedSuperclassMap == null
? emptySet()
: new HashSet<>( mappedSuperclassMap.values() );
}
@Override
public void initSessionFactory(SessionFactoryImplementor sessionFactory) {
// must not use BootstrapContext services here
final var registry = sessionFactory.getServiceRegistry();
assert registry != null;
final var configurationService = registry.requireService( ConfigurationService.class );
final var classLoaderService = registry.requireService( ClassLoaderService.class );
final var eventListenerRegistry = sessionFactory.getEventListenerRegistry();
configurationService.getSettings().forEach( (propertyName, value) -> {
if ( propertyName.startsWith( EVENT_LISTENER_PREFIX ) ) {
final String eventTypeName = propertyName.substring( EVENT_LISTENER_PREFIX.length() + 1 );
final var eventType = EventType.resolveEventTypeByName( eventTypeName );
final String listeners = (String) value;
appendListeners( eventListenerRegistry, classLoaderService, listeners, eventType );
}
} );
}
private <T> void appendListeners(
EventListenerRegistry eventListenerRegistry,
ClassLoaderService classLoaderService,
String listeners,
EventType<T> eventType) {
final var eventListenerGroup = eventListenerRegistry.getEventListenerGroup( eventType );
for ( String listenerImpl : splitAtCommas( listeners ) ) {
@SuppressWarnings("unchecked")
T listener = (T) instantiate( listenerImpl, classLoaderService );
if ( !eventType.baseListenerInterface().isInstance( listener ) ) {
throw new HibernateException( "Event listener '" + listenerImpl
+ "' must implement '" + eventType.baseListenerInterface().getName() + "'");
}
eventListenerGroup.appendListener( listener );
}
}
private static Object instantiate(String listenerImpl, ClassLoaderService classLoaderService) {
try {
return classLoaderService.classForName( listenerImpl ).newInstance();
}
catch (Exception e) {
throw new HibernateException( "Could not instantiate event listener '" + listenerImpl + "'", e );
}
}
@Override
public void visitRegisteredComponents(Consumer<Component> consumer) {
composites.forEach( consumer );
}
@Override
public Component getGenericComponent(Class<?> componentClass) {
return genericComponentsMap.get( componentClass );
}
@Override
public DiscriminatorType<?> resolveEmbeddableDiscriminatorType(
Class<?> embeddableClass,
Supplier<DiscriminatorType<?>> supplier) {
return embeddableDiscriminatorTypesMap.computeIfAbsent( embeddableClass, k -> supplier.get() );
}
@Override
public org.hibernate.type.Type getIdentifierType(String entityName) throws MappingException {
final var persistentClass = entityBindingMap.get( entityName );
if ( persistentClass == null ) {
throw new MappingException( "Persistent
|
MetadataImpl
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/refaster/testdata/output/SuppressWarningsTemplateExample.java
|
{
"start": 923,
"end": 1187
}
|
class ____ {
public int abs2(int x) {
return x < 0 ? -x : x;
}
}
public int abs3(int x) {
@SuppressWarnings("SuppressWarningsTemplate")
int r = x < 0 ? -x : x;
return r;
}
public int abs4(int x) {
return Math.abs(x);
}
}
|
Inner
|
java
|
FasterXML__jackson-databind
|
src/test/java/tools/jackson/databind/struct/TestPOJOAsArray.java
|
{
"start": 2476,
"end": 2578
}
|
class ____ {
public B value = new B();
}
@JsonPropertyOrder(alphabetic=true)
static
|
A
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/tool/schema/extract/internal/InformationExtractorPostgreSQLImpl.java
|
{
"start": 451,
"end": 2921
}
|
class ____ extends InformationExtractorJdbcDatabaseMetaDataImpl {
public InformationExtractorPostgreSQLImpl(ExtractionContext extractionContext) {
super( extractionContext );
}
@Override
public boolean supportsBulkPrimaryKeyRetrieval() {
return true;
}
@Override
public boolean supportsBulkForeignKeyRetrieval() {
return true;
}
@Override
public NameSpaceIndexesInformation getIndexes(Identifier catalog, Identifier schema) {
final String tableSchema = schema == null ? null : schema.getText();
try ( var preparedStatement = getExtractionContext().getJdbcConnection().prepareStatement( getIndexesSql( tableSchema ) )) {
if ( tableSchema != null ) {
preparedStatement.setString( 1, tableSchema );
}
try ( var resultSet = preparedStatement.executeQuery() ) {
return extractNameSpaceIndexesInformation( resultSet );
}
}
catch (SQLException e) {
throw convertSQLException( e,
"Error while reading index information for namespace "
+ new Namespace.Name( catalog, schema ) );
}
}
private String getIndexesSql(String tableSchema) {
final String sql = """
select\
current_database() as "TABLE_CAT",\
n.nspname as "TABLE_SCHEM",\
ct.relname as "TABLE_NAME",\
not i.indisunique as "NON_UNIQUE",\
null as "INDEX_QUALIFIER",\
ci.relname as "INDEX_NAME",\
case i.indisclustered\
when true then 1\
else\
case am.amname\
when 'hash' then 2\
else 3\
end\
end as "TYPE",\
ic.n as "ORDINAL_POSITION",\
ci.reltuples as "CARDINALITY",\
ci.relpages as "PAGES",\
pg_catalog.pg_get_expr(i.indpred, i.indrelid) as "FILTER_CONDITION",\
trim(both '"' from pg_catalog.pg_get_indexdef(ci.oid, ic.n, false)) as "COLUMN_NAME",\
case am.amname\
when 'btree' then\
case i.indoption[ic.n - 1] & 1::smallint\
when 1 then 'D'\
else 'A'\
end\
end as "ASC_OR_DESC"
from pg_catalog.pg_class ct
join pg_catalog.pg_namespace n on (ct.relnamespace = n.oid)
join pg_catalog.pg_index i on (ct.oid = i.indrelid)
join pg_catalog.pg_class ci on (ci.oid = i.indexrelid)
join pg_catalog.pg_am am on (ci.relam = am.oid)
join information_schema._pg_expandarray(i.indkey) ic on 1=1
""";
return sql + (tableSchema == null ? "" : " where n.nspname = ?");
}
@Override
public boolean supportsBulkIndexRetrieval() {
return true;
}
}
|
InformationExtractorPostgreSQLImpl
|
java
|
FasterXML__jackson-databind
|
src/test/java/tools/jackson/databind/deser/builder/BuilderErrorHandlingTest.java
|
{
"start": 1677,
"end": 6290
}
|
class ____
{
private String first;
private String second;
@JsonSetter("a")
Builder first(String value) {
this.first = value;
return this;
}
@JsonSetter("b")
Builder second(String value) {
this.second = value;
return this;
}
ValidatingValue build() {
if (first == null) {
throw new ValidationException("Missing first");
}
if (second == null) {
throw new ValidationException("Missing second");
}
return new ValidatingValue(first, second);
}
}
}
/*
/**********************************************************
/* Unit tests
/**********************************************************
*/
private final ObjectMapper MAPPER = newJsonMapper();
private final ObjectMapper MAPPER_WITH_WRAPPING = jsonMapperBuilder()
.enable(DeserializationFeature.WRAP_EXCEPTIONS)
.build();
private final ObjectMapper MAPPER_NO_WRAPPING = jsonMapperBuilder()
.disable(DeserializationFeature.WRAP_EXCEPTIONS)
.build();
@Test
public void testUnknownProperty() throws Exception
{
// first, default failure
String json = a2q("{'x':1,'z':2,'y':4}");
try {
MAPPER.readerFor(ValueClassXY.class)
.with(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES)
.readValue(json);
fail("Should not pass");
} catch (MismatchedInputException e) {
verifyException(e, "Unrecognized property ");
}
// but pass if ok to ignore
ValueClassXY result = MAPPER.readerFor(ValueClassXY.class)
.without(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES)
.readValue(json);
assertEquals(2, result._x);
assertEquals(5, result._y);
}
@Test
public void testWrongShape() throws Exception
{
try {
MAPPER.readValue("123", ValueClassXY.class);
fail("Should not pass");
} catch (MismatchedInputException e) {
verifyException(e, "Cannot construct instance of ");
// should report Builder class, not value here, right?
verifyException(e, "$SimpleBuilderXY");
}
}
// [databind#2938]
@Test
public void testSuccessfulValidatingBuilder() throws Exception
{
ValidatingValue result = MAPPER.readValue(a2q("{'a':'1','b':'2'}"), ValidatingValue.class);
assertEquals("1", result.first);
assertEquals("2", result.second);
}
@Test
public void testFailingValidatingBuilderWithExceptionWrapping() throws Exception
{
try {
MAPPER_WITH_WRAPPING.readValue(a2q("{'a':'1'}"), ValidatingValue.class);
fail("Expected an exception");
} catch (ValueInstantiationException e) {
verifyException(e, "Missing second");
assertTrue(e.getCause() instanceof ValidatingValue.ValidationException);
}
}
@Test
public void testFailingValidatingBuilderWithExceptionWrappingFromTree() throws Exception
{
try {
JsonNode tree = MAPPER_WITH_WRAPPING.readTree(a2q("{'a':'1'}"));
MAPPER_WITH_WRAPPING.treeToValue(tree, ValidatingValue.class);
fail("Expected an exception");
} catch (ValueInstantiationException e) {
verifyException(e, "Missing second");
assertTrue(e.getCause() instanceof ValidatingValue.ValidationException);
}
}
@Test
public void testFailingValidatingBuilderWithoutExceptionWrapping() throws Exception
{
try {
MAPPER_NO_WRAPPING
.readValue(a2q("{'a':'1'}"), ValidatingValue.class);
fail("Expected an exception");
} catch (ValidatingValue.ValidationException e) {
assertEquals("Missing second", e.getMessage());
}
}
@Test
public void testFailingValidatingBuilderWithoutExceptionWrappingFromTree() throws Exception
{
try {
JsonNode tree = MAPPER_NO_WRAPPING.readTree(a2q("{'a':'1'}"));
MAPPER_NO_WRAPPING.treeToValue(tree, ValidatingValue.class);
fail("Expected an exception");
} catch (ValidatingValue.ValidationException e) {
assertEquals("Missing second", e.getMessage());
}
}
}
|
Builder
|
java
|
spring-projects__spring-boot
|
loader/spring-boot-jarmode-tools/src/main/java/org/springframework/boot/jarmode/tools/JarStructure.java
|
{
"start": 969,
"end": 2252
}
|
interface ____ {
/**
* Resolve the specified {@link ZipEntry}, return {@code null} if the entry should not
* be handled.
* @param entry the entry to handle
* @return the resolved {@link Entry}
*/
default @Nullable Entry resolve(ZipEntry entry) {
return resolve(entry.getName());
}
/**
* Resolve the entry with the specified name, return {@code null} if the entry should
* not be handled.
* @param name the name of the entry to handle
* @return the resolved {@link Entry}
*/
@Nullable Entry resolve(String name);
/**
* Create the {@link Manifest} for the launcher jar, applying the specified operator
* on each classpath entry.
* @param libraryTransformer the operator to apply on each classpath entry
* @return the manifest to use for the launcher jar
*/
Manifest createLauncherManifest(UnaryOperator<String> libraryTransformer);
/**
* Return the location of the application classes.
* @return the location of the application classes
*/
String getClassesLocation();
/**
* An entry to handle in the exploded structure.
*
* @param originalLocation the original location
* @param location the relative location
* @param type of the entry
*/
record Entry(String originalLocation, String location, Type type) {
|
JarStructure
|
java
|
spring-projects__spring-data-jpa
|
spring-data-jpa/src/test/java/org/springframework/data/jpa/repository/JavaConfigUserRepositoryTests.java
|
{
"start": 2714,
"end": 2854
}
|
class ____ extends UserRepositoryTests {
@Configuration
@ImportResource("classpath:infrastructure.xml")
static
|
JavaConfigUserRepositoryTests
|
java
|
elastic__elasticsearch
|
server/src/internalClusterTest/java/org/elasticsearch/discovery/ClusterDisruptionIT.java
|
{
"start": 4562,
"end": 29733
}
|
enum ____ {
none,
external,
create;
static ConflictMode randomMode() {
ConflictMode[] values = values();
return values[randomInt(values.length - 1)];
}
}
/**
* Test that we do not lose documents, indexed via requests that return success, under randomly selected disruption schemes.
* We also collect & report the type of indexing failures that occur.
* <p>
* This test is a superset of tests run in the Jepsen test suite, with the exception of versioned updates.
*/
@TestLogging(
value = "_root:DEBUG,org.elasticsearch.action.bulk:TRACE,org.elasticsearch.action.get:TRACE,"
+ "org.elasticsearch.discovery:TRACE,org.elasticsearch.action.support.replication:TRACE,"
+ "org.elasticsearch.cluster.service:TRACE,org.elasticsearch.indices.recovery:TRACE,"
+ "org.elasticsearch.indices.cluster:TRACE,org.elasticsearch.index.shard:TRACE",
reason = "Past failures have required a lot of additional logging to debug"
)
public void testAckedIndexing() throws Exception {
final int seconds = (TEST_NIGHTLY && rarely()) == false ? 1 : 5;
final TimeValue timeout = TimeValue.timeValueSeconds(seconds);
final List<String> nodes = startCluster(rarely() ? 5 : 3);
assertAcked(
prepareCreate("test").setSettings(
Settings.builder()
.put(indexSettings())
.put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1 + randomInt(2))
.put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, randomInt(2))
)
);
ensureGreen();
ServiceDisruptionScheme disruptionScheme = addRandomDisruptionScheme();
logger.info("disruption scheme [{}] added", disruptionScheme);
final ConcurrentHashMap<String, String> ackedDocs = new ConcurrentHashMap<>(); // id -> node sent.
final AtomicBoolean stop = new AtomicBoolean(false);
List<Thread> indexers = new ArrayList<>(nodes.size());
List<Semaphore> semaphores = new ArrayList<>(nodes.size());
final AtomicInteger idGenerator = new AtomicInteger(0);
final AtomicReference<CountDownLatch> countDownLatchRef = new AtomicReference<>();
final List<Exception> exceptedExceptions = new CopyOnWriteArrayList<>();
final ConflictMode conflictMode = ConflictMode.randomMode();
final List<String> fieldNames = IntStream.rangeClosed(0, randomInt(10)).mapToObj(n -> "f" + n).toList();
logger.info("starting indexers using conflict mode " + conflictMode);
try {
for (final String node : nodes) {
final Semaphore semaphore = new Semaphore(0);
semaphores.add(semaphore);
final Client client = client(node);
final String name = "indexer_" + indexers.size();
final int numPrimaries = getNumShards("test").numPrimaries;
Thread thread = new Thread(() -> {
while (stop.get() == false) {
String id = null;
try {
if (semaphore.tryAcquire(10, TimeUnit.SECONDS) == false) {
continue;
}
logger.info("[{}] Acquired semaphore and it has {} permits left", name, semaphore.availablePermits());
try {
id = Integer.toString(idGenerator.incrementAndGet());
int shard = Math.floorMod(Murmur3HashFunction.hash(id), numPrimaries);
logger.trace("[{}] indexing id [{}] through node [{}] targeting shard [{}]", name, id, node, shard);
IndexRequestBuilder indexRequestBuilder = client.prepareIndex("test")
.setId(id)
.setSource(Map.of(randomFrom(fieldNames), randomNonNegativeLong()), XContentType.JSON)
.setTimeout(timeout);
if (conflictMode == ConflictMode.external) {
indexRequestBuilder.setVersion(randomIntBetween(1, 10)).setVersionType(VersionType.EXTERNAL);
} else if (conflictMode == ConflictMode.create) {
indexRequestBuilder.setCreate(true);
}
DocWriteResponse response = indexRequestBuilder.get(timeout);
assertThat(response.getResult(), is(oneOf(CREATED, UPDATED)));
ackedDocs.put(id, node);
logger.trace("[{}] indexed id [{}] through node [{}], response [{}]", name, id, node, response);
} catch (ElasticsearchException e) {
exceptedExceptions.add(e);
final String docId = id;
logger.trace(() -> format("[%s] failed id [%s] through node [%s]", name, docId, node), e);
} finally {
countDownLatchRef.get().countDown();
logger.trace("[{}] decreased counter : {}", name, countDownLatchRef.get().getCount());
}
} catch (InterruptedException e) {
// fine - semaphore interrupt
} catch (AssertionError | Exception e) {
logger.info(() -> "unexpected exception in background thread of [" + node + "]", e);
}
}
});
thread.setName(name);
thread.start();
indexers.add(thread);
}
int docsPerIndexer = randomInt(3);
logger.info("indexing {} docs per indexer before partition", docsPerIndexer);
countDownLatchRef.set(new CountDownLatch(docsPerIndexer * indexers.size()));
for (Semaphore semaphore : semaphores) {
semaphore.release(docsPerIndexer);
}
assertTrue(countDownLatchRef.get().await(1, TimeUnit.MINUTES));
for (int iter = 1 + randomInt(2); iter > 0; iter--) {
logger.info("starting disruptions & indexing (iteration [{}])", iter);
disruptionScheme.startDisrupting();
docsPerIndexer = 1 + randomInt(5);
logger.info("indexing {} docs per indexer during partition", docsPerIndexer);
countDownLatchRef.set(new CountDownLatch(docsPerIndexer * indexers.size()));
Collections.shuffle(semaphores, random());
for (Semaphore semaphore : semaphores) {
assertThat(semaphore.availablePermits(), equalTo(0));
semaphore.release(docsPerIndexer);
}
logger.info("waiting for indexing requests to complete");
assertTrue(countDownLatchRef.get().await(docsPerIndexer * seconds * 1000 + 2000, TimeUnit.MILLISECONDS));
logger.info("stopping disruption");
disruptionScheme.stopDisrupting();
for (String node : internalCluster().getNodeNames()) {
ensureStableCluster(
nodes.size(),
TimeValue.timeValueMillis(disruptionScheme.expectedTimeToHeal().millis() + DISRUPTION_HEALING_OVERHEAD.millis()),
true,
node
);
}
// in case of a bridge partition, shard allocation can fail "index.allocation.max_retries" times if the master
// is the super-connected node and recovery source and target are on opposite sides of the bridge
if (disruptionScheme instanceof NetworkDisruption networkDisruption
&& networkDisruption.getDisruptedLinks() instanceof Bridge) {
assertBusy(() -> ClusterRerouteUtils.rerouteRetryFailed(client()));
}
ensureGreen("test");
logger.info("validating successful docs");
assertBusy(() -> {
for (String node : nodes) {
try {
logger.debug("validating through node [{}] ([{}] acked docs)", node, ackedDocs.size());
for (String id : ackedDocs.keySet()) {
assertTrue(
"doc [" + id + "] indexed via node [" + ackedDocs.get(id) + "] not found",
client(node).prepareGet("test", id).setPreference("_local").get().isExists()
);
}
} catch (AssertionError | NoShardAvailableActionException e) {
throw new AssertionError(e.getMessage() + " (checked via node [" + node + "]", e);
}
}
}, 30, TimeUnit.SECONDS);
logger.info("done validating (iteration [{}])", iter);
}
} finally {
logger.info("shutting down indexers");
stop.set(true);
for (Thread indexer : indexers) {
indexer.interrupt();
indexer.join(60000);
}
if (exceptedExceptions.size() > 0) {
StringBuilder sb = new StringBuilder();
for (Exception e : exceptedExceptions) {
sb.append("\n").append(e.getMessage());
}
logger.debug("Indexing exceptions during disruption: {}", sb);
}
}
}
/**
* Test that a document which is indexed on the majority side of a partition, is available from the minority side,
* once the partition is healed
*/
public void testRejoinDocumentExistsInAllShardCopies() throws Exception {
List<String> nodes = startCluster(3);
assertAcked(prepareCreate("test").setSettings(indexSettings(1, 2)).get());
ensureGreen("test");
nodes = new ArrayList<>(nodes);
Collections.shuffle(nodes, random());
String isolatedNode = nodes.get(0);
String notIsolatedNode = nodes.get(1);
TwoPartitions partitions = isolateNode(isolatedNode);
NetworkDisruption scheme = addRandomDisruptionType(partitions);
scheme.startDisrupting();
ensureStableCluster(2, notIsolatedNode);
assertFalse(
client(notIsolatedNode).admin()
.cluster()
.prepareHealth(TEST_REQUEST_TIMEOUT, "test")
.setWaitForYellowStatus()
.get()
.isTimedOut()
);
DocWriteResponse indexResponse = internalCluster().client(notIsolatedNode).prepareIndex("test").setSource("field", "value").get();
assertThat(indexResponse.getVersion(), equalTo(1L));
logger.info("Verifying if document exists via node[{}]", notIsolatedNode);
GetResponse getResponse = internalCluster().client(notIsolatedNode)
.prepareGet("test", indexResponse.getId())
.setPreference("_local")
.get();
assertThat(getResponse.isExists(), is(true));
assertThat(getResponse.getVersion(), equalTo(1L));
assertThat(getResponse.getId(), equalTo(indexResponse.getId()));
scheme.stopDisrupting();
ensureStableCluster(3);
ensureGreen("test");
for (String node : nodes) {
logger.info("Verifying if document exists after isolating node[{}] via node[{}]", isolatedNode, node);
getResponse = internalCluster().client(node).prepareGet("test", indexResponse.getId()).setPreference("_local").get();
assertThat(getResponse.isExists(), is(true));
assertThat(getResponse.getVersion(), equalTo(1L));
assertThat(getResponse.getId(), equalTo(indexResponse.getId()));
}
}
// simulate handling of sending shard failure during an isolation
public void testSendingShardFailure() throws Exception {
List<String> nodes = startCluster(3);
String masterNode = internalCluster().getMasterName();
List<String> nonMasterNodes = nodes.stream().filter(node -> node.equals(masterNode) == false).toList();
String nonMasterNode = randomFrom(nonMasterNodes);
assertAcked(prepareCreate("test").setSettings(indexSettings(3, 2)));
ensureGreen();
String nonMasterNodeId = getNodeId(nonMasterNode);
// fail a random shard
ShardRouting failedShard = randomFrom(
clusterService().state().getRoutingNodes().node(nonMasterNodeId).shardsWithState(ShardRoutingState.STARTED).toList()
);
ShardStateAction service = internalCluster().getInstance(ShardStateAction.class, nonMasterNode);
CountDownLatch latch = new CountDownLatch(1);
AtomicBoolean success = new AtomicBoolean();
String isolatedNode = randomBoolean() ? masterNode : nonMasterNode;
TwoPartitions partitions = isolateNode(isolatedNode);
// we cannot use the NetworkUnresponsive disruption type here as it will swallow the "shard failed" request, calling neither
// onSuccess nor onFailure on the provided listener.
NetworkDisruption networkDisruption = new NetworkDisruption(partitions, NetworkDisruption.DISCONNECT);
setDisruptionScheme(networkDisruption);
networkDisruption.startDisrupting();
service.localShardFailed(
failedShard,
"simulated",
new CorruptIndexException("simulated", (String) null),
new ActionListener<Void>() {
@Override
public void onResponse(final Void aVoid) {
success.set(true);
latch.countDown();
}
@Override
public void onFailure(Exception e) {
success.set(false);
latch.countDown();
assert false;
}
}
);
if (isolatedNode.equals(nonMasterNode)) {
assertNoMaster(nonMasterNode);
} else {
ensureStableCluster(2, nonMasterNode);
}
// heal the partition
networkDisruption.removeAndEnsureHealthy(internalCluster());
// the cluster should stabilize
ensureStableCluster(3);
latch.await();
// the listener should be notified
assertTrue(success.get());
// the failed shard should be gone
List<ShardRouting> shards = clusterService().state().getRoutingTable().allShards("test");
for (ShardRouting shard : shards) {
assertThat(shard.allocationId(), not(equalTo(failedShard.allocationId())));
}
}
public void testCannotJoinIfMasterLostDataFolder() throws Exception {
String masterNode = internalCluster().startMasterOnlyNode();
String dataNode = internalCluster().startDataOnlyNode();
internalCluster().restartNode(masterNode, new InternalTestCluster.RestartCallback() {
@Override
public boolean clearData(String nodeName) {
return true;
}
@Override
public Settings onNodeStopped(String nodeName) {
return Settings.builder()
.put(ClusterBootstrapService.INITIAL_MASTER_NODES_SETTING.getKey(), nodeName)
/*
* the data node might join while the master is still not fully established as master just yet and bypasses the join
* validation that is done before adding the node to the cluster. Only the join validation when handling the publish
* request takes place, but at this point the cluster state has been successfully committed, and will subsequently be
* exposed to the applier. The health check below therefore sees the cluster state with the 2 nodes and thinks all is
* good, even though the data node never accepted this state. What's worse is that it takes 90 seconds for the data
* node to be kicked out of the cluster (lag detection). We speed this up here.
*/
.put(LagDetector.CLUSTER_FOLLOWER_LAG_TIMEOUT_SETTING.getKey(), "10s")
.build();
}
@Override
public boolean validateClusterForming() {
return false;
}
});
assertBusy(() -> {
assertFalse(internalCluster().client(masterNode).admin().cluster().prepareHealth(TEST_REQUEST_TIMEOUT).get().isTimedOut());
assertTrue(
internalCluster().client(masterNode)
.admin()
.cluster()
.prepareHealth(TEST_REQUEST_TIMEOUT)
.setWaitForNodes("2")
.setTimeout(TimeValue.timeValueSeconds(2))
.get()
.isTimedOut()
);
}, 30, TimeUnit.SECONDS);
internalCluster().stopNode(dataNode); // otherwise we will fail during clean-up
}
/**
* Tests that indices are properly deleted even if there is a master transition in between.
* Test for https://github.com/elastic/elasticsearch/issues/11665
*/
public void testIndicesDeleted() throws Exception {
final String idxName = "test";
final List<String> allMasterEligibleNodes = internalCluster().startMasterOnlyNodes(2);
final String dataNode = internalCluster().startDataOnlyNode();
ensureStableCluster(3);
assertAcked(prepareCreate("test"));
final String masterNode1 = internalCluster().getMasterName();
NetworkDisruption networkDisruption = new NetworkDisruption(
new TwoPartitions(masterNode1, dataNode),
NetworkDisruption.UNRESPONSIVE
);
internalCluster().setDisruptionScheme(networkDisruption);
networkDisruption.startDisrupting();
// We know this will time out due to the partition, we check manually below to not proceed until
// the delete has been applied to the master node and the master eligible node.
internalCluster().client(masterNode1).admin().indices().prepareDelete(idxName).setTimeout(TimeValue.ZERO).get();
// Don't restart the master node until we know the index deletion has taken effect on master and the master eligible node.
assertBusy(() -> {
for (String masterNode : allMasterEligibleNodes) {
final ClusterState masterState = internalCluster().clusterService(masterNode).state();
assertTrue("index not deleted on " + masterNode, masterState.metadata().getProject().hasIndex(idxName) == false);
}
});
internalCluster().restartNode(masterNode1, InternalTestCluster.EMPTY_CALLBACK);
ensureYellow();
assertFalse(indexExists(idxName));
}
public void testRestartNodeWhileIndexing() throws Exception {
startCluster(3);
String index = "restart_while_indexing";
createIndex(index, 1, between(1, 2));
AtomicBoolean stopped = new AtomicBoolean();
Thread[] threads = new Thread[between(1, 4)];
AtomicInteger docID = new AtomicInteger();
Set<String> ackedDocs = ConcurrentCollections.newConcurrentSet();
for (int i = 0; i < threads.length; i++) {
threads[i] = new Thread(() -> {
while (stopped.get() == false && docID.get() < 5000) {
String id = Integer.toString(docID.incrementAndGet());
try {
DocWriteResponse response = prepareIndex(index).setId(id)
.setSource(Map.of("f" + randomIntBetween(1, 10), randomNonNegativeLong()), XContentType.JSON)
.get();
assertThat(response.getResult(), is(oneOf(CREATED, UPDATED)));
logger.info("--> index id={} seq_no={}", response.getId(), response.getSeqNo());
ackedDocs.add(response.getId());
} catch (ElasticsearchException ignore) {
logger.info("--> fail to index id={}", id);
}
}
});
threads[i].start();
}
ensureGreen(index);
assertBusy(() -> assertThat(docID.get(), greaterThanOrEqualTo(100)), 1L, TimeUnit.MINUTES);
internalCluster().restartRandomDataNode(new InternalTestCluster.RestartCallback());
ensureGreen(index);
assertBusy(() -> assertThat(docID.get(), greaterThanOrEqualTo(200)), 1L, TimeUnit.MINUTES);
stopped.set(true);
for (Thread thread : threads) {
thread.join();
}
ClusterState clusterState = internalCluster().clusterService().state();
for (ShardRouting shardRouting : clusterState.routingTable().allShards(index)) {
String nodeName = clusterState.nodes().get(shardRouting.currentNodeId()).getName();
IndicesService indicesService = internalCluster().getInstance(IndicesService.class, nodeName);
IndexShard shard = indicesService.getShardOrNull(shardRouting.shardId());
Set<String> docs = IndexShardTestCase.getShardDocUIDs(shard);
assertThat(
"shard [" + shard.routingEntry() + "] docIds [" + docs + "] vs acked docIds [" + ackedDocs + "]",
ackedDocs,
everyItem(is(in(docs)))
);
}
}
public void testRejoinWhileBeingRemoved() {
final String masterNode = internalCluster().startMasterOnlyNode(
Settings.builder()
.put(FOLLOWER_CHECK_INTERVAL_SETTING.getKey(), "100ms")
.put(FOLLOWER_CHECK_RETRY_COUNT_SETTING.getKey(), "1")
.build()
);
final String dataNode = internalCluster().startDataOnlyNode(
Settings.builder()
.put(DISCOVERY_FIND_PEERS_INTERVAL_SETTING.getKey(), "100ms")
.put(LEADER_CHECK_INTERVAL_SETTING.getKey(), "100ms")
.put(LEADER_CHECK_RETRY_COUNT_SETTING.getKey(), "1")
.build()
);
final ClusterService masterClusterService = internalCluster().getInstance(ClusterService.class, masterNode);
final PlainActionFuture<Void> removedNode = new PlainActionFuture<>();
masterClusterService.addListener(clusterChangedEvent -> {
if (removedNode.isDone() == false && clusterChangedEvent.state().nodes().getDataNodes().isEmpty()) {
removedNode.onResponse(null);
}
});
final ClusterService dataClusterService = internalCluster().getInstance(ClusterService.class, dataNode);
final PlainActionFuture<Void> failedLeader = new UnsafePlainActionFuture<>(ClusterApplierService.CLUSTER_UPDATE_THREAD_NAME) {
@Override
protected boolean blockingAllowed() {
// we're deliberately blocking the cluster applier on the master until the data node starts to rejoin
return true;
}
};
final AtomicBoolean dataNodeHasMaster = new AtomicBoolean(true);
dataClusterService.addListener(clusterChangedEvent -> {
dataNodeHasMaster.set(clusterChangedEvent.state().nodes().getMasterNode() != null);
if (failedLeader.isDone() == false && dataNodeHasMaster.get() == false) {
failedLeader.onResponse(null);
}
});
masterClusterService.addHighPriorityApplier(event -> {
// This is applicable for events related to the cluster disruption
if (event.state().nodes().getDataNodes().isEmpty()) {
failedLeader.actionGet();
if (dataNodeHasMaster.get() == false) {
try {
Thread.sleep(100);
} catch (InterruptedException e) {
fail(e);
}
}
}
});
MockTransportService.getInstance(dataNode)
.addRequestHandlingBehavior(FollowersChecker.FOLLOWER_CHECK_ACTION_NAME, (handler, request, channel, task) -> {
if (removedNode.isDone() == false) {
channel.sendResponse(new ElasticsearchException("simulated check failure"));
} else {
handler.messageReceived(request, channel, task);
}
});
removedNode.actionGet(10, TimeUnit.SECONDS);
ensureStableCluster(2);
}
}
|
ConflictMode
|
java
|
apache__camel
|
core/camel-core/src/test/java/org/apache/camel/builder/ErrorHandlerRefTest.java
|
{
"start": 978,
"end": 1779
}
|
class ____ extends ContextTestSupport {
@Test
public void testRef() throws Exception {
MockEndpoint mock = getMockEndpoint("mock:dead");
mock.expectedBodiesReceived("Bye World");
template.sendBody("direct:start", "Bye World");
assertMockEndpointsSatisfied();
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
DeadLetterChannelBuilder dlc = new DeadLetterChannelBuilder("mock:dead");
context.getRegistry().bind("myDead", dlc);
errorHandler("myDead");
from("direct:start").throwException(new IllegalArgumentException("Forced"));
}
};
}
}
|
ErrorHandlerRefTest
|
java
|
google__error-prone
|
core/src/main/java/com/google/errorprone/bugpatterns/JUnit4TearDownNotRun.java
|
{
"start": 1761,
"end": 2013
}
|
class ____ methods
* annotated with a non-JUnit4 @After annotation.
*
* @author glorioso@google.com (Nick Glorioso)
*/
@BugPattern(
summary = "tearDown() method will not be run; please add JUnit's @After annotation",
severity = ERROR)
public
|
or
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/ProtoStringFieldReferenceEqualityTest.java
|
{
"start": 1294,
"end": 1432
}
|
class ____ {}
""")
.addSourceLines(
"Proto.java",
"""
public abstract
|
GeneratedMessage
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/mapping/converted/converter/NullHandlingTests.java
|
{
"start": 2940,
"end": 3458
}
|
class ____ implements AttributeConverter<Sex,String> {
@Override
public String convertToDatabaseColumn(Sex attribute) {
// HHH-8697
if ( attribute == null ) {
return Sex.UNKNOWN.name().toLowerCase( Locale.ENGLISH );
}
return attribute.name().toLowerCase( Locale.ENGLISH );
}
@Override
public Sex convertToEntityAttribute(String dbData) {
// HHH-9320
if ( dbData == null ) {
return Sex.UNKNOWN;
}
return Sex.valueOf( dbData.toUpperCase( Locale.ENGLISH ) );
}
}
}
|
SexConverter
|
java
|
spring-projects__spring-security
|
ldap/src/main/java/org/springframework/security/ldap/authentication/LdapAuthenticationProvider.java
|
{
"start": 1985,
"end": 6238
}
|
interface ____ responsible for performing the user
* authentication and retrieving the user's information from the directory. Example
* implementations are
* {@link org.springframework.security.ldap.authentication.BindAuthenticator
* BindAuthenticator} which authenticates the user by "binding" as that user, and
* {@link org.springframework.security.ldap.authentication.PasswordComparisonAuthenticator
* PasswordComparisonAuthenticator} which compares the supplied password with the value
* stored in the directory, using an LDAP "compare" operation.
* <p>
* The task of retrieving the user attributes is delegated to the authenticator because
* the permissions on the attributes may depend on the type of authentication being used;
* for example, if binding as the user, it may be necessary to read them with the user's
* own permissions (using the same context used for the bind operation).
*
* <h3>LdapAuthoritiesPopulator</h3> Once the user has been authenticated, this interface
* is called to obtain the set of granted authorities for the user. The
* {@link DefaultLdapAuthoritiesPopulator DefaultLdapAuthoritiesPopulator} can be
* configured to obtain user role information from the user's attributes and/or to perform
* a search for "groups" that the user is a member of and map these to roles.
*
* <p>
* A custom implementation could obtain the roles from a completely different source, for
* example from a database.
*
* <h3>Configuration</h3>
*
* A simple configuration might be as follows:
*
* <pre>
* <bean id="contextSource"
* class="org.springframework.security.ldap.DefaultSpringSecurityContextSource">
* <constructor-arg value="ldap://monkeymachine:389/dc=springframework,dc=org"/>
* <property name="userDn" value="cn=manager,dc=springframework,dc=org"/>
* <property name="password" value="password"/>
* </bean>
*
* <bean id="ldapAuthProvider"
* class="org.springframework.security.ldap.authentication.LdapAuthenticationProvider">
* <constructor-arg>
* <bean class="org.springframework.security.ldap.authentication.BindAuthenticator">
* <constructor-arg ref="contextSource"/>
* <property name="userDnPatterns"><list><value>uid={0},ou=people</value></list></property>
* </bean>
* </constructor-arg>
* <constructor-arg>
* <bean class="org.springframework.security.ldap.userdetails.DefaultLdapAuthoritiesPopulator">
* <constructor-arg ref="contextSource"/>
* <constructor-arg value="ou=groups"/>
* <property name="groupRoleAttribute" value="ou"/>
* </bean>
* </constructor-arg>
* </bean>
* </pre>
*
* <p>
* This would set up the provider to access an LDAP server with URL
* <tt>ldap://monkeymachine:389/dc=springframework,dc=org</tt>. Authentication will be
* performed by attempting to bind with the DN
* <tt>uid=<user-login-name>,ou=people,dc=springframework,dc=org</tt>. After
* successful authentication, roles will be assigned to the user by searching under the DN
* <tt>ou=groups,dc=springframework,dc=org</tt> with the default filter
* <tt>(member=<user's-DN>)</tt>. The role name will be taken from the "ou"
* attribute of each match.
* <p>
* The authenticate method will reject empty passwords outright. LDAP servers may allow an
* anonymous bind operation with an empty password, even if a DN is supplied. In practice
* this means that if the LDAP directory is configured to allow unauthenticated access, it
* might be possible to authenticate as <i>any</i> user just by supplying an empty
* password. More information on the misuse of unauthenticated access can be found in
* <a href="https://www.ietf.org/internet-drafts/draft-ietf-ldapbis-authmeth-19.txt">
* draft -ietf-ldapbis-authmeth-19.txt</a>.
*
* @author Luke Taylor
* @see BindAuthenticator
* @see DefaultLdapAuthoritiesPopulator
*/
public
|
is
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/sql/ast/internal/PessimisticLockKind.java
|
{
"start": 342,
"end": 658
}
|
enum ____ {
NONE,
SHARE,
UPDATE;
public static PessimisticLockKind interpret(LockMode lockMode) {
return switch ( lockMode ) {
case PESSIMISTIC_READ -> SHARE;
case PESSIMISTIC_WRITE, PESSIMISTIC_FORCE_INCREMENT, UPGRADE_NOWAIT, UPGRADE_SKIPLOCKED -> UPDATE;
default -> NONE;
};
}
}
|
PessimisticLockKind
|
java
|
apache__logging-log4j2
|
log4j-core-test/src/test/java/org/apache/logging/log4j/core/async/AsyncWaitStrategyFactoryIncorrectConfigGlobalLoggersTest.java
|
{
"start": 1466,
"end": 3064
}
|
class ____ {
@BeforeAll
static void beforeClass() {
System.setProperty(Constants.LOG4J_CONTEXT_SELECTOR, AsyncLoggerContextSelector.class.getName());
System.setProperty(
ConfigurationFactory.CONFIGURATION_FILE_PROPERTY,
"AsyncWaitStrategyIncorrectFactoryConfigGlobalLoggerTest.xml");
}
@AfterAll
static void afterClass() {
System.clearProperty(Constants.LOG4J_CONTEXT_SELECTOR);
System.clearProperty(ConfigurationFactory.CONFIGURATION_FILE_PROPERTY);
}
@Test
void testIncorrectConfigWaitStrategyFactory() throws Exception {
final LoggerContext context = (LoggerContext) LogManager.getContext(false);
assertThat("context is AsyncLoggerContext", context instanceof AsyncLoggerContext);
final AsyncWaitStrategyFactory asyncWaitStrategyFactory =
context.getConfiguration().getAsyncWaitStrategyFactory();
assertNull(asyncWaitStrategyFactory);
final AsyncLogger logger = (AsyncLogger) context.getRootLogger();
final AsyncLoggerDisruptor delegate = logger.getAsyncLoggerDisruptor();
if (DisruptorUtil.DISRUPTOR_MAJOR_VERSION == 3) {
assertEquals(
TimeoutBlockingWaitStrategy.class,
delegate.getWaitStrategy().getClass());
} else {
assertEquals(
Class.forName("com.lmax.disruptor.TimeoutBlockingWaitStrategy"),
delegate.getWaitStrategy().getClass());
}
}
}
|
AsyncWaitStrategyFactoryIncorrectConfigGlobalLoggersTest
|
java
|
junit-team__junit5
|
junit-jupiter-api/src/main/java/org/junit/jupiter/api/extension/ClassTemplateInvocationContextProvider.java
|
{
"start": 803,
"end": 916
}
|
class ____ in
* different contexts — for example, with different parameters, by
* preparing the test
|
template
|
java
|
apache__camel
|
components/camel-solr/src/test/java/org/apache/camel/component/solr/integration/SolrInsertAndDeleteTest.java
|
{
"start": 1837,
"end": 21174
}
|
class ____ extends SolrTestSupport {
@Test
public void testDeleteById() throws Exception {
//insert, commit and verify
solrInsertTestEntry();
solrCommit();
assertEquals(1, executeSolrQuery("id:" + TEST_ID).getResults().getNumFound(), "wrong number of entries found");
//delete
template.sendBodyAndHeader("direct:start", TEST_ID, SolrConstants.PARAM_OPERATION,
SolrConstants.OPERATION_DELETE_BY_ID);
solrCommit();
//verify
assertEquals(0, executeSolrQuery("id:" + TEST_ID).getResults().getNumFound(), "wrong number of entries found");
}
@Test
public void testDeleteListOfIDsViaSplit() throws Exception {
//insert, commit and verify
solrInsertTestEntry(TEST_ID);
solrInsertTestEntry(TEST_ID2);
solrCommit();
assertEquals(2, executeSolrQuery("id:test*").getResults().getNumFound(), "wrong number of entries found");
//delete
template.sendBodyAndHeader(DEFAULT_START_ENDPOINT_SPLIT_THEN_COMMIT, Arrays.asList(TEST_ID, TEST_ID2),
SolrConstants.PARAM_OPERATION,
SolrConstants.OPERATION_DELETE_BY_ID);
//verify
assertEquals(0, executeSolrQuery("id:test*").getResults().getNumFound(), "wrong number of entries found");
}
@Test
public void testDeleteListOfIDsInOneDeleteOperation() throws Exception {
//insert, commit and verify
solrInsertTestEntry(TEST_ID);
solrInsertTestEntry(TEST_ID2);
solrCommit();
assertEquals(2, executeSolrQuery("id:test*").getResults().getNumFound(), "wrong number of entries found");
//delete
Map<String, Object> headers = new HashMap<>(SolrUtils.getHeadersForCommit());
headers.put(SolrConstants.PARAM_OPERATION, SolrConstants.OPERATION_DELETE_BY_ID);
template.sendBodyAndHeaders(DEFAULT_START_ENDPOINT, Arrays.asList(TEST_ID, TEST_ID2), headers);
//verify
assertEquals(0, executeSolrQuery("id:test*").getResults().getNumFound(), "wrong number of entries found");
}
@Test
public void testDeleteByQuery() throws Exception {
//insert, commit and verify
solrInsertTestEntry(TEST_ID);
solrInsertTestEntry(TEST_ID2);
solrCommit();
assertEquals(2, executeSolrQuery("id:test*").getResults().getNumFound(), "wrong number of entries found");
//delete
Map<String, Object> headers = new HashMap<>(SolrUtils.getHeadersForCommit());
headers.put(SolrConstants.PARAM_OPERATION, SolrConstants.OPERATION_DELETE_BY_QUERY);
template.sendBodyAndHeaders("direct:start", "id:test*", headers);
//verify
assertEquals(0, executeSolrQuery("id:test*").getResults().getNumFound(), "wrong number of entries found");
}
@Test
public void testInsertSolrInputDocumentAsXMLWithoutAddRoot() throws IOException {
SolrInputDocument doc = new SolrInputDocument();
doc.addField("id", "MA147LL/A");
StringWriter writer = new StringWriter();
ClientUtils.writeXML(doc, writer);
String docAsXml = writer.toString();
executeInsertFor(docAsXml);
QueryResponse response = executeSolrQuery("id:MA147LL/A");
assertEquals(0, response.getStatus());
assertEquals(1, response.getResults().getNumFound());
}
@Test
public void testInsertSolrInputDocumentAsXMLWithAddRoot() throws IOException {
SolrInputDocument doc = new SolrInputDocument();
doc.addField("id", "MA147LL/A");
StringWriter writer = new StringWriter();
ClientUtils.writeXML(doc, writer);
String content = writer.toString();
String docAsXml = "<add>" + content + "</add>";
Map<String, Object> headers = Map.of(Exchange.CONTENT_TYPE, "text/xml");
executeInsertFor(docAsXml, headers);
QueryResponse response = executeSolrQuery("id:MA147LL/A");
assertEquals(0, response.getStatus());
assertEquals(1, response.getResults().getNumFound());
}
@Test
public void testInsertSolrInputDocument() {
SolrInputDocument doc = new SolrInputDocument();
doc.addField("id", "MA147LL/A");
executeInsertFor(doc);
QueryResponse response = executeSolrQuery("id:MA147LL/A");
assertEquals(0, response.getStatus());
assertEquals(1, response.getResults().getNumFound());
}
@Test
public void testInsertSolrInputDocumentList() {
List<SolrInputDocument> docList = new ArrayList<>(2);
SolrInputDocument doc = new SolrInputDocument();
doc.addField("id", "MA147LL/A");
docList.add(doc);
doc = new SolrInputDocument();
doc.addField("id", "KP147LL/A");
docList.add(doc);
executeInsertFor(docList);
QueryResponse response = executeSolrQuery("id:MA147LL/A");
assertEquals(0, response.getStatus());
assertEquals(1, response.getResults().getNumFound());
response = executeSolrQuery("id:KP147LL/A");
assertEquals(0, response.getStatus());
assertEquals(1, response.getResults().getNumFound());
response = executeSolrQuery("id:KP147LL/ABC");
assertEquals(0, response.getStatus());
assertEquals(0, response.getResults().getNumFound());
}
@Test
public void testInsertStreaming() {
// TODO rename method
ExchangeBuilder builder = ExchangeBuilder.anExchange(camelContext())
.withHeader("SolrField.id", "MA147LL/A");
executeInsert(builder.build());
QueryResponse response = executeSolrQuery("id:MA147LL/A");
assertEquals(0, response.getStatus());
assertEquals(1, response.getResults().getNumFound());
}
@Test
public void indexSingleDocumentOnlyWithId() {
ExchangeBuilder builder = ExchangeBuilder.anExchange(camelContext())
.withHeader("SolrField.id", "MA147LL/A");
executeInsert(builder.build());
// Check things were indexed.
QueryResponse response = executeSolrQuery("id:MA147LL/A");
assertEquals(0, response.getStatus());
assertEquals(1, response.getResults().getNumFound());
}
@Test
public void caughtSolrExceptionIsHandledElegantly() {
// empty request
Exchange exchange = executeInsertFor(null, Map.of(), false);
assertInstanceOf(org.apache.camel.InvalidPayloadException.class, exchange.getException());
}
@Test
public void setHeadersAsSolrFields() {
ExchangeBuilder builder = ExchangeBuilder.anExchange(camelContext())
.withBody("Body is ignored")
.withHeader("SolrField.id", "MA147LL/A")
.withHeader("SolrField.name_s", "Apple 60 GB iPod with Video Playback Black")
.withHeader("SolrField.manu_s", "Apple Computer Inc.");
executeInsert(builder.build());
QueryResponse response = executeSolrQuery("id:MA147LL/A");
assertEquals(0, response.getStatus());
assertEquals(1, response.getResults().getNumFound());
SolrDocument doc = response.getResults().get(0);
assertEquals("Apple 60 GB iPod with Video Playback Black", doc.getFieldValue("name_s"));
assertEquals("Apple Computer Inc.", doc.getFieldValue("manu_s"));
}
@Test
public void setMultiValuedFieldInHeader() {
String[] categories = { "electronics", "apple" };
ExchangeBuilder builder = ExchangeBuilder.anExchange(camelContext())
.withBody("Test body for iPod.")
.withHeader("SolrField.id", "MA147LL/A")
.withHeader("SolrField.cat", categories);
executeInsert(builder.build());
// Check things were indexed.
QueryResponse response = executeSolrQuery("id:MA147LL/A");
assertEquals(0, response.getStatus());
assertEquals(1, response.getResults().getNumFound());
SolrDocument doc = response.getResults().get(0);
assertArrayEquals(categories, ((List<?>) doc.getFieldValue("cat")).toArray());
}
@Test
public void indexDocumentsAndThenCommit() {
ExchangeBuilder builder = ExchangeBuilder.anExchange(camelContext())
.withHeader("SolrField.id", "MA147LL/A")
.withHeader("SolrField.name", "Apple 60 GB iPod with Video Playback Black")
.withHeader("SolrField.manu", "Apple Computer Inc.");
executeInsert(builder.build(), false);
QueryResponse response = executeSolrQuery("*:*");
assertEquals(0, response.getStatus());
assertEquals(0, response.getResults().getNumFound());
executeInsertFor(null);
QueryResponse afterCommitResponse = executeSolrQuery("*:*");
assertEquals(0, afterCommitResponse.getStatus());
assertEquals(1, afterCommitResponse.getResults().getNumFound());
}
@Test
public void indexWithAutoCommit() {
// new exchange - not autocommit route
ExchangeBuilder builder = ExchangeBuilder.anExchange(camelContext())
.withHeader("SolrField.content", "NO_AUTO_COMMIT");
executeInsert(DEFAULT_START_ENDPOINT, builder.build(), false);
// not committed
QueryResponse response = executeSolrQuery("*:*");
assertEquals(0, response.getStatus());
assertEquals(0, response.getResults().getNumFound());
// perform commit
executeInsertFor(null);
response = executeSolrQuery("*:*");
assertEquals(1, response.getResults().getNumFound());
// new exchange - autocommit route
builder = ExchangeBuilder.anExchange(camelContext())
.withHeader("SolrField.content", "AUTO_COMMIT");
executeInsert(DEFAULT_START_ENDPOINT_AUTO_COMMIT, builder.build(), false);
// should be committed
response = executeSolrQuery("*:*");
assertEquals(0, response.getStatus());
assertEquals(2, response.getResults().getNumFound());
}
@Test
public void invalidSolrParametersAreIgnored() {
ExchangeBuilder builder = ExchangeBuilder.anExchange(camelContext())
.withHeader("SolrField.id", "MA147LL/A")
.withHeader("SolrField.name", "Apple 60 GB iPod with Video Playback Black")
.withHeader("SolrParam.invalid-param", "this is ignored");
executeInsert(builder.build());
QueryResponse response = executeSolrQuery("*:*");
assertEquals(0, response.getStatus());
assertEquals(1, response.getResults().getNumFound());
}
@Test
public void queryDocumentsToCSVUpdateHandlerWithFileConsumer() throws Exception {
context.getRouteController().startRoute("file-route");
MockEndpoint mock = getMockEndpoint(DEFAULT_MOCK_ENDPOINT);
mock.setExpectedMessageCount(1);
mock.assertIsSatisfied();
QueryResponse response = executeSolrQuery("*:*");
assertEquals(0, response.getStatus());
assertEquals(10, response.getResults().getNumFound());
response = executeSolrQuery("id:0553573403");
SolrDocumentList list = response.getResults();
assertEquals("A Game of Thrones", list.get(0).getFieldValue("name_s"));
assertEquals(7.99, list.get(0).getFieldValue("price_d"));
context.getRouteController().stopRoute("file-route");
}
@Test
public void queryDocumentsToMap() {
solrEndpoint.getConfiguration().setRequestHandler("/update/csv");
ExchangeBuilder builder = ExchangeBuilder.anExchange(camelContext())
.withBody(new File("src/test/resources/data/books.csv"))
.withHeader(SolrConstants.PARAM_CONTENT_TYPE, "text/csv");
executeInsert(builder.build());
solrEndpoint.getConfiguration().setRequestHandler(null);
Map<String, String> map = new HashMap<>();
map.put("id", "0553579934");
map.put("cat", "Test");
map.put("name", "Test");
map.put("price", "7.99");
map.put("author_t", "Test");
map.put("series_t", "Test");
map.put("sequence_i", "3");
map.put("genre_s", "Test");
builder = ExchangeBuilder.anExchange(camelContext())
.withBody(map);
executeInsert(builder.build());
QueryResponse response = executeSolrQuery("id:0553579934");
assertEquals(0, response.getStatus());
assertEquals(1, response.getResults().getNumFound());
}
@Test
public void queryDocumentsToCSVUpdateHandlerWithoutParameters() {
ExchangeBuilder builder = ExchangeBuilder.anExchange(camelContext())
.withBody(new File("src/test/resources/data/books.csv"));
executeInsert(builder.build());
QueryResponse response = executeSolrQuery("*:*");
assertEquals(0, response.getStatus());
assertEquals(10, response.getResults().getNumFound());
response = executeSolrQuery("id:0553573403");
SolrDocumentList list = response.getResults();
assertEquals("A Game of Thrones", list.get(0).getFieldValue("name_s"));
assertEquals(7.99, list.get(0).getFieldValue("price_d"));
}
@Test
public void indexDocumentsToCSVUpdateHandlerWithParameters() {
solrEndpoint.getConfiguration().setRequestHandler("/update/csv");
ExchangeBuilder builder = ExchangeBuilder.anExchange(camelContext())
.withBody(new File("src/test/resources/data/books.csv"))
.withHeader(SolrConstants.PARAM_CONTENT_TYPE, "text/csv")
.withHeader("SolrParam.fieldnames", "id,cat,name,price,inStock,author_t,series_t,sequence_i,genre_s")
.withHeader("SolrParam.skip", "cat,sequence_i,genre_s")
.withHeader("SolrParam.skipLines", 1);
executeInsert(builder.build());
QueryResponse response = executeSolrQuery("*:*");
assertEquals(0, response.getStatus());
assertEquals(10, response.getResults().getNumFound());
SolrDocument doc = response.getResults().get(0);
assertFalse(doc.getFieldNames().contains("cat"));
}
@Test
@Disabled("The extraction Solr Module is not available in slim version of solr test infra.")
public void indexPDFDocumentToExtractingRequestHandler() {
solrEndpoint.getConfiguration().setRequestHandler("/update/extract");
ExchangeBuilder builder = ExchangeBuilder.anExchange(camelContext())
.withBody(new File("src/test/resources/data/tutorial.pdf"))
.withHeader("SolrParam.literal.id", "tutorial.pdf");
executeInsert(builder.build());
QueryResponse response = executeSolrQuery("*:*");
assertEquals(0, response.getStatus());
assertEquals(1, response.getResults().getNumFound());
SolrDocument doc = response.getResults().get(0);
assertEquals("Solr", doc.getFieldValue("subject"));
assertEquals("tutorial.pdf", doc.getFieldValue("id"));
assertEquals(List.of("application/pdf"), doc.getFieldValue("content_type"));
}
@Test
public void testCommit() {
//insert and verify
solrInsertTestEntry();
assertEquals(0, executeSolrQuery("id:" + TEST_ID).getResults().getNumFound(), "wrong number of entries found");
//commit
template.sendBodyAndHeaders("direct:start", null, SolrUtils.getHeadersForCommit());
//verify exists after commit
assertEquals(1, executeSolrQuery("id:" + TEST_ID).getResults().getNumFound(), "wrong number of entries found");
}
@Test
public void testSoftCommit() {
//insert and verify
solrInsertTestEntry();
assertEquals(0, executeSolrQuery("id:" + TEST_ID).getResults().getNumFound(), "wrong number of entries found");
//commit
template.sendBodyAndHeaders("direct:start", null, SolrUtils.getHeadersForCommit("softCommit"));
//verify exists after commit
assertEquals(1, executeSolrQuery("id:" + TEST_ID).getResults().getNumFound(), "wrong number of entries found");
}
@Test
public void testRollback() {
//insert and verify
solrInsertTestEntry();
assertEquals(0, executeSolrQuery("id:" + TEST_ID).getResults().getNumFound(), "wrong number of entries found");
//rollback
template.sendBodyAndHeaders("direct:start", null, SolrUtils.getHeadersForCommit("rollback"));
//verify after rollback
assertEquals(0, executeSolrQuery("id:" + TEST_ID).getResults().getNumFound(), "wrong number of entries found");
//commit
template.sendBodyAndHeaders("direct:start", null, SolrUtils.getHeadersForCommit());
//verify after commit (again)
assertEquals(0, executeSolrQuery("id:" + TEST_ID).getResults().getNumFound(), "wrong number of entries found");
}
@Test
public void testOptimize() {
//insert and verify
solrInsertTestEntry();
assertEquals(0, executeSolrQuery("id:" + TEST_ID).getResults().getNumFound(), "wrong number of entries found");
//optimize (be careful with this operation: it reorganizes your index!)
template.sendBodyAndHeaders("direct:start", null, SolrUtils.getHeadersForCommit("optimize"));
//verify exists after optimize
assertEquals(1, executeSolrQuery("id:" + TEST_ID).getResults().getNumFound(), "wrong number of entries found");
}
@Test
public void testAddBean() {
//add bean
SolrInsertAndDeleteTest.Item item = new Item();
item.id = TEST_ID;
item.categories = new String[] { "aaa", "bbb", "ccc" };
template.sendBodyAndHeaders(
"direct:start",
item,
Map.of(
SolrConstants.PARAM_OPERATION, SolrConstants.OPERATION_ADD_BEAN,
SolrConstants.HEADER_PARAM_PREFIX + "commit", "true"));
//verify
assertEquals(1, executeSolrQuery("id:" + TEST_ID).getResults().getNumFound(), "wrong number of entries found");
}
@Test
public void testAddBeans() {
List<SolrInsertAndDeleteTest.Item> beans = new ArrayList<>();
//add bean1
SolrInsertAndDeleteTest.Item item1 = new Item();
item1.id = TEST_ID;
item1.categories = new String[] { "aaa", "bbb", "ccc" };
beans.add(item1);
//add bean2
SolrInsertAndDeleteTest.Item item2 = new Item();
item2.id = TEST_ID2;
item2.categories = new String[] { "aaa", "bbb", "ccc" };
beans.add(item2);
template.sendBodyAndHeaders(
"direct:start",
beans,
Map.of(
SolrConstants.PARAM_OPERATION, SolrConstants.OPERATION_ADD_BEAN,
SolrConstants.HEADER_PARAM_PREFIX + "commit", "true"));
//verify
assertEquals(1, executeSolrQuery("id:" + TEST_ID).getResults().getNumFound(), "wrong number of entries found");
assertEquals(1, executeSolrQuery("id:" + TEST_ID2).getResults().getNumFound(), "wrong number of entries found");
assertEquals(2, executeSolrQuery("*:*").getResults().getNumFound(), "wrong number of entries found");
}
public static
|
SolrInsertAndDeleteTest
|
java
|
apache__flink
|
flink-runtime/src/main/java/org/apache/flink/runtime/rest/messages/ProfilingInfo.java
|
{
"start": 1120,
"end": 5321
}
|
class ____ implements ResponseBody, Serializable {
private static final long serialVersionUID = 1L;
public static final String FIELD_NAME_STATUS = "status";
public static final String FIELD_NAME_MODE = "mode";
public static final String FIELD_NAME_TRIGGER_TIME = "triggerTime";
public static final String FIELD_NAME_FINISHED_TIME = "finishedTime";
public static final String FIELD_NAME_DURATION = "duration";
public static final String FIELD_NAME_MESSAGE = "message";
public static final String FIELD_NAME_OUTPUT_FILE = "outputFile";
@JsonProperty(FIELD_NAME_STATUS)
private ProfilingStatus status;
@JsonProperty(FIELD_NAME_MODE)
private ProfilingMode mode;
@JsonProperty(FIELD_NAME_TRIGGER_TIME)
private Long triggerTime;
@JsonProperty(FIELD_NAME_FINISHED_TIME)
private Long finishedTime;
@JsonProperty(FIELD_NAME_DURATION)
private Long duration;
@JsonProperty(FIELD_NAME_MESSAGE)
private String message;
@JsonProperty(FIELD_NAME_OUTPUT_FILE)
private String outputFile;
private ProfilingInfo() {}
private ProfilingInfo(
ProfilingStatus status,
ProfilingMode mode,
long triggerTime,
long finishedTime,
long duration,
String message,
String outputFile) {
this.status = status;
this.mode = mode;
this.triggerTime = triggerTime;
this.finishedTime = finishedTime;
this.duration = duration;
this.message = message;
this.outputFile = outputFile;
}
public ProfilingInfo fail(String message) {
this.status = ProfilingStatus.FAILED;
this.message = message;
this.finishedTime = System.currentTimeMillis();
return this;
}
public ProfilingInfo success(String outputFile) {
this.status = ProfilingStatus.FINISHED;
this.finishedTime = System.currentTimeMillis();
this.outputFile = outputFile;
this.message = "Profiling Successful";
return this;
}
public ProfilingStatus getStatus() {
return status;
}
public ProfilingMode getProfilingMode() {
return mode;
}
public Long getDuration() {
return duration;
}
public String getMessage() {
return message;
}
public Long getFinishedTime() {
return finishedTime;
}
public String getOutputFile() {
return outputFile;
}
public long getTriggerTime() {
return triggerTime;
}
public static ProfilingInfo create(long duration, ProfilingMode mode) {
ProfilingInfo profilingInfo = new ProfilingInfo();
profilingInfo.mode = mode;
profilingInfo.triggerTime = System.currentTimeMillis();
profilingInfo.status = ProfilingStatus.RUNNING;
profilingInfo.duration = duration;
return profilingInfo;
}
@JsonCreator
public static ProfilingInfo create(
@JsonProperty(FIELD_NAME_STATUS) ProfilingStatus status,
@JsonProperty(FIELD_NAME_MODE) ProfilingMode mode,
@JsonProperty(FIELD_NAME_TRIGGER_TIME) long triggerTime,
@JsonProperty(FIELD_NAME_FINISHED_TIME) long finishedTime,
@JsonProperty(FIELD_NAME_DURATION) long duration,
@JsonProperty(FIELD_NAME_MESSAGE) String message,
@JsonProperty(FIELD_NAME_OUTPUT_FILE) String outputPath) {
return new ProfilingInfo(
status, mode, triggerTime, finishedTime, duration, message, outputPath);
}
@Override
public String toString() {
return "ProfilingInfo{"
+ "status="
+ status
+ ", mode="
+ mode
+ ", triggerTime="
+ triggerTime
+ ", finishedTime="
+ finishedTime
+ ", duration="
+ duration
+ ", message='"
+ message
+ '\''
+ ", outputFile='"
+ outputFile
+ '\''
+ '}';
}
/** Profiling Status. */
public
|
ProfilingInfo
|
java
|
square__retrofit
|
retrofit/java-test/src/test/java/retrofit2/RequestFactoryTest.java
|
{
"start": 75389,
"end": 75570
}
|
class ____.lang.Object (parameter 'parts')\n"
+ " for method Example.method");
}
}
@Test
public void multipartPartMapRejectsOkHttpPartValues() {
|
java
|
java
|
apache__dubbo
|
dubbo-test/dubbo-test-check/src/main/java/org/apache/dubbo/test/check/DubboTestChecker.java
|
{
"start": 2177,
"end": 2851
}
|
class ____ implements TestExecutionListener {
private static final String CONFIG_CHECK_MODE = "checkMode";
private static final String CONFIG_CHECK_THREADS = "checkThreads";
private static final String CONFIG_THREAD_DUMP_WAIT_TIME = "threadDumpWaitTime";
private static final String CONFIG_FORCE_DESTROY = "forceDestroy";
private static final String CONFIG_REPORT_FILE = "reportFile";
private static final String MODE_CLASS = "class";
private static final String MODE_METHOD = "method";
private static final Logger logger = LoggerFactory.getLogger(DubboTestChecker.class);
/**
* check mode:
* class - check after
|
DubboTestChecker
|
java
|
google__dagger
|
javatests/dagger/internal/codegen/XExecutableTypesTest.java
|
{
"start": 12080,
"end": 12349
}
|
class ____ {",
" void toList(Collection<Object> c) { throw new RuntimeException(); }",
"}");
Source bar =
CompilerTests.javaSource(
"test.Bar",
"package test;",
"import java.util.*;",
"
|
Foo
|
java
|
apache__flink
|
flink-tests/src/test/java/org/apache/flink/test/cancelling/MapCancelingITCase.java
|
{
"start": 1328,
"end": 2526
}
|
class ____ extends CancelingTestBase {
@Test
public void testMapCancelling() throws Exception {
executeTask(new IdentityMapper<Integer>());
}
@Test
public void testSlowMapCancelling() throws Exception {
executeTask(new DelayingIdentityMapper<Integer>());
}
@Test
public void testMapWithLongCancellingResponse() throws Exception {
executeTask(new LongCancelTimeIdentityMapper<Integer>());
}
@Test
public void testMapPriorToFirstRecordReading() throws Exception {
executeTask(new StuckInOpenIdentityMapper<Integer>());
}
public void executeTask(MapFunction<Integer, Integer> mapper) throws Exception {
StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
env.createInput(new InfiniteIntegerInputFormat(false))
.map(mapper)
.sinkTo(new DiscardingSink<>());
env.setParallelism(PARALLELISM);
runAndCancelJob(env.getStreamGraph().getJobGraph(), 5 * 1000, 10 * 1000);
}
// --------------------------------------------------------------------------------------------
private static final
|
MapCancelingITCase
|
java
|
elastic__elasticsearch
|
build-conventions/src/main/java/org/elasticsearch/gradle/internal/checkstyle/SnippetLengthCheck.java
|
{
"start": 1141,
"end": 3004
}
|
class ____ extends AbstractFileSetCheck {
private static final Pattern START = Pattern.compile("^( *)//\\s*tag::(?!noformat)(.+?)\\s*$", Pattern.MULTILINE);
private int max;
/**
* The maximum width that a snippet may have.
*/
public void setMax(int max) {
this.max = max;
}
@Override
protected void processFiltered(File file, FileText fileText) throws CheckstyleException {
checkFile((line, message) -> log(line, message), max, fileText.toLinesArray());
}
static void checkFile(BiConsumer<Integer, String> log, int max, String... lineArray) {
LineItr lines = new LineItr(Arrays.asList(lineArray).iterator());
while (lines.hasNext()) {
Matcher m = START.matcher(lines.next());
if (m.matches()) {
checkSnippet(log, max, lines, m.group(1), m.group(2));
}
}
}
private static void checkSnippet(BiConsumer<Integer, String> log, int max, LineItr lines, String leadingSpaces, String name) {
Pattern end = Pattern.compile("^ *//\\s*end::" + name + "\\s*$", Pattern.MULTILINE);
while (lines.hasNext()) {
String line = lines.next();
if (end.matcher(line).matches()) {
return;
}
if (line.isEmpty()) {
continue;
}
if (false == line.startsWith(leadingSpaces)) {
log.accept(lines.lastLineNumber, "snippet line should start with [" + leadingSpaces + "]");
continue;
}
int width = line.length() - leadingSpaces.length();
if (width > max) {
log.accept(lines.lastLineNumber, "snippet line should be no more than [" + max + "] characters but was [" + width + "]");
}
}
}
private static
|
SnippetLengthCheck
|
java
|
spring-projects__spring-boot
|
build-plugin/spring-boot-gradle-plugin/src/test/java/com/example/boottestrun/classpath/BootTestRunClasspathApplication.java
|
{
"start": 1019,
"end": 1255
}
|
class ____ = " + BootTestRunClasspathApplication.class.getName());
int i = 1;
for (String entry : ManagementFactory.getRuntimeMXBean().getClassPath().split(File.pathSeparator)) {
System.out.println(i++ + ". " + entry);
}
}
}
|
name
|
java
|
quarkusio__quarkus
|
integration-tests/jackson/src/main/java/io/quarkus/it/jackson/RegisteredPojoModelResource.java
|
{
"start": 345,
"end": 687
}
|
class ____ {
@POST
@Consumes(MediaType.APPLICATION_JSON)
@Produces(MediaType.APPLICATION_JSON)
public Response newModel(String body) throws IOException {
RegisteredPojoModel model = RegisteredPojoModel.fromJson(body);
return Response.status(201).entity(model.toJson()).build();
}
}
|
RegisteredPojoModelResource
|
java
|
spring-projects__spring-framework
|
spring-core/src/main/java/org/springframework/util/xml/AbstractXMLEventReader.java
|
{
"start": 858,
"end": 975
}
|
class ____ {@code XMLEventReader}s.
*
* @author Arjen Poutsma
* @author Juergen Hoeller
* @since 5.0
*/
abstract
|
for
|
java
|
elastic__elasticsearch
|
x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.java
|
{
"start": 284528,
"end": 286843
}
|
class ____ extends ParserRuleContext {
public TerminalNode EQ() { return getToken(EsqlBaseParser.EQ, 0); }
public TerminalNode NEQ() { return getToken(EsqlBaseParser.NEQ, 0); }
public TerminalNode LT() { return getToken(EsqlBaseParser.LT, 0); }
public TerminalNode LTE() { return getToken(EsqlBaseParser.LTE, 0); }
public TerminalNode GT() { return getToken(EsqlBaseParser.GT, 0); }
public TerminalNode GTE() { return getToken(EsqlBaseParser.GTE, 0); }
@SuppressWarnings("this-escape")
public ComparisonOperatorContext(ParserRuleContext parent, int invokingState) {
super(parent, invokingState);
}
@Override public int getRuleIndex() { return RULE_comparisonOperator; }
@Override
public void enterRule(ParseTreeListener listener) {
if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).enterComparisonOperator(this);
}
@Override
public void exitRule(ParseTreeListener listener) {
if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).exitComparisonOperator(this);
}
@Override
public <T> T accept(ParseTreeVisitor<? extends T> visitor) {
if ( visitor instanceof EsqlBaseParserVisitor ) return ((EsqlBaseParserVisitor<? extends T>)visitor).visitComparisonOperator(this);
else return visitor.visitChildren(this);
}
}
public final ComparisonOperatorContext comparisonOperator() throws RecognitionException {
ComparisonOperatorContext _localctx = new ComparisonOperatorContext(_ctx, getState());
enterRule(_localctx, 180, RULE_comparisonOperator);
int _la;
try {
enterOuterAlt(_localctx, 1);
{
setState(930);
_la = _input.LA(1);
if ( !(((((_la - 80)) & ~0x3f) == 0 && ((1L << (_la - 80)) & 125L) != 0)) ) {
_errHandler.recoverInline(this);
}
else {
if ( _input.LA(1)==Token.EOF ) matchedEOF = true;
_errHandler.reportMatch(this);
consume();
}
}
}
catch (RecognitionException re) {
_localctx.exception = re;
_errHandler.reportError(this, re);
_errHandler.recover(this, re);
}
finally {
exitRule();
}
return _localctx;
}
@SuppressWarnings("CheckReturnValue")
public static
|
ComparisonOperatorContext
|
java
|
apache__camel
|
core/camel-core/src/test/java/org/apache/camel/processor/async/AsyncNestedTripleChoiceIssueTest.java
|
{
"start": 979,
"end": 3545
}
|
class ____ extends ContextTestSupport {
@Test
public void testNestedChoiceVeryBig() throws Exception {
getMockEndpoint("mock:low").expectedMessageCount(0);
getMockEndpoint("mock:med").expectedMessageCount(0);
getMockEndpoint("mock:big").expectedMessageCount(0);
getMockEndpoint("mock:verybig").expectedMessageCount(1);
template.sendBodyAndHeader("direct:start", "Hello World", "foo", 10);
assertMockEndpointsSatisfied();
}
@Test
public void testNestedChoiceBig() throws Exception {
getMockEndpoint("mock:low").expectedMessageCount(0);
getMockEndpoint("mock:med").expectedMessageCount(0);
getMockEndpoint("mock:big").expectedMessageCount(1);
getMockEndpoint("mock:verybig").expectedMessageCount(0);
template.sendBodyAndHeader("direct:start", "Hello World", "foo", 7);
assertMockEndpointsSatisfied();
}
@Test
public void testNestedChoiceMed() throws Exception {
getMockEndpoint("mock:low").expectedMessageCount(0);
getMockEndpoint("mock:med").expectedMessageCount(1);
getMockEndpoint("mock:big").expectedMessageCount(0);
getMockEndpoint("mock:verybig").expectedMessageCount(0);
template.sendBodyAndHeader("direct:start", "Hello World", "foo", 4);
assertMockEndpointsSatisfied();
}
@Test
public void testNestedChoiceLow() throws Exception {
getMockEndpoint("mock:low").expectedMessageCount(1);
getMockEndpoint("mock:med").expectedMessageCount(0);
getMockEndpoint("mock:big").expectedMessageCount(0);
getMockEndpoint("mock:verybig").expectedMessageCount(0);
template.sendBodyAndHeader("direct:start", "Hello World", "foo", 1);
assertMockEndpointsSatisfied();
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
context.addComponent("async", new MyAsyncComponent());
from("direct:start").choice().when(header("foo").isGreaterThan(1)).to("async:bye:camel").choice()
.when(header("foo").isGreaterThan(5)).to("async:bye:camel2")
.choice().when(header("foo").isGreaterThan(7)).to("mock:verybig").otherwise().to("mock:big").end().endChoice()
.otherwise().to("mock:med").end().endChoice().otherwise()
.to("mock:low").end();
}
};
}
}
|
AsyncNestedTripleChoiceIssueTest
|
java
|
apache__flink
|
flink-runtime/src/test/java/org/apache/flink/runtime/security/contexts/IncompatibleTestSecurityContextFactory.java
|
{
"start": 1191,
"end": 1648
}
|
class ____ implements SecurityContextFactory {
@Override
public boolean isCompatibleWith(SecurityConfiguration securityConfig) {
return false;
}
@Override
public SecurityContext createContext(SecurityConfiguration securityConfig)
throws SecurityContextInitializeException {
return new TestSecurityContext();
}
/** Test security context class. */
public static
|
IncompatibleTestSecurityContextFactory
|
java
|
quarkusio__quarkus
|
extensions/hibernate-validator/deployment/src/test/java/io/quarkus/hibernate/validator/test/config/ConfigMappingInvalidTest.java
|
{
"start": 3687,
"end": 3807
}
|
interface ____ {
@Max(3)
String host();
List<@NotEmpty String> services();
}
public
|
Server
|
java
|
spring-projects__spring-framework
|
spring-jdbc/src/main/java/org/springframework/jdbc/datasource/ShardingKeyProvider.java
|
{
"start": 791,
"end": 1248
}
|
interface ____ determining sharding keys which are used to establish direct
* shard connections in the context of sharded databases. This is used as a callback
* for providing the current sharding key (plus optionally a super sharding key) in
* {@link org.springframework.jdbc.datasource.ShardingKeyDataSourceAdapter}.
*
* <p>Can be used as a functional interface (for example, with a lambda expression) for a simple
* sharding key, or as a two-method
|
for
|
java
|
spring-projects__spring-security
|
web/src/main/java/org/springframework/security/web/csrf/CsrfFilter.java
|
{
"start": 7898,
"end": 8313
}
|
class ____ implements RequestMatcher {
private final HashSet<String> allowedMethods = new HashSet<>(Arrays.asList("GET", "HEAD", "TRACE", "OPTIONS"));
@Override
public boolean matches(HttpServletRequest request) {
return !this.allowedMethods.contains(request.getMethod());
}
@Override
public String toString() {
return "IsNotHttpMethod " + this.allowedMethods;
}
}
}
|
DefaultRequiresCsrfMatcher
|
java
|
apache__logging-log4j2
|
log4j-core/src/main/java/org/apache/logging/log4j/core/impl/Log4jContextFactory.java
|
{
"start": 6694,
"end": 7732
}
|
class ____ of the caller.
* @param loader The ClassLoader to use or null.
* @param currentContext If true returns the current Context, if false returns the Context appropriate
* for the caller if a more appropriate Context can be determined.
* @param externalContext An external context (such as a ServletContext) to be associated with the LoggerContext.
* @return The LoggerContext.
*/
@Override
public LoggerContext getContext(
final String fqcn, final ClassLoader loader, final Object externalContext, final boolean currentContext) {
final LoggerContext ctx = selector.getContext(fqcn, loader, currentContext);
if (externalContext != null && ctx.getExternalContext() == null) {
ctx.setExternalContext(externalContext);
}
if (ctx.getState() == LifeCycle.State.INITIALIZED) {
ctx.start();
}
return ctx;
}
/**
* Loads the LoggerContext using the ContextSelector.
* @param fqcn The fully qualified
|
name
|
java
|
apache__camel
|
core/camel-core/src/test/java/org/apache/camel/issues/ExceptionCamel4022Test.java
|
{
"start": 2063,
"end": 3230
}
|
class ____ implements Processor {
private final String msg;
public MyExceptionThrower(String msg) {
this.msg = msg;
}
@Override
public void process(Exchange exchange) {
throw new IllegalArgumentException(msg);
}
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
public void configure() {
// DLC
errorHandler(deadLetterChannel("mock:dlc").redeliveryDelay(0).maximumRedeliveries(3));
// onException that does NOT handle the exception
onException(Exception.class).logStackTrace(false).process(new MyExceptionThrower("Damn Again"))
.to("mock:onexception");
// route
from("direct:start").to("mock:a").to("direct:intermediate").to("mock:result2");
// 2nd route
from("direct:intermediate").to("mock:b").setBody(constant("<some-value/>"))
.process(new MyExceptionThrower("Damn")).to("mock:intermediate");
}
};
}
}
|
MyExceptionThrower
|
java
|
apache__camel
|
components/camel-datasonnet/src/main/java/org/apache/camel/language/datasonnet/CML.java
|
{
"start": 1370,
"end": 4620
}
|
class ____ extends Library {
private static final CML INSTANCE = new CML();
private final ThreadLocal<Exchange> exchange = new ThreadLocal<>();
private CML() {
}
public static CML getInstance() {
return INSTANCE;
}
public ThreadLocal<Exchange> getExchange() {
return exchange;
}
@Override
public String namespace() {
return "cml";
}
@Override
public Set<String> libsonnets() {
return Collections.emptySet();
}
@Override
public Map<String, Val.Func> functions(DataFormatService dataFormats, Header header) {
Map<String, Val.Func> answer = new HashMap<>();
answer.put("properties", makeSimpleFunc(
Collections.singletonList("key"), //parameters list
params -> properties(params.get(0))));
answer.put("header", makeSimpleFunc(
Collections.singletonList("key"), //parameters list
params -> header(params.get(0), dataFormats)));
answer.put("variable", makeSimpleFunc(
Collections.singletonList("key"), //parameters list
params -> variable(params.get(0), dataFormats)));
answer.put("exchangeProperty", makeSimpleFunc(
Collections.singletonList("key"), //parameters list
params -> exchangeProperty(params.get(0), dataFormats)));
return answer;
}
public Map<String, Val.Obj> modules(DataFormatService dataFormats, Header header) {
return Collections.emptyMap();
}
private Val properties(Val key) {
if (key instanceof Val.Str) {
return new Val.Str(exchange.get().getContext().resolvePropertyPlaceholders("{{" + ((Val.Str) key).value() + "}}"));
}
throw new IllegalArgumentException("Expected String got: " + key.prettyName());
}
private Val header(Val key, DataFormatService dataformats) {
if (key instanceof Val.Str) {
return valFrom(exchange.get().getMessage().getHeader(((Val.Str) key).value()), dataformats);
}
throw new IllegalArgumentException("Expected String got: " + key.prettyName());
}
private Val variable(Val key, DataFormatService dataformats) {
if (key instanceof Val.Str) {
return valFrom(exchange.get().getVariable(((Val.Str) key).value()), dataformats);
}
throw new IllegalArgumentException("Expected String got: " + key.prettyName());
}
private Val exchangeProperty(Val key, DataFormatService dataformats) {
if (key instanceof Val.Str) {
return valFrom(exchange.get().getProperty(((Val.Str) key).value()), dataformats);
}
throw new IllegalArgumentException("Expected String got: " + key.prettyName());
}
private Val valFrom(Object obj, DataFormatService dataformats) {
Document doc;
if (obj instanceof Document) {
doc = (Document) obj;
} else {
doc = new DefaultDocument(obj, MediaTypes.APPLICATION_JAVA);
}
try {
return Materializer.reverse(dataformats.mandatoryRead(doc));
} catch (PluginException e) {
throw new IllegalStateException(e);
}
}
}
|
CML
|
java
|
apache__camel
|
components/camel-consul/src/test/java/org/apache/camel/component/consul/MockAgentTest.java
|
{
"start": 1478,
"end": 2552
}
|
class ____ extends CamelTestSupport {
@Test
public void testMockAgent() throws Exception {
MockEndpoint mockConsulAgent = getMockEndpoint("mock:consul:agent");
AdviceWith.adviceWith(context, "servicesRoute", a -> {
a.mockEndpointsAndSkip("consul:agent*");
});
mockConsulAgent.returnReplyBody(constant(ImmutableMap.of("foo-1", ImmutableService.builder()
.id("foo-1")
.service("foo")
.address("localhost")
.port(80)
.build())));
@SuppressWarnings("unchecked")
Map<String, Service> result = fluentTemplate.to("direct:start").request(Map.class);
Assertions.assertEquals(1, result.size());
}
@Override
protected RoutesBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
from("direct:start").routeId("servicesRoute").to("consul:agent?action=" + ConsulAgentActions.SERVICES);
}
};
}
}
|
MockAgentTest
|
java
|
spring-projects__spring-security
|
taglibs/src/test/java/org/springframework/security/taglibs/csrf/CsrfInputTagTests.java
|
{
"start": 974,
"end": 1976
}
|
class ____ {
public CsrfInputTag tag;
@BeforeEach
public void setUp() {
this.tag = new CsrfInputTag();
}
@Test
public void handleTokenReturnsHiddenInput() {
CsrfToken token = new DefaultCsrfToken("X-Csrf-Token", "_csrf", "abc123def456ghi789");
String value = this.tag.handleToken(token);
assertThat(value).as("The returned value should not be null.").isNotNull();
assertThat(value).withFailMessage("The output is not correct.")
.isEqualTo("<input type=\"hidden\" name=\"_csrf\" value=\"abc123def456ghi789\" />");
}
@Test
public void handleTokenReturnsHiddenInputDifferentTokenValue() {
CsrfToken token = new DefaultCsrfToken("X-Csrf-Token", "csrfParameter", "fooBarBazQux");
String value = this.tag.handleToken(token);
assertThat(value).as("The returned value should not be null.").isNotNull();
assertThat(value).withFailMessage("The output is not correct.")
.isEqualTo("<input type=\"hidden\" name=\"csrfParameter\" value=\"fooBarBazQux\" />");
}
}
|
CsrfInputTagTests
|
java
|
quarkusio__quarkus
|
integration-tests/jpa-mariadb/src/main/java/io/quarkus/it/jpa/mariadb/Person.java
|
{
"start": 410,
"end": 1393
}
|
class ____ {
private long id;
private String name;
private Address address;
public Person() {
}
public Person(long id, String name, Address address) {
this.id = id;
this.name = name;
this.address = address;
}
@Id
@GeneratedValue
public long getId() {
return id;
}
public void setId(long id) {
this.id = id;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
@ManyToOne(cascade = CascadeType.ALL, fetch = FetchType.LAZY)
public Address getAddress() {
return address;
}
public void setAddress(Address address) {
this.address = address;
}
public void describeFully(StringBuilder sb) {
sb.append("Person with id=").append(id).append(", name='").append(name).append("', address { ");
getAddress().describeFully(sb);
sb.append(" }");
}
}
|
Person
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/bytecode/enhancement/detached/collection/DetachedNonJoinedCollectionInitializationJoinFetchTest.java
|
{
"start": 4696,
"end": 5039
}
|
class ____ {
@Id
private Long id;
private String name;
@ManyToMany(fetch = FetchType.EAGER)
@OrderColumn
@Fetch( FetchMode.SELECT )
private List<EntityB> b = new ArrayList<>();
public List<EntityB> getB() {
return b;
}
public void setB(List<EntityB> b) {
this.b = b;
}
}
@Entity(name = "EntityB")
static
|
EntityA
|
java
|
apache__camel
|
components/camel-platform-http-vertx/src/test/java/org/apache/camel/component/platform/http/vertx/VertxPlatformHttpsProxyTest.java
|
{
"start": 1975,
"end": 5484
}
|
class ____ {
private final int port = AvailablePortFinder.getNextAvailable();
private final WireMockServer wireMockServer = new WireMockServer(
options().httpsPort(port)
.httpDisabled(true)
.keystorePath("proxy/keystore.p12")
.keystorePassword("changeit")
.keyManagerPassword("changeit"));
@BeforeEach
void before() {
wireMockServer.stubFor(get(urlPathEqualTo("/"))
.willReturn(aResponse()
.withBody(
"{\"message\": \"Hello World\"}")));
wireMockServer.start();
}
@AfterEach
void after() {
if (wireMockServer != null) {
wireMockServer.stop();
}
}
@Test
void testProxy() throws Exception {
final CamelContext context = VertxPlatformHttpEngineTest.createCamelContext();
try {
context.getRegistry().bind("sslContextParameters", sslContextParameters());
context.getRegistry().bind("x509HostnameVerifier", x509HostnameVerifier());
context.addRoutes(new RouteBuilder() {
@Override
public void configure() {
from("platform-http:proxy")
.toD("https://${headers." + Exchange.HTTP_HOST
+ "}?bridgeEndpoint=true&sslContextParameters=#sslContextParameters&x509HostnameVerifier=#x509HostnameVerifier");
}
});
context.start();
// URI of proxy created with platform HTTP component
final var proxyURI = "http://localhost:" + RestAssured.port;
// In order to make sure that RestAssured don't perform a CONNECT instead of a GET, we do trick with http
// if we want to do test manually from a terminal we use the real HTTPS address
final var originURI = "http://localhost:" + port;
given()
.proxy(proxyURI)
.contentType(ContentType.JSON)
.when().get(originURI)
.then()
.statusCode(200)
.body(containsString("{\"message\": \"Hello World\"}"));
} finally {
context.stop();
}
}
public SSLContextParameters sslContextParameters() {
SSLContextParameters sslContextParameters = new SSLContextParameters();
KeyManagersParameters keyManagersParameters = new KeyManagersParameters();
KeyStoreParameters keyStore = new KeyStoreParameters();
keyStore.setPassword("changeit");
keyStore.setResource("proxy/keystore.p12");
keyManagersParameters.setKeyPassword("changeit");
keyManagersParameters.setKeyStore(keyStore);
sslContextParameters.setKeyManagers(keyManagersParameters);
KeyStoreParameters truststoreParameters = new KeyStoreParameters();
truststoreParameters.setResource("proxy/keystore.p12");
truststoreParameters.setPassword("changeit");
TrustManagersParameters trustManagersParameters = new TrustManagersParameters();
trustManagersParameters.setKeyStore(truststoreParameters);
sslContextParameters.setTrustManagers(trustManagersParameters);
return sslContextParameters;
}
public NoopHostnameVerifier x509HostnameVerifier() {
return NoopHostnameVerifier.INSTANCE;
}
}
|
VertxPlatformHttpsProxyTest
|
java
|
apache__flink
|
flink-runtime/src/main/java/org/apache/flink/streaming/util/typeutils/ScalaProductFieldAccessorFactory.java
|
{
"start": 1067,
"end": 2403
}
|
interface ____ {
/** Returns a product {@link FieldAccessor} that does not support recursion. */
<T, F> FieldAccessor<T, F> createSimpleProductFieldAccessor(
int pos, TypeInformation<T> typeInfo, ExecutionConfig config);
/** Returns a product {@link FieldAccessor} that does support recursion. */
<T, R, F> FieldAccessor<T, F> createRecursiveProductFieldAccessor(
int pos,
TypeInformation<T> typeInfo,
FieldAccessor<R, F> innerAccessor,
ExecutionConfig config);
/**
* Loads the implementation, if it is accessible.
*
* @param log Logger to be used in case the loading fails
* @return Loaded implementation, if it is accessible.
*/
static ScalaProductFieldAccessorFactory load(Logger log) {
try {
final Object factory =
Class.forName(
"org.apache.flink.streaming.util.typeutils.DefaultScalaProductFieldAccessorFactory")
.getDeclaredConstructor()
.newInstance();
return (ScalaProductFieldAccessorFactory) factory;
} catch (Exception e) {
log.debug("Unable to load Scala API extension.", e);
return null;
}
}
}
|
ScalaProductFieldAccessorFactory
|
java
|
apache__hadoop
|
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/RMAppNodeUpdateEvent.java
|
{
"start": 1061,
"end": 1119
}
|
class ____ extends RMAppEvent {
public
|
RMAppNodeUpdateEvent
|
java
|
apache__flink
|
flink-runtime/src/test/java/org/apache/flink/runtime/checkpoint/CheckpointCoordinatorTestingUtils.java
|
{
"start": 5073,
"end": 23679
}
|
class ____ {
public static OperatorStateHandle generatePartitionableStateHandle(
JobVertexID jobVertexID,
int index,
int namedStates,
int partitionsPerState,
boolean rawState)
throws IOException {
Map<String, List<? extends Serializable>> statesListsMap = new HashMap<>(namedStates);
for (int i = 0; i < namedStates; ++i) {
List<Integer> testStatesLists = new ArrayList<>(partitionsPerState);
// generate state
int seed = jobVertexID.hashCode() * index + i * namedStates;
if (rawState) {
seed = (seed + 1) * 31;
}
Random random = new Random(seed);
for (int j = 0; j < partitionsPerState; ++j) {
int simulatedStateValue = random.nextInt();
testStatesLists.add(simulatedStateValue);
}
statesListsMap.put("state-" + i, testStatesLists);
}
return generatePartitionableStateHandle(statesListsMap);
}
static ChainedStateHandle<OperatorStateHandle> generateChainedPartitionableStateHandle(
JobVertexID jobVertexID,
int index,
int namedStates,
int partitionsPerState,
boolean rawState)
throws IOException {
Map<String, List<? extends Serializable>> statesListsMap = new HashMap<>(namedStates);
for (int i = 0; i < namedStates; ++i) {
List<Integer> testStatesLists = new ArrayList<>(partitionsPerState);
// generate state
int seed = jobVertexID.hashCode() * index + i * namedStates;
if (rawState) {
seed = (seed + 1) * 31;
}
Random random = new Random(seed);
for (int j = 0; j < partitionsPerState; ++j) {
int simulatedStateValue = random.nextInt();
testStatesLists.add(simulatedStateValue);
}
statesListsMap.put("state-" + i, testStatesLists);
}
return ChainedStateHandle.wrapSingleHandle(
generatePartitionableStateHandle(statesListsMap));
}
static OperatorStateHandle generatePartitionableStateHandle(
Map<String, List<? extends Serializable>> states) throws IOException {
List<List<? extends Serializable>> namedStateSerializables = new ArrayList<>(states.size());
for (Map.Entry<String, List<? extends Serializable>> entry : states.entrySet()) {
namedStateSerializables.add(entry.getValue());
}
Tuple2<byte[], List<long[]>> serializationWithOffsets =
serializeTogetherAndTrackOffsets(namedStateSerializables);
Map<String, OperatorStateHandle.StateMetaInfo> offsetsMap = new HashMap<>(states.size());
int idx = 0;
for (Map.Entry<String, List<? extends Serializable>> entry : states.entrySet()) {
offsetsMap.put(
entry.getKey(),
new OperatorStateHandle.StateMetaInfo(
serializationWithOffsets.f1.get(idx),
OperatorStateHandle.Mode.SPLIT_DISTRIBUTE));
++idx;
}
return new OperatorStreamStateHandle(
offsetsMap, generateByteStreamStateHandle(serializationWithOffsets.f0));
}
private static ByteStreamStateHandle generateByteStreamStateHandle(byte[] bytes) {
return new ByteStreamStateHandle(String.valueOf(UUID.randomUUID()), bytes);
}
static Tuple2<byte[], List<long[]>> serializeTogetherAndTrackOffsets(
List<List<? extends Serializable>> serializables) throws IOException {
List<long[]> offsets = new ArrayList<>(serializables.size());
List<byte[]> serializedGroupValues = new ArrayList<>();
int runningGroupsOffset = 0;
for (List<? extends Serializable> list : serializables) {
long[] currentOffsets = new long[list.size()];
offsets.add(currentOffsets);
for (int i = 0; i < list.size(); ++i) {
currentOffsets[i] = runningGroupsOffset;
byte[] serializedValue = InstantiationUtil.serializeObject(list.get(i));
serializedGroupValues.add(serializedValue);
runningGroupsOffset += serializedValue.length;
}
}
// write all generated values in a single byte array, which is index by
// groupOffsetsInFinalByteArray
byte[] allSerializedValuesConcatenated = new byte[runningGroupsOffset];
runningGroupsOffset = 0;
for (byte[] serializedGroupValue : serializedGroupValues) {
System.arraycopy(
serializedGroupValue,
0,
allSerializedValuesConcatenated,
runningGroupsOffset,
serializedGroupValue.length);
runningGroupsOffset += serializedGroupValue.length;
}
return new Tuple2<>(allSerializedValuesConcatenated, offsets);
}
public static void verifyStateRestore(ExecutionJobVertex executionJobVertex) throws Exception {
verifyStateRestore(
executionJobVertex.getJobVertexId(),
executionJobVertex,
StateAssignmentOperation.createKeyGroupPartitions(
executionJobVertex.getMaxParallelism(),
executionJobVertex.getParallelism()));
}
public static void verifyStateRestore(
JobVertexID jobVertexID,
ExecutionJobVertex executionJobVertex,
List<KeyGroupRange> keyGroupPartitions)
throws Exception {
for (int i = 0; i < executionJobVertex.getParallelism(); i++) {
JobManagerTaskRestore taskRestore =
executionJobVertex
.getTaskVertices()[i]
.getCurrentExecutionAttempt()
.getTaskRestore();
Assert.assertEquals(1L, taskRestore.getRestoreCheckpointId());
TaskStateSnapshot stateSnapshot = taskRestore.getTaskStateSnapshot();
OperatorSubtaskState operatorState =
stateSnapshot.getSubtaskStateByOperatorID(
OperatorID.fromJobVertexID(jobVertexID));
ChainedStateHandle<OperatorStateHandle> expectedOpStateBackend =
generateChainedPartitionableStateHandle(jobVertexID, i, 2, 8, false);
assertTrue(
CommonTestUtils.isStreamContentEqual(
expectedOpStateBackend.get(0).openInputStream(),
operatorState
.getManagedOperatorState()
.iterator()
.next()
.openInputStream()));
KeyGroupsStateHandle expectPartitionedKeyGroupState =
generateKeyGroupState(jobVertexID, keyGroupPartitions.get(i), false);
compareKeyedState(
Collections.singletonList(expectPartitionedKeyGroupState),
operatorState.getManagedKeyedState());
}
}
static void compareKeyedState(
Collection<KeyGroupsStateHandle> expectPartitionedKeyGroupState,
Collection<? extends KeyedStateHandle> actualPartitionedKeyGroupState)
throws Exception {
KeyGroupsStateHandle expectedHeadOpKeyGroupStateHandle =
expectPartitionedKeyGroupState.iterator().next();
int expectedTotalKeyGroups =
expectedHeadOpKeyGroupStateHandle.getKeyGroupRange().getNumberOfKeyGroups();
int actualTotalKeyGroups = 0;
for (KeyedStateHandle keyedStateHandle : actualPartitionedKeyGroupState) {
assertTrue(keyedStateHandle instanceof KeyGroupsStateHandle);
actualTotalKeyGroups += keyedStateHandle.getKeyGroupRange().getNumberOfKeyGroups();
}
assertEquals(expectedTotalKeyGroups, actualTotalKeyGroups);
try (FSDataInputStream inputStream = expectedHeadOpKeyGroupStateHandle.openInputStream()) {
for (int groupId : expectedHeadOpKeyGroupStateHandle.getKeyGroupRange()) {
long offset = expectedHeadOpKeyGroupStateHandle.getOffsetForKeyGroup(groupId);
inputStream.seek(offset);
int expectedKeyGroupState =
InstantiationUtil.deserializeObject(
inputStream, Thread.currentThread().getContextClassLoader());
for (KeyedStateHandle oneActualKeyedStateHandle : actualPartitionedKeyGroupState) {
assertTrue(oneActualKeyedStateHandle instanceof KeyGroupsStateHandle);
KeyGroupsStateHandle oneActualKeyGroupStateHandle =
(KeyGroupsStateHandle) oneActualKeyedStateHandle;
if (oneActualKeyGroupStateHandle.getKeyGroupRange().contains(groupId)) {
long actualOffset =
oneActualKeyGroupStateHandle.getOffsetForKeyGroup(groupId);
try (FSDataInputStream actualInputStream =
oneActualKeyGroupStateHandle.openInputStream()) {
actualInputStream.seek(actualOffset);
int actualGroupState =
InstantiationUtil.deserializeObject(
actualInputStream,
Thread.currentThread().getContextClassLoader());
assertEquals(expectedKeyGroupState, actualGroupState);
}
}
}
}
}
}
static void comparePartitionableState(
List<ChainedStateHandle<OperatorStateHandle>> expected,
List<List<Collection<OperatorStateHandle>>> actual)
throws Exception {
List<String> expectedResult = new ArrayList<>();
for (ChainedStateHandle<OperatorStateHandle> chainedStateHandle : expected) {
for (int i = 0; i < chainedStateHandle.getLength(); ++i) {
OperatorStateHandle operatorStateHandle = chainedStateHandle.get(i);
collectResult(i, operatorStateHandle, expectedResult);
}
}
Collections.sort(expectedResult);
List<String> actualResult = new ArrayList<>();
for (List<Collection<OperatorStateHandle>> collectionList : actual) {
if (collectionList != null) {
for (int i = 0; i < collectionList.size(); ++i) {
Collection<OperatorStateHandle> stateHandles = collectionList.get(i);
Assert.assertNotNull(stateHandles);
for (OperatorStateHandle operatorStateHandle : stateHandles) {
collectResult(i, operatorStateHandle, actualResult);
}
}
}
}
Collections.sort(actualResult);
Assert.assertEquals(expectedResult, actualResult);
}
static void collectResult(
int opIdx, OperatorStateHandle operatorStateHandle, List<String> resultCollector)
throws Exception {
try (FSDataInputStream in = operatorStateHandle.openInputStream()) {
for (Map.Entry<String, OperatorStateHandle.StateMetaInfo> entry :
operatorStateHandle.getStateNameToPartitionOffsets().entrySet()) {
for (long offset : entry.getValue().getOffsets()) {
in.seek(offset);
Integer state =
InstantiationUtil.deserializeObject(
in, Thread.currentThread().getContextClassLoader());
resultCollector.add(opIdx + " : " + entry.getKey() + " : " + state);
}
}
}
}
static TaskStateSnapshot mockSubtaskState(
JobVertexID jobVertexID, int index, KeyGroupRange keyGroupRange) throws IOException {
OperatorStateHandle partitionableState =
generatePartitionableStateHandle(jobVertexID, index, 2, 8, false);
KeyGroupsStateHandle partitionedKeyGroupState =
generateKeyGroupState(jobVertexID, keyGroupRange, false);
TaskStateSnapshot subtaskStates = spy(new TaskStateSnapshot());
OperatorSubtaskState subtaskState =
spy(
OperatorSubtaskState.builder()
.setManagedOperatorState(partitionableState)
.setManagedKeyedState(partitionedKeyGroupState)
.build());
subtaskStates.putSubtaskStateByOperatorID(
OperatorID.fromJobVertexID(jobVertexID), subtaskState);
return subtaskStates;
}
public static KeyGroupsStateHandle generateKeyGroupState(
JobVertexID jobVertexID, KeyGroupRange keyGroupPartition, boolean rawState)
throws IOException {
List<Integer> testStatesLists = new ArrayList<>(keyGroupPartition.getNumberOfKeyGroups());
// generate state for one keygroup
for (int keyGroupIndex : keyGroupPartition) {
int vertexHash = jobVertexID.hashCode();
int seed =
rawState ? (vertexHash * (31 + keyGroupIndex)) : (vertexHash + keyGroupIndex);
Random random = new Random(seed);
int simulatedStateValue = random.nextInt();
testStatesLists.add(simulatedStateValue);
}
return generateKeyGroupState(keyGroupPartition, testStatesLists);
}
public static KeyGroupsStateHandle generateKeyGroupState(
KeyGroupRange keyGroupRange, List<? extends Serializable> states) throws IOException {
Preconditions.checkArgument(keyGroupRange.getNumberOfKeyGroups() == states.size());
Tuple2<byte[], List<long[]>> serializedDataWithOffsets =
serializeTogetherAndTrackOffsets(
Collections.<List<? extends Serializable>>singletonList(states));
KeyGroupRangeOffsets keyGroupRangeOffsets =
new KeyGroupRangeOffsets(keyGroupRange, serializedDataWithOffsets.f1.get(0));
ByteStreamStateHandle allSerializedStatesHandle =
generateByteStreamStateHandle(serializedDataWithOffsets.f0);
return new KeyGroupsStateHandle(keyGroupRangeOffsets, allSerializedStatesHandle);
}
public static Tuple2<List<StateObject>, OperatorSubtaskState>
generateSampleOperatorSubtaskState() throws IOException {
JobVertexID jobVertexID = new JobVertexID();
int index = 0;
Random random = new Random();
OperatorStateHandle managedOpHandle =
generatePartitionableStateHandle(jobVertexID, index, 2, 8, false);
OperatorStateHandle rawOpHandle =
generatePartitionableStateHandle(jobVertexID, index, 2, 8, true);
KeyedStateHandle managedKeyedHandle =
generateKeyGroupState(jobVertexID, new KeyGroupRange(0, random.nextInt(12)), false);
KeyedStateHandle rawKeyedHandle =
generateKeyGroupState(jobVertexID, new KeyGroupRange(0, random.nextInt(10)), true);
InputChannelStateHandle inputChannelStateHandle =
createNewInputChannelStateHandle(3, random);
ResultSubpartitionStateHandle resultSubpartitionStateHandle =
createNewResultSubpartitionStateHandle(3, random);
OperatorSubtaskState operatorSubtaskState =
OperatorSubtaskState.builder()
.setManagedOperatorState(managedOpHandle)
.setRawOperatorState(rawOpHandle)
.setManagedKeyedState(managedKeyedHandle)
.setRawKeyedState(rawKeyedHandle)
.setInputChannelState(
StateObjectCollection.singleton(inputChannelStateHandle))
.setResultSubpartitionState(
StateObjectCollection.singleton(resultSubpartitionStateHandle))
.setInputRescalingDescriptor(
InflightDataRescalingDescriptorUtil.rescalingDescriptor(
new int[1],
new RescaleMappings[0],
Collections.singleton(1)))
.setOutputRescalingDescriptor(
InflightDataRescalingDescriptorUtil.rescalingDescriptor(
new int[1],
new RescaleMappings[0],
Collections.singleton(2)))
.build();
return new Tuple2<>(
Arrays.asList(
managedOpHandle,
rawOpHandle,
managedKeyedHandle,
rawKeyedHandle,
inputChannelStateHandle,
resultSubpartitionStateHandle),
operatorSubtaskState);
}
public static TaskStateSnapshot createSnapshotWithUnionListState(
File stateFile, OperatorID operatorId, boolean isTaskFinished) throws IOException {
TaskStateSnapshot taskStateSnapshot = new TaskStateSnapshot(1, isTaskFinished);
taskStateSnapshot.putSubtaskStateByOperatorID(
operatorId, createSubtaskStateWithUnionListState(stateFile));
return taskStateSnapshot;
}
public static OperatorSubtaskState createSubtaskStateWithUnionListState(File stateFile) {
return OperatorSubtaskState.builder()
.setManagedOperatorState(
new OperatorStreamStateHandle(
Collections.singletonMap(
"test",
new OperatorStateHandle.StateMetaInfo(
new long[0], OperatorStateHandle.Mode.UNION)),
new FileStateHandle(new Path(stateFile.getAbsolutePath()), 0L)))
.build();
}
static
|
CheckpointCoordinatorTestingUtils
|
java
|
apache__flink
|
flink-core/src/test/java/org/apache/flink/api/java/typeutils/TypeInfoFactoryTest.java
|
{
"start": 17748,
"end": 18085
}
|
class ____ extends TypeInfoFactory<IntLike> {
@Override
@SuppressWarnings("unchecked")
public TypeInformation<IntLike> createTypeInfo(
Type t, Map<String, TypeInformation<?>> genericParams) {
return (TypeInformation) INT_TYPE_INFO;
}
}
public static
|
IntLikeTypeInfoFactory
|
java
|
spring-projects__spring-framework
|
spring-test/src/main/java/org/springframework/test/context/transaction/TransactionalTestExecutionListener.java
|
{
"start": 3786,
"end": 3923
}
|
class ____ will be run within a transaction. Test
* methods that are <em>not</em> annotated with {@code @Transactional} (at the
*
|
hierarchy
|
java
|
micronaut-projects__micronaut-core
|
router/src/main/java/io/micronaut/web/router/RouteBuilder.java
|
{
"start": 10455,
"end": 14605
}
|
class ____ ID. The URI route is built by the configured {@link UriNamingStrategy}.</p>
*
* @param type The class
* @param id The route id
* @return The route
*/
default UriRoute GET(Class<?> type, PropertyConvention id) {
return GET(getUriNamingStrategy().resolveUri(type, id), type, MethodConvention.SHOW.methodName(), Object.class);
}
/**
* <p>Route the specified URI template to the specified target.</p>
*
* <p>The number of variables in the template should match the number of method arguments</p>
*
* @param uri The URI
* @param method The method
* @return The route
*/
default UriRoute GET(String uri, ExecutableMethod<?, ?> method) {
return GET(uri, method.getDeclaringType(), method.getMethodName(), method.getArgumentTypes());
}
/**
* <p>Route the specified URI template to the specified target.</p>
*
* <p>The number of variables in the template should match the number of method arguments</p>
*
* @param beanDefinition The bean definition
* @param uri The URI
* @param method The method
* @return The route
*/
default UriRoute GET(String uri, BeanDefinition<?> beanDefinition, ExecutableMethod<?, ?> method) {
return GET(uri, beanDefinition.getBeanType(), method.getMethodName(), method.getArgumentTypes());
}
/**
* <p>Route the specified URI template to the specified target.</p>
*
* <p>The number of variables in the template should match the number of method arguments</p>
*
* @param uri The URI
* @param target The target
* @param method The method
* @param parameterTypes The parameter types for the target method
* @return The route
*/
UriRoute GET(String uri, Object target, String method, Class<?>... parameterTypes);
/**
* <p>Route the specified URI template to the specified target.</p>
*
* <p>The number of variables in the template should match the number of method arguments</p>
*
* @param uri The URI
* @param type The type
* @param method The method
* @param parameterTypes The parameter types for the target method
* @return The route
*/
UriRoute GET(String uri, Class<?> type, String method, Class<?>... parameterTypes);
/**
* Route the specified URI to the specified target for an HTTP POST. Since the method to execute is not
* specified "index" is used by default.
*
* @param uri The URI
* @param target The target object
* @param parameterTypes The parameter types for the target method
* @return The route
*/
default UriRoute POST(String uri, Object target, Class<?>... parameterTypes) {
return POST(uri, target, MethodConvention.SAVE.methodName(), parameterTypes);
}
/**
* <p>Route to the specified object. The URI route is built by the configured {@link UriNamingStrategy}.</p>
*
* @param target The object
* @return The route
*/
default UriRoute POST(Object target) {
Class<?> type = target.getClass();
return POST(getUriNamingStrategy().resolveUri(type), target);
}
/**
* <p>Route to the specified object and ID. The URI route is built by the configured {@link UriNamingStrategy}.</p>
*
* @param target The object
* @param id The route id
* @return The route
*/
default UriRoute POST(Object target, PropertyConvention id) {
Class<?> type = target.getClass();
return POST(getUriNamingStrategy().resolveUri(type, id), target, MethodConvention.UPDATE.methodName());
}
/**
* <p>Route to the specified class. The URI route is built by the configured {@link UriNamingStrategy}.</p>
*
* @param type The class
* @return The route
*/
default UriRoute POST(Class<?> type) {
return POST(getUriNamingStrategy().resolveUri(type), type, MethodConvention.SAVE.methodName());
}
/**
* <p>Route to the specified
|
and
|
java
|
apache__dubbo
|
dubbo-registry/dubbo-registry-api/src/main/java/org/apache/dubbo/registry/client/ServiceDiscoveryService.java
|
{
"start": 852,
"end": 1027
}
|
interface ____ {
void register() throws RuntimeException;
void update() throws RuntimeException;
void unregister() throws RuntimeException;
}
|
ServiceDiscoveryService
|
java
|
micronaut-projects__micronaut-core
|
http-server-netty/src/test/groovy/io/micronaut/docs/writable/TemplateController.java
|
{
"start": 1173,
"end": 2089
}
|
class ____ {
private final SimpleTemplateEngine templateEngine = new SimpleTemplateEngine();
private final Template template;
public TemplateController() {
template = initTemplate(); // <1>
}
@Get(value = "/welcome", produces = MediaType.TEXT_PLAIN)
Writable render() { // <2>
return writer -> template.make( // <3>
CollectionUtils.mapOf(
"firstName", "Fred",
"lastName", "Flintstone"
)
).writeTo(writer);
}
private Template initTemplate() {
Template template;
try {
template = templateEngine.createTemplate(
"Dear $firstName $lastName. Nice to meet you."
);
} catch (Exception e) {
throw new HttpServerException("Cannot create template");
}
return template;
}
}
//end::clazz[]
|
TemplateController
|
java
|
spring-projects__spring-data-jpa
|
spring-data-jpa/src/test/java/org/springframework/data/jpa/repository/query/EqlComplianceTests.java
|
{
"start": 1475,
"end": 21841
}
|
class ____ {
/**
* Parse the query using {@link EqlParser} then run it through the query-preserving {@link EqlQueryRenderer}.
*/
private static String parseWithoutChanges(String query) {
JpaQueryEnhancer.EqlQueryParser parser = JpaQueryEnhancer.EqlQueryParser.parseQuery(query);
return TokenRenderer.render(new EqlQueryRenderer().visit(parser.getContext()));
}
private void assertQuery(String query) {
String slimmedDownQuery = reduceWhitespace(query);
assertThat(parseWithoutChanges(slimmedDownQuery)).isEqualTo(slimmedDownQuery);
}
private String reduceWhitespace(String original) {
return original //
.replaceAll("[ \\t\\n]{1,}", " ") //
.trim();
}
@Test
void selectQueries() {
assertQuery("Select e FROM Employee e WHERE e.salary > 100000");
assertQuery("Select e FROM Employee e WHERE e.id = :id");
assertQuery("Select MAX(e.salary) FROM Employee e");
assertQuery("Select e.firstName FROM Employee e");
assertQuery("Select e.firstName, e.lastName FROM Employee e");
}
@Test
void selectClause() {
assertQuery("SELECT COUNT(e) FROM Employee e");
assertQuery("SELECT MAX(e.salary) FROM Employee e");
assertQuery("select sum(i.size.foo.bar.new) from Item i");
assertQuery("SELECT NEW com.acme.reports.EmpReport(e.firstName, e.lastName, e.salary) FROM Employee e");
}
@Test
void fromClause() {
assertQuery("SELECT e FROM Employee e");
assertQuery("SELECT e, a FROM Employee e, MailingAddress a WHERE e.address = a.address");
assertQuery("SELECT e FROM com.acme.Employee e");
}
@Test
void join() {
assertQuery("SELECT e FROM Employee e JOIN e.address a WHERE a.city = :city");
assertQuery("SELECT e FROM Employee e JOIN e.projects p JOIN e.projects p2 WHERE p.name = :p1 AND p2.name = :p2");
}
@Test
void joinFetch() {
assertQuery("SELECT e FROM Employee e JOIN FETCH e.address");
assertQuery("SELECT e FROM Employee e JOIN FETCH e.address a ORDER BY a.city");
assertQuery("SELECT e FROM Employee e JOIN FETCH e.address AS a ORDER BY a.city");
}
@Test
void leftJoin() {
assertQuery("SELECT e FROM Employee e LEFT JOIN e.address a ORDER BY a.city");
}
@Test
void on() {
assertQuery("SELECT e FROM Employee e LEFT JOIN e.address ON a.city = :city");
assertQuery("SELECT e FROM Employee e LEFT JOIN MailingAddress a ON e.address = a.address");
}
@Test
void subselectsInFromClause() {
assertQuery(
"SELECT e, c.city FROM Employee e, (SELECT DISTINCT a.city FROM Address a) c WHERE e.address.city = c.city");
}
@Test // GH-3277
void numericLiterals() {
assertQuery("SELECT e FROM Employee e WHERE e.id = 1234");
assertQuery("SELECT e FROM Employee e WHERE e.id = 1234L");
assertQuery("SELECT s FROM Stat s WHERE s.ratio > 3.14");
assertQuery("SELECT s FROM Stat s WHERE s.ratio > 3.14F");
assertQuery("SELECT s FROM Stat s WHERE s.ratio > 3.14e32D");
}
@Test // GH-3308
void newWithStrings() {
assertQuery("select new com.example.demo.SampleObject(se.id, se.sampleValue, \"java\") from SampleEntity se");
}
@Test
void orderByClause() {
assertQuery("SELECT e FROM Employee e ORDER BY e.lastName ASC, e.firstName ASC"); // Typo in EQL document
assertQuery("SELECT e FROM Employee e ORDER BY UPPER(e.lastName)");
assertQuery("SELECT e FROM Employee e LEFT JOIN e.manager m ORDER BY m.lastName NULLS FIRST");
assertQuery("SELECT e FROM Employee e ORDER BY e.address");
}
@Test
void groupByClause() {
assertQuery("SELECT AVG(e.salary), e.address.city FROM Employee e GROUP BY e.address.city");
assertQuery("SELECT AVG(e.salary), e.address.city FROM Employee e GROUP BY e.address.city ORDER BY AVG(e.salary)");
assertQuery("SELECT e, COUNT(p) FROM Employee e LEFT JOIN e.projects p GROUP BY e");
}
@Test
void havingClause() {
assertQuery(
"SELECT AVG(e.salary), e.address.city FROM Employee e GROUP BY e.address.city HAVING AVG(e.salary) > 100000");
}
@Test
void union() {
assertQuery("""
SELECT MAX(e.salary) FROM Employee e WHERE e.address.city = :city1
UNION SELECT MAX(e.salary) FROM Employee e WHERE e.address.city = :city2
""");
assertQuery("""
SELECT e FROM Employee e JOIN e.phones p WHERE p.areaCode = :areaCode1
INTERSECT SELECT e FROM Employee e JOIN e.phones p WHERE p.areaCode = :areaCode2
""");
assertQuery("""
SELECT e FROM Employee e
EXCEPT SELECT e FROM Employee e WHERE e.salary > e.manager.salary
""");
}
@Test
void whereClause() {
// TBD
}
@Test
void updateQueries() {
assertQuery("UPDATE Employee e SET e.salary = 60000 WHERE e.salary = 50000");
}
@Test
void deleteQueries() {
assertQuery("DELETE FROM Employee e WHERE e.department IS NULL");
}
@Test
void literals() {
assertQuery("SELECT e FROM Employee e WHERE e.name = 'Bob'");
assertQuery("SELECT e FROM Employee e WHERE e.id = 1234");
assertQuery("SELECT e FROM Employee e WHERE e.id = 1234L");
assertQuery("SELECT s FROM Stat s WHERE s.ratio > 3.14F");
assertQuery("SELECT s FROM Stat s WHERE s.ratio > 3.14e32D");
assertQuery("SELECT e FROM Employee e WHERE e.active = TRUE");
assertQuery("SELECT e FROM Employee e WHERE e.startDate = {d'2012-01-03'}");
assertQuery("SELECT e FROM Employee e WHERE e.startTime = {t'09:00:00'}");
assertQuery("SELECT e FROM Employee e WHERE e.version = {ts'2012-01-03 09:00:00.000000001'}");
assertQuery("SELECT e FROM Employee e WHERE e.gender = org.acme.Gender.MALE");
assertQuery("UPDATE Employee e SET e.manager = NULL WHERE e.manager = :manager");
}
@Test
void functionsInSelect() {
assertQuery("SELECT e.salary - 1000 FROM Employee e");
assertQuery("SELECT e.salary + 1000 FROM Employee e");
assertQuery("SELECT e.salary * 2 FROM Employee e");
assertQuery("SELECT e.salary * 2.0 FROM Employee e");
assertQuery("SELECT e.salary / 2 FROM Employee e");
assertQuery("SELECT e.salary / 2.0 FROM Employee e");
assertQuery("SELECT ABS(e.salary - e.manager.salary) FROM Employee e");
assertQuery(
"select e from Employee e where case e.firstName when 'Bob' then 'Robert' when 'Jill' then 'Gillian' else '' end = 'Robert'");
assertQuery(
"select case when e.firstName = 'Bob' then 'Robert' when e.firstName = 'Jill' then 'Gillian' else '' end from Employee e where e.firstName = 'Bob' or e.firstName = 'Jill'");
assertQuery(
"select e from Employee e where case when e.firstName = 'Bob' then 'Robert' when e.firstName = 'Jill' then 'Gillian' else '' end = 'Robert'");
assertQuery("SELECT COALESCE(e.salary, 0) FROM Employee e");
assertQuery("SELECT CONCAT(e.firstName, ' ', e.lastName) FROM Employee e");
assertQuery("SELECT e.name, CURRENT_DATE FROM Employee e");
assertQuery("SELECT e.name, CURRENT_TIME FROM Employee e");
assertQuery("SELECT e.name, CURRENT_TIMESTAMP FROM Employee e");
assertQuery("SELECT LENGTH(e.lastName) FROM Employee e");
assertQuery("SELECT LOWER(e.lastName) FROM Employee e");
assertQuery("SELECT MOD(e.hoursWorked, 8) FROM Employee e");
assertQuery("SELECT NULLIF(e.salary, 0) FROM Employee e");
assertQuery("SELECT SQRT(o.RESULT) FROM Output o");
assertQuery("SELECT SUBSTRING(e.lastName, 0, 2) FROM Employee e");
assertQuery(
"SELECT TRIM(TRAILING FROM e.lastName), TRIM(e.lastName), TRIM(LEADING '-' FROM e.lastName) FROM Employee e");
assertQuery("SELECT UPPER(e.lastName) FROM Employee e");
assertQuery("SELECT CAST(e.salary NUMERIC(10, 2)) FROM Employee e");
assertQuery("SELECT EXTRACT(YEAR FROM e.startDate) FROM Employee e");
assertQuery("SELECT e FROM Employee e WHERE e.lastName REGEXP '^Dr.*'");
assertQuery("SELECT e FROM Employee e WHERE e.lastName REGEXP '^Dr\\.*'");
}
@Test
void functionsInWhere() {
assertQuery("SELECT e FROM Employee e WHERE e.salary - 1000 > 0");
assertQuery("SELECT e FROM Employee e WHERE e.salary + 1000 > 0");
assertQuery("SELECT e FROM Employee e WHERE e.salary * 2 > 0");
assertQuery("SELECT e FROM Employee e WHERE e.salary * 2.0 > 0.0");
assertQuery("SELECT e FROM Employee e WHERE e.salary / 2 > 0");
assertQuery("SELECT e FROM Employee e WHERE e.salary / 2.0 > 0.0");
assertQuery("SELECT e FROM Employee e WHERE ABS(e.salary - e.manager.salary) > 0");
assertQuery("SELECT e FROM Employee e WHERE COALESCE(e.salary, 0) > 0");
assertQuery("SELECT e FROM Employee e WHERE CONCAT(e.firstName, ' ', e.lastName) = 'Bilbo'");
assertQuery("SELECT e FROM Employee e WHERE CURRENT_DATE > CURRENT_TIME");
assertQuery("SELECT e FROM Employee e WHERE CURRENT_TIME > CURRENT_TIMESTAMP");
assertQuery("SELECT e FROM Employee e WHERE LENGTH(e.lastName) > 0");
assertQuery("SELECT e FROM Employee e WHERE LOWER(e.lastName) = 'bilbo'");
assertQuery("SELECT e FROM Employee e WHERE MOD(e.hoursWorked, 8) > 0");
assertQuery("SELECT e FROM Employee e WHERE NULLIF(e.salary, 0) is null");
assertQuery("SELECT e FROM Employee e WHERE SQRT(o.RESULT) > 0.0");
assertQuery("SELECT e FROM Employee e WHERE SUBSTRING(e.lastName, 0, 2) = 'Bilbo'");
assertQuery("SELECT e FROM Employee e WHERE TRIM(TRAILING FROM e.lastName) = 'Bilbo'");
assertQuery("SELECT e FROM Employee e WHERE TRIM(e.lastName) = 'Bilbo'");
assertQuery("SELECT e FROM Employee e WHERE TRIM(LEADING '-' FROM e.lastName) = 'Bilbo'");
assertQuery("SELECT e FROM Employee e WHERE UPPER(e.lastName) = 'BILBO'");
assertQuery("SELECT e FROM Employee e WHERE CAST(e.salary NUMERIC(10, 2)) > 0.0");
assertQuery("SELECT e FROM Employee e WHERE EXTRACT(YEAR FROM e.startDate) = '2023'");
}
@Test
void functionsInOrderBy() {
assertQuery("SELECT e FROM Employee e ORDER BY e.salary - 1000");
assertQuery("SELECT e FROM Employee e ORDER BY e.salary + 1000");
assertQuery("SELECT e FROM Employee e ORDER BY e.salary * 2");
assertQuery("SELECT e FROM Employee e ORDER BY e.salary * 2.0");
assertQuery("SELECT e FROM Employee e ORDER BY e.salary / 2");
assertQuery("SELECT e FROM Employee e ORDER BY e.salary / 2.0");
assertQuery("SELECT e FROM Employee e ORDER BY ABS(e.salary - e.manager.salary)");
assertQuery("SELECT e FROM Employee e ORDER BY COALESCE(e.salary, 0)");
assertQuery("SELECT e FROM Employee e ORDER BY CONCAT(e.firstName, ' ', e.lastName)");
assertQuery("SELECT e FROM Employee e ORDER BY CURRENT_DATE");
assertQuery("SELECT e FROM Employee e ORDER BY CURRENT_TIME");
assertQuery("SELECT e FROM Employee e ORDER BY CURRENT_TIMESTAMP");
assertQuery("SELECT e FROM Employee e ORDER BY LENGTH(e.lastName)");
assertQuery("SELECT e FROM Employee e ORDER BY LOWER(e.lastName)");
assertQuery("SELECT e FROM Employee e ORDER BY MOD(e.hoursWorked, 8)");
assertQuery("SELECT e FROM Employee e ORDER BY NULLIF(e.salary, 0)");
assertQuery("SELECT e FROM Employee e ORDER BY SQRT(o.RESULT)");
assertQuery("SELECT e FROM Employee e ORDER BY SUBSTRING(e.lastName, 0, 2)");
assertQuery("SELECT e FROM Employee e ORDER BY TRIM(TRAILING FROM e.lastName)");
assertQuery("SELECT e FROM Employee e ORDER BY TRIM(e.lastName)");
assertQuery("SELECT e FROM Employee e ORDER BY TRIM(LEADING '-' FROM e.lastName)");
assertQuery("SELECT e FROM Employee e ORDER BY UPPER(e.lastName)");
assertQuery("SELECT e FROM Employee e ORDER BY CAST(e.salary NUMERIC(10, 2))");
assertQuery("SELECT e FROM Employee e ORDER BY EXTRACT(YEAR FROM e.startDate)");
}
@Test
void functionsInGroupBy() {
assertQuery("SELECT e FROM Employee e GROUP BY e.salary - 1000");
assertQuery("SELECT e FROM Employee e GROUP BY e.salary + 1000");
assertQuery("SELECT e FROM Employee e GROUP BY e.salary * 2");
assertQuery("SELECT e FROM Employee e GROUP BY e.salary * 2.0");
assertQuery("SELECT e FROM Employee e GROUP BY e.salary / 2");
assertQuery("SELECT e FROM Employee e GROUP BY e.salary / 2.0");
assertQuery("SELECT e FROM Employee e GROUP BY ABS(e.salary - e.manager.salary)");
assertQuery("SELECT e FROM Employee e GROUP BY COALESCE(e.salary, 0)");
assertQuery("SELECT e FROM Employee e GROUP BY CONCAT(e.firstName, ' ', e.lastName)");
assertQuery("SELECT e FROM Employee e GROUP BY CURRENT_DATE");
assertQuery("SELECT e FROM Employee e GROUP BY CURRENT_TIME");
assertQuery("SELECT e FROM Employee e GROUP BY CURRENT_TIMESTAMP");
assertQuery("SELECT e FROM Employee e GROUP BY LENGTH(e.lastName)");
assertQuery("SELECT e FROM Employee e GROUP BY LOWER(e.lastName)");
assertQuery("SELECT e FROM Employee e GROUP BY MOD(e.hoursWorked, 8)");
assertQuery("SELECT e FROM Employee e GROUP BY NULLIF(e.salary, 0)");
assertQuery("SELECT e FROM Employee e GROUP BY SQRT(o.RESULT)");
assertQuery("SELECT e FROM Employee e GROUP BY SUBSTRING(e.lastName, 0, 2)");
assertQuery("SELECT e FROM Employee e GROUP BY TRIM(TRAILING FROM e.lastName)");
assertQuery("SELECT e FROM Employee e GROUP BY TRIM(e.lastName)");
assertQuery("SELECT e FROM Employee e GROUP BY TRIM(LEADING '-' FROM e.lastName)");
assertQuery("SELECT e FROM Employee e GROUP BY UPPER(e.lastName)");
assertQuery("SELECT e FROM Employee e GROUP BY CAST(e.salary NUMERIC(10, 2))");
assertQuery("SELECT e FROM Employee e GROUP BY EXTRACT(YEAR FROM e.startDate)");
}
@Test
void functionsInHaving() {
assertQuery("SELECT e FROM Employee e GROUP BY e.salary HAVING e.salary - 1000 > 0");
assertQuery("SELECT e FROM Employee e GROUP BY e.salary HAVING e.salary + 1000 > 0");
assertQuery("SELECT e FROM Employee e GROUP BY e.salary HAVING e.salary * 2 > 0");
assertQuery("SELECT e FROM Employee e GROUP BY e.salary HAVING e.salary * 2.0 > 0.0");
assertQuery("SELECT e FROM Employee e GROUP BY e.salary HAVING e.salary / 2 > 0");
assertQuery("SELECT e FROM Employee e GROUP BY e.salary HAVING e.salary / 2.0 > 0.0");
assertQuery("SELECT e FROM Employee e GROUP BY e.salary HAVING ABS(e.salary - e.manager.salary) > 0");
assertQuery("SELECT e FROM Employee e GROUP BY e.salary HAVING COALESCE(e.salary, 0) > 0");
assertQuery("SELECT e FROM Employee e GROUP BY e.salary HAVING CONCAT(e.firstName, ' ', e.lastName) = 'Bilbo'");
assertQuery("SELECT e FROM Employee e GROUP BY e.salary HAVING CURRENT_DATE > CURRENT_TIME");
assertQuery("SELECT e FROM Employee e GROUP BY e.salary HAVING CURRENT_TIME > CURRENT_TIMESTAMP");
assertQuery("SELECT e FROM Employee e GROUP BY e.salary HAVING LENGTH(e.lastName) > 0");
assertQuery("SELECT e FROM Employee e GROUP BY e.salary HAVING LOWER(e.lastName) = 'bilbo'");
assertQuery("SELECT e FROM Employee e GROUP BY e.salary HAVING MOD(e.hoursWorked, 8) > 0");
assertQuery("SELECT e FROM Employee e GROUP BY e.salary HAVING NULLIF(e.salary, 0) is null");
assertQuery("SELECT e FROM Employee e GROUP BY e.salary HAVING SQRT(o.RESULT) > 0.0");
assertQuery("SELECT e FROM Employee e GROUP BY e.salary HAVING SUBSTRING(e.lastName, 0, 2) = 'Bilbo'");
assertQuery("SELECT e FROM Employee e GROUP BY e.salary HAVING TRIM(TRAILING FROM e.lastName) = 'Bilbo'");
assertQuery("SELECT e FROM Employee e GROUP BY e.salary HAVING TRIM(e.lastName) = 'Bilbo'");
assertQuery("SELECT e FROM Employee e GROUP BY e.salary HAVING TRIM(LEADING '-' FROM e.lastName) = 'Bilbo'");
assertQuery("SELECT e FROM Employee e GROUP BY e.salary HAVING UPPER(e.lastName) = 'BILBO'");
assertQuery("SELECT e FROM Employee e GROUP BY e.salary HAVING CAST(e.salary NUMERIC(10, 2)) > 0.0");
assertQuery("SELECT e FROM Employee e GROUP BY e.salary HAVING EXTRACT(YEAR FROM e.startDate) = '2023'");
}
@Test
void specialOperators() {
assertQuery("SELECT toDo FROM Employee e JOIN e.toDoList toDo WHERE INDEX(toDo) = 1");
assertQuery("SELECT p FROM Employee e JOIN e.priorities p WHERE KEY(p) = 'high'");
assertQuery("SELECT e FROM Employee e WHERE SIZE(e.managedEmployees) < 2");
assertQuery("SELECT e FROM Employee e WHERE SIZE(e.managedEmployees.new) < 2");
assertQuery("SELECT e FROM Employee e WHERE e.managedEmployees IS EMPTY");
assertQuery("SELECT e FROM Employee e WHERE e.managedEmployee.size.new IS EMPTY");
assertQuery("SELECT e FROM Employee e WHERE 'write code' MEMBER OF e.responsibilities");
assertQuery("SELECT e FROM Employee e WHERE 'write code' MEMBER OF e.responsibilities.size");
assertQuery("SELECT p FROM Project p WHERE TYPE(p) = LargeProject");
/**
* NOTE: The following query has been altered to properly align with EclipseLink test code despite NOT matching
* their ref docs. See https://github.com/eclipse-ee4j/eclipselink/issues/1949 for more details.
*/
assertQuery("SELECT e FROM Employee e JOIN TREAT(e.projects AS LargeProject) p WHERE p.budget > 1000000");
assertQuery("SELECT p FROM Phone p WHERE FUNCTION('TO_NUMBER', p.areaCode) > 613");
}
@Test
void eclipseLinkSpecialOperators() {
assertQuery("SELECT p FROM Phone p WHERE FUNC('TO_NUMBER', e.areaCode) > 613");
assertQuery("SELECT FUNC('YEAR', e.startDate) AS YEAR, COUNT(e) FROM Employee e GROUP BY YEAR");
assertQuery(
"SELECT a FROM Asset a, Geography geo WHERE geo.id = :id AND a.id IN :id_list AND FUNC('ST_INTERSECTS', a.geometry, geo.geometry) = 'TRUE'");
assertQuery(
"SELECT s FROM SimpleSpatial s WHERE FUNC('MDSYS.SDO_RELATE', s.jGeometry, :otherGeometry, :params) = 'TRUE' ORDER BY s.id ASC");
assertQuery("SELECT e FROM Employee e WHERE OPERATOR('ExtractXml', e.resume, '@years-experience') > 10");
}
@Test
void sql() {
assertQuery("SELECT p FROM Phone p WHERE SQL('CAST(? AS CHAR(3))', e.areaCode) = '613'");
assertQuery("SELECT SQL('EXTRACT(YEAR FROM ?)', e.startDate) AS YEAR, COUNT(e) FROM Employee e GROUP BY YEAR");
assertQuery("SELECT e FROM Employee e ORDER BY SQL('? NULLS FIRST', e.startDate)");
assertQuery("SELECT e FROM Employee e WHERE e.startDate = SQL('(SELECT SYSDATE FROM DUAL)')");
}
@Test
void column() {
assertQuery("SELECT e FROM Employee e WHERE COLUMN('MANAGER_ID', e) = :id");
assertQuery("SELECT e FROM Employee e WHERE COLUMN('ROWID', e) = :id");
}
@Test
void table() {
assertQuery(
"SELECT e, a.LAST_UPDATE_USER FROM Employee e, TABLE('AUDIT') a WHERE a.TABLE = 'EMPLOYEE' AND a.ROWID = COLUMN('ROWID', e)");
}
@Test // GH-3175
void coalesceFunctions() {
assertQuery("SELECT b FROM Bundle b WHERE coalesce(b.deleted, false) AND b.latestImport = true");
assertQuery("SELECT b FROM Bundle b WHERE NOT coalesce(b.deleted, false) AND b.latestImport = true");
}
@Test // GH-3314
void isNullAndIsNotNull() {
assertQuery("SELECT e FROM Employee e WHERE (e.active = null OR e.active = true)");
assertQuery("SELECT e FROM Employee e WHERE (e.active = NULL OR e.active = true)");
assertQuery("SELECT e FROM Employee e WHERE (e.active IS null OR e.active = true)");
assertQuery("SELECT e FROM Employee e WHERE (e.active IS NULL OR e.active = true)");
assertQuery("SELECT e FROM Employee e WHERE (e.active != null OR e.active = true)");
assertQuery("SELECT e FROM Employee e WHERE (e.active != NULL OR e.active = true)");
assertQuery("SELECT e FROM Employee e WHERE (e.active IS NOT null OR e.active = true)");
assertQuery("SELECT e FROM Employee e WHERE (e.active IS NOT NULL OR e.active = true)");
}
@Test // GH-3496
void lateralShouldBeAValidParameter() {
assertQuery("select e from Employee e where e.lateral = :_lateral");
assertQuery("select te from TestEntity te where te.lateral = :lateral");
}
@Test // GH-3136
void intersect() {
assertQuery("""
SELECT e FROM Employee e JOIN e.phones p WHERE p.areaCode = :areaCode1
INTERSECT SELECT e FROM Employee e JOIN e.phones p WHERE p.areaCode = :areaCode2
""");
}
@Test // GH-3136
void except() {
assertQuery("""
SELECT e FROM Employee e
EXCEPT SELECT e FROM Employee e WHERE e.salary > e.manager.salary
""");
}
@ParameterizedTest // GH-3136
@ValueSource(strings = { "STRING", "INTEGER", "FLOAT", "DOUBLE" })
void cast(String targetType) {
assertQuery("SELECT CAST(e.salary AS %s) FROM Employee e".formatted(targetType));
}
@ParameterizedTest // GH-3136
@ValueSource(strings = { "LEFT", "RIGHT" })
void leftRightStringFunctions(String keyword) {
assertQuery("SELECT %s(e.name, 3) FROM Employee e".formatted(keyword));
}
@Test // GH-3136
void replaceStringFunctions() {
assertQuery("SELECT REPLACE(e.name, 'o', 'a') FROM Employee e");
assertQuery("SELECT REPLACE(e.name, ' ', '_') FROM Employee e");
}
@Test // GH-3136
void stringConcatWithPipes() {
assertQuery("SELECT e.firstname || e.lastname AS name FROM Employee e");
}
}
|
EqlComplianceTests
|
java
|
apache__camel
|
components/camel-jackson-protobuf/src/test/java/org/apache/camel/component/jackson/protobuf/JacksonProtobufLookupResolverTest.java
|
{
"start": 1528,
"end": 3627
}
|
class ____ extends CamelTestSupport {
@Test
public void testMarshalUnmarshalPojo() throws Exception {
MockEndpoint mock1 = getMockEndpoint("mock:serialized");
mock1.expectedMessageCount(1);
Pojo pojo = new Pojo("Hello");
template.sendBody("direct:pojo", pojo);
mock1.assertIsSatisfied();
byte[] serialized = mock1.getReceivedExchanges().get(0).getIn().getBody(byte[].class);
assertNotNull(serialized);
assertEquals(7, serialized.length);
MockEndpoint mock2 = getMockEndpoint("mock:pojo");
mock2.expectedMessageCount(1);
mock2.message(0).body().isInstanceOf(Pojo.class);
template.sendBody("direct:serialized", serialized);
mock2.assertIsSatisfied();
Pojo back = mock2.getReceivedExchanges().get(0).getIn().getBody(Pojo.class);
assertEquals(pojo.getText(), back.getText());
}
@Override
protected void bindToRegistry(Registry registry) throws Exception {
String protobufStr = "message Pojo {\n"
+ " required string text = 1;\n"
+ "}\n";
ProtobufSchema schema = ProtobufSchemaLoader.std.parse(protobufStr);
SchemaResolver resolver = ex -> schema;
registry.bind("schema-resolver-1", SchemaResolver.class, resolver);
SchemaResolver resolver2 = ex -> {
throw new RuntimeCamelException();
};
registry.bind("schema-resolver-2", SchemaResolver.class, resolver2);
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
from("direct:serialized").unmarshal().protobuf(ProtobufLibrary.Jackson, Pojo.class, "schema-resolver-1")
.to("mock:pojo");
from("direct:pojo").marshal().protobuf(ProtobufLibrary.Jackson, Pojo.class, "schema-resolver-1")
.to("mock:serialized");
}
};
}
public static
|
JacksonProtobufLookupResolverTest
|
java
|
apache__flink
|
flink-examples/flink-examples-streaming/src/main/java/org/apache/flink/streaming/examples/join/WindowJoin.java
|
{
"start": 7157,
"end": 7865
}
|
class ____<T> implements WatermarkStrategy<T> {
private IngestionTimeWatermarkStrategy() {}
public static <T> IngestionTimeWatermarkStrategy<T> create() {
return new IngestionTimeWatermarkStrategy<>();
}
@Override
public WatermarkGenerator<T> createWatermarkGenerator(
WatermarkGeneratorSupplier.Context context) {
return new AscendingTimestampsWatermarks<>();
}
@Override
public TimestampAssigner<T> createTimestampAssigner(
TimestampAssignerSupplier.Context context) {
return (event, timestamp) -> System.currentTimeMillis();
}
}
}
|
IngestionTimeWatermarkStrategy
|
java
|
apache__camel
|
components/camel-cxf/camel-cxf-spring-soap/src/test/java/org/apache/camel/component/cxf/mtom/CxfMtomDisabledProducerPayloadModeTest.java
|
{
"start": 2500,
"end": 4995
}
|
class ____ extends CxfMtomProducerPayloadModeTest {
private static final Logger LOG = LoggerFactory.getLogger(CxfMtomDisabledProducerPayloadModeTest.class);
@Override
protected boolean isMtomEnabled() {
return false;
}
@Override
protected Object getServiceImpl() {
return new MyHelloImpl();
}
@Override
@Test
public void testProducer() throws Exception {
if (MtomTestHelper.isAwtHeadless(null, LOG)) {
return;
}
Exchange exchange = context.createProducerTemplate().send("direct:testEndpoint", new Processor() {
public void process(Exchange exchange) throws Exception {
exchange.setPattern(ExchangePattern.InOut);
List<Source> elements = new ArrayList<>();
elements.add(new DOMSource(
StaxUtils.read(new StringReader(MtomTestHelper.MTOM_DISABLED_REQ_MESSAGE)).getDocumentElement()));
CxfPayload<SoapHeader> body = new CxfPayload<>(
new ArrayList<SoapHeader>(),
elements, null);
exchange.getIn().setBody(body);
exchange.getIn(AttachmentMessage.class).addAttachment(MtomTestHelper.REQ_PHOTO_CID,
new DataHandler(new ByteArrayDataSource(MtomTestHelper.REQ_PHOTO_DATA, "application/octet-stream")));
exchange.getIn(AttachmentMessage.class).addAttachment(MtomTestHelper.REQ_IMAGE_CID,
new DataHandler(new ByteArrayDataSource(MtomTestHelper.requestJpeg, "image/jpeg")));
}
});
// process response - verify response attachments
CxfPayload<?> out = exchange.getMessage().getBody(CxfPayload.class);
assertEquals(1, out.getBody().size());
DataHandler dr = exchange.getMessage(AttachmentMessage.class).getAttachment(MtomTestHelper.RESP_PHOTO_CID);
assertEquals("application/octet-stream", dr.getContentType());
assertArrayEquals(MtomTestHelper.RESP_PHOTO_DATA, IOUtils.readBytesFromStream(dr.getInputStream()));
dr = exchange.getMessage(AttachmentMessage.class).getAttachment(MtomTestHelper.RESP_IMAGE_CID);
assertEquals("image/jpeg", dr.getContentType());
BufferedImage image = ImageIO.read(dr.getInputStream());
assertEquals(560, image.getWidth());
assertEquals(300, image.getHeight());
}
public static
|
CxfMtomDisabledProducerPayloadModeTest
|
java
|
spring-projects__spring-security
|
oauth2/oauth2-authorization-server/src/main/java/org/springframework/security/oauth2/server/authorization/http/converter/OAuth2TokenIntrospectionHttpMessageConverter.java
|
{
"start": 2356,
"end": 5907
}
|
class ____
extends AbstractHttpMessageConverter<OAuth2TokenIntrospection> {
private static final ParameterizedTypeReference<Map<String, Object>> STRING_OBJECT_MAP = new ParameterizedTypeReference<>() {
};
private final GenericHttpMessageConverter<Object> jsonMessageConverter = HttpMessageConverters
.getJsonMessageConverter();
private Converter<Map<String, Object>, OAuth2TokenIntrospection> tokenIntrospectionConverter = new MapOAuth2TokenIntrospectionConverter();
private Converter<OAuth2TokenIntrospection, Map<String, Object>> tokenIntrospectionParametersConverter = new OAuth2TokenIntrospectionMapConverter();
public OAuth2TokenIntrospectionHttpMessageConverter() {
super(MediaType.APPLICATION_JSON, new MediaType("application", "*+json"));
}
@Override
protected boolean supports(Class<?> clazz) {
return OAuth2TokenIntrospection.class.isAssignableFrom(clazz);
}
@Override
@SuppressWarnings("unchecked")
protected OAuth2TokenIntrospection readInternal(Class<? extends OAuth2TokenIntrospection> clazz,
HttpInputMessage inputMessage) throws HttpMessageNotReadableException {
try {
Map<String, Object> tokenIntrospectionParameters = (Map<String, Object>) this.jsonMessageConverter
.read(STRING_OBJECT_MAP.getType(), null, inputMessage);
return this.tokenIntrospectionConverter.convert(tokenIntrospectionParameters);
}
catch (Exception ex) {
throw new HttpMessageNotReadableException(
"An error occurred reading the Token Introspection Response: " + ex.getMessage(), ex, inputMessage);
}
}
@Override
protected void writeInternal(OAuth2TokenIntrospection tokenIntrospection, HttpOutputMessage outputMessage)
throws HttpMessageNotWritableException {
try {
Map<String, Object> tokenIntrospectionResponseParameters = this.tokenIntrospectionParametersConverter
.convert(tokenIntrospection);
this.jsonMessageConverter.write(tokenIntrospectionResponseParameters, STRING_OBJECT_MAP.getType(),
MediaType.APPLICATION_JSON, outputMessage);
}
catch (Exception ex) {
throw new HttpMessageNotWritableException(
"An error occurred writing the Token Introspection Response: " + ex.getMessage(), ex);
}
}
/**
* Sets the {@link Converter} used for converting the Token Introspection Response
* parameters to an {@link OAuth2TokenIntrospection}.
* @param tokenIntrospectionConverter the {@link Converter} used for converting to an
* {@link OAuth2TokenIntrospection}
*/
public final void setTokenIntrospectionConverter(
Converter<Map<String, Object>, OAuth2TokenIntrospection> tokenIntrospectionConverter) {
Assert.notNull(tokenIntrospectionConverter, "tokenIntrospectionConverter cannot be null");
this.tokenIntrospectionConverter = tokenIntrospectionConverter;
}
/**
* Sets the {@link Converter} used for converting an {@link OAuth2TokenIntrospection}
* to a {@code Map} representation of the Token Introspection Response parameters.
* @param tokenIntrospectionParametersConverter the {@link Converter} used for
* converting to a {@code Map} representation of the Token Introspection Response
* parameters
*/
public final void setTokenIntrospectionParametersConverter(
Converter<OAuth2TokenIntrospection, Map<String, Object>> tokenIntrospectionParametersConverter) {
Assert.notNull(tokenIntrospectionParametersConverter, "tokenIntrospectionParametersConverter cannot be null");
this.tokenIntrospectionParametersConverter = tokenIntrospectionParametersConverter;
}
private static final
|
OAuth2TokenIntrospectionHttpMessageConverter
|
java
|
spring-projects__spring-framework
|
spring-beans/src/test/java/org/springframework/beans/factory/support/ConstructorResolverAotTests.java
|
{
"start": 23399,
"end": 23639
}
|
class ____ implements FactoryBean<Integer> {
@Override
public Integer getObject() {
return 42;
}
@Override
public Class<?> getObjectType() {
return Integer.class;
}
}
@SuppressWarnings("unused")
static
|
IntegerFactoryBean
|
java
|
apache__logging-log4j2
|
log4j-core-test/src/test/java/org/apache/logging/log4j/core/appender/ConsoleAppenderTest.java
|
{
"start": 2087,
"end": 2616
}
|
class ____ {
private static final String LOG4J_SKIP_JANSI = "log4j.skipJansi";
@AfterAll
static void afterClass() {
System.clearProperty(LOG4J_SKIP_JANSI);
}
@BeforeAll
static void beforeClass() {
System.setProperty(LOG4J_SKIP_JANSI, "true");
}
ByteArrayOutputStream baos;
@Mock
PrintStream psMock;
@BeforeEach
void before() {
System.setProperty(LOG4J_SKIP_JANSI, "true");
baos = new ByteArrayOutputStream();
}
private
|
ConsoleAppenderTest
|
java
|
spring-projects__spring-framework
|
spring-core/src/main/java/org/springframework/util/ClassUtils.java
|
{
"start": 51167,
"end": 52186
}
|
class ____ one of its superclasses at least have one or more
* methods with the supplied name (with any argument types)?
* Includes non-public methods.
* @param clazz the clazz to check
* @param methodName the name of the method
* @return whether there is at least one method with the given name
*/
public static boolean hasAtLeastOneMethodWithName(Class<?> clazz, String methodName) {
Assert.notNull(clazz, "Class must not be null");
Assert.notNull(methodName, "Method name must not be null");
Method[] declaredMethods = clazz.getDeclaredMethods();
for (Method method : declaredMethods) {
if (method.getName().equals(methodName)) {
return true;
}
}
Class<?>[] ifcs = clazz.getInterfaces();
for (Class<?> ifc : ifcs) {
if (hasAtLeastOneMethodWithName(ifc, methodName)) {
return true;
}
}
return (clazz.getSuperclass() != null && hasAtLeastOneMethodWithName(clazz.getSuperclass(), methodName));
}
/**
* Given a method, which may come from an interface, and a target
|
or
|
java
|
google__guava
|
android/guava-tests/test/com/google/common/util/concurrent/FuturesGetCheckedTest.java
|
{
"start": 3473,
"end": 15047
}
|
class ____ extends TestCase {
// Boring untimed-get tests:
public void testGetCheckedUntimed_success() throws TwoArgConstructorException {
assertEquals("foo", getChecked(immediateFuture("foo"), TwoArgConstructorException.class));
}
public void testGetCheckedUntimed_interrupted() {
SettableFuture<String> future = SettableFuture.create();
Thread.currentThread().interrupt();
try {
getChecked(future, TwoArgConstructorException.class);
fail();
} catch (TwoArgConstructorException expected) {
assertThat(expected).hasCauseThat().isInstanceOf(InterruptedException.class);
assertTrue(Thread.currentThread().isInterrupted());
} finally {
Thread.interrupted();
}
}
public void testGetCheckedUntimed_cancelled() throws TwoArgConstructorException {
SettableFuture<String> future = SettableFuture.create();
future.cancel(true);
assertThrows(
CancellationException.class, () -> getChecked(future, TwoArgConstructorException.class));
}
public void testGetCheckedUntimed_executionExceptionChecked() {
TwoArgConstructorException expected =
assertThrows(
TwoArgConstructorException.class,
() -> getChecked(FAILED_FUTURE_CHECKED_EXCEPTION, TwoArgConstructorException.class));
assertThat(expected).hasCauseThat().isEqualTo(CHECKED_EXCEPTION);
}
public void testGetCheckedUntimed_executionExceptionUnchecked()
throws TwoArgConstructorException {
UncheckedExecutionException expected =
assertThrows(
UncheckedExecutionException.class,
() -> getChecked(FAILED_FUTURE_UNCHECKED_EXCEPTION, TwoArgConstructorException.class));
assertThat(expected).hasCauseThat().isEqualTo(UNCHECKED_EXCEPTION);
}
public void testGetCheckedUntimed_executionExceptionError() throws TwoArgConstructorException {
ExecutionError expected =
assertThrows(
ExecutionError.class,
() -> getChecked(FAILED_FUTURE_ERROR, TwoArgConstructorException.class));
assertThat(expected).hasCauseThat().isEqualTo(ERROR);
}
public void testGetCheckedUntimed_executionExceptionOtherThrowable() {
TwoArgConstructorException expected =
assertThrows(
TwoArgConstructorException.class,
() -> getChecked(FAILED_FUTURE_OTHER_THROWABLE, TwoArgConstructorException.class));
assertThat(expected).hasCauseThat().isEqualTo(OTHER_THROWABLE);
}
public void testGetCheckedUntimed_runtimeException() throws TwoArgConstructorException {
RuntimeException expected =
assertThrows(
RuntimeException.class,
() -> getChecked(RUNTIME_EXCEPTION_FUTURE, TwoArgConstructorException.class));
assertEquals(RUNTIME_EXCEPTION, expected);
}
public void testGetCheckedUntimed_error() throws TwoArgConstructorException {
try {
getChecked(ERROR_FUTURE, TwoArgConstructorException.class);
} catch (Error expected) {
assertEquals(ERROR, expected);
return;
}
fail();
}
public void testGetCheckedUntimed_badExceptionConstructor_failsEvenForSuccessfulInput()
throws Exception {
assertThrows(
IllegalArgumentException.class,
() -> getChecked(immediateFuture("x"), ExceptionWithBadConstructor.class));
}
public void testGetCheckedUntimed_badExceptionConstructor_wrapsOriginalChecked()
throws Exception {
assertThrows(
IllegalArgumentException.class,
() -> getChecked(FAILED_FUTURE_CHECKED_EXCEPTION, ExceptionWithBadConstructor.class));
}
public void testGetCheckedUntimed_withGoodAndBadExceptionConstructor() throws Exception {
ExceptionWithGoodAndBadConstructor expected =
assertThrows(
ExceptionWithGoodAndBadConstructor.class,
() ->
getChecked(
FAILED_FUTURE_CHECKED_EXCEPTION, ExceptionWithGoodAndBadConstructor.class));
assertThat(expected).hasCauseThat().isSameInstanceAs(CHECKED_EXCEPTION);
}
// Boring timed-get tests:
public void testGetCheckedTimed_success() throws TwoArgConstructorException {
assertEquals(
"foo", getChecked(immediateFuture("foo"), TwoArgConstructorException.class, 0, SECONDS));
}
public void testGetCheckedTimed_interrupted() {
SettableFuture<String> future = SettableFuture.create();
Thread.currentThread().interrupt();
try {
getChecked(future, TwoArgConstructorException.class, 0, SECONDS);
fail();
} catch (TwoArgConstructorException expected) {
assertThat(expected).hasCauseThat().isInstanceOf(InterruptedException.class);
assertTrue(Thread.currentThread().isInterrupted());
} finally {
Thread.interrupted();
}
}
public void testGetCheckedTimed_cancelled() throws TwoArgConstructorException {
SettableFuture<String> future = SettableFuture.create();
future.cancel(true);
assertThrows(
CancellationException.class,
() -> getChecked(future, TwoArgConstructorException.class, 0, SECONDS));
}
public void testGetCheckedTimed_executionExceptionChecked() {
TwoArgConstructorException expected =
assertThrows(
TwoArgConstructorException.class,
() ->
getChecked(
FAILED_FUTURE_CHECKED_EXCEPTION, TwoArgConstructorException.class, 0, SECONDS));
assertThat(expected).hasCauseThat().isEqualTo(CHECKED_EXCEPTION);
}
public void testGetCheckedTimed_executionExceptionUnchecked() throws TwoArgConstructorException {
UncheckedExecutionException expected =
assertThrows(
UncheckedExecutionException.class,
() ->
getChecked(
FAILED_FUTURE_UNCHECKED_EXCEPTION,
TwoArgConstructorException.class,
0,
SECONDS));
assertThat(expected).hasCauseThat().isEqualTo(UNCHECKED_EXCEPTION);
}
public void testGetCheckedTimed_executionExceptionError() throws TwoArgConstructorException {
ExecutionError expected =
assertThrows(
ExecutionError.class,
() -> getChecked(FAILED_FUTURE_ERROR, TwoArgConstructorException.class, 0, SECONDS));
assertThat(expected).hasCauseThat().isEqualTo(ERROR);
}
public void testGetCheckedTimed_executionExceptionOtherThrowable() {
TwoArgConstructorException expected =
assertThrows(
TwoArgConstructorException.class,
() ->
getChecked(
FAILED_FUTURE_OTHER_THROWABLE, TwoArgConstructorException.class, 0, SECONDS));
assertThat(expected).hasCauseThat().isEqualTo(OTHER_THROWABLE);
}
public void testGetCheckedTimed_runtimeException() throws TwoArgConstructorException {
RuntimeException expected =
assertThrows(
RuntimeException.class,
() ->
getChecked(RUNTIME_EXCEPTION_FUTURE, TwoArgConstructorException.class, 0, SECONDS));
assertEquals(RUNTIME_EXCEPTION, expected);
}
public void testGetCheckedTimed_error() throws TwoArgConstructorException {
try {
getChecked(ERROR_FUTURE, TwoArgConstructorException.class, 0, SECONDS);
} catch (Error expected) {
assertEquals(ERROR, expected);
return;
}
fail();
}
public void testGetCheckedTimed_timeoutException() {
SettableFuture<String> future = SettableFuture.create();
TwoArgConstructorException expected =
assertThrows(
TwoArgConstructorException.class,
() -> getChecked(future, TwoArgConstructorException.class, 0, SECONDS));
assertThat(expected).hasCauseThat().isInstanceOf(TimeoutException.class);
}
public void testGetCheckedTimed_badExceptionConstructor_failsEvenForSuccessfulInput()
throws Exception {
assertThrows(
IllegalArgumentException.class,
() -> getChecked(immediateFuture("x"), ExceptionWithBadConstructor.class, 1, SECONDS));
}
public void testGetCheckedTimed_badExceptionConstructor_wrapsOriginalChecked() throws Exception {
assertThrows(
IllegalArgumentException.class,
() ->
getChecked(
FAILED_FUTURE_CHECKED_EXCEPTION, ExceptionWithBadConstructor.class, 1, SECONDS));
}
public void testGetCheckedTimed_withGoodAndBadExceptionConstructor() {
ExceptionWithGoodAndBadConstructor expected =
assertThrows(
ExceptionWithGoodAndBadConstructor.class,
() ->
getChecked(
FAILED_FUTURE_CHECKED_EXCEPTION,
ExceptionWithGoodAndBadConstructor.class,
1,
SECONDS));
assertThat(expected).hasCauseThat().isSameInstanceAs(CHECKED_EXCEPTION);
}
// Edge case tests of the exception-construction code through untimed get():
@SuppressWarnings("FuturesGetCheckedIllegalExceptionType")
public void testGetCheckedUntimed_exceptionClassIsRuntimeException() {
assertThrows(
IllegalArgumentException.class,
() -> getChecked(FAILED_FUTURE_CHECKED_EXCEPTION, TwoArgConstructorRuntimeException.class));
}
public void testGetCheckedUntimed_exceptionClassSomePrivateConstructors() {
assertThrows(
ExceptionWithSomePrivateConstructors.class,
() ->
getChecked(
FAILED_FUTURE_CHECKED_EXCEPTION, ExceptionWithSomePrivateConstructors.class));
}
@SuppressWarnings("FuturesGetCheckedIllegalExceptionType")
public void testGetCheckedUntimed_exceptionClassNoPublicConstructor()
throws ExceptionWithPrivateConstructor {
assertThrows(
IllegalArgumentException.class,
() -> getChecked(FAILED_FUTURE_CHECKED_EXCEPTION, ExceptionWithPrivateConstructor.class));
}
@SuppressWarnings("FuturesGetCheckedIllegalExceptionType")
public void testGetCheckedUntimed_exceptionClassPublicConstructorWrongType()
throws ExceptionWithWrongTypesConstructor {
assertThrows(
IllegalArgumentException.class,
() ->
getChecked(FAILED_FUTURE_CHECKED_EXCEPTION, ExceptionWithWrongTypesConstructor.class));
}
public void testGetCheckedUntimed_exceptionClassPrefersStringConstructor() {
ExceptionWithManyConstructors expected =
assertThrows(
ExceptionWithManyConstructors.class,
() -> getChecked(FAILED_FUTURE_CHECKED_EXCEPTION, ExceptionWithManyConstructors.class));
assertTrue(expected.usedExpectedConstructor);
}
public void testGetCheckedUntimed_exceptionClassUsedInitCause() {
ExceptionWithoutThrowableConstructor expected =
assertThrows(
ExceptionWithoutThrowableConstructor.class,
() ->
getChecked(
FAILED_FUTURE_CHECKED_EXCEPTION, ExceptionWithoutThrowableConstructor.class));
assertThat(expected).hasMessageThat().contains("mymessage");
assertThat(expected).hasCauseThat().isEqualTo(CHECKED_EXCEPTION);
}
public void testPrefersConstructorWithThrowableParameter() {
ExceptionWithManyConstructorsButOnlyOneThrowable exception =
assertThrows(
ExceptionWithManyConstructorsButOnlyOneThrowable.class,
() ->
getChecked(
FAILED_FUTURE_CHECKED_EXCEPTION,
ExceptionWithManyConstructorsButOnlyOneThrowable.class));
assertThat(exception).hasMessageThat().contains("mymessage");
assertThat(exception.getAntecedent()).isEqualTo(CHECKED_EXCEPTION);
}
// Class unloading test:
public static final
|
FuturesGetCheckedTest
|
java
|
spring-projects__spring-framework
|
spring-beans/src/main/java/org/springframework/beans/TypeConverterDelegate.java
|
{
"start": 12694,
"end": 12857
}
|
enum ____ for type [" + enumType + "]", ex);
}
}
}
}
if (convertedValue == currentConvertedValue) {
// Try field lookup as fallback: for Java
|
value
|
java
|
apache__camel
|
core/camel-api/src/main/java/org/apache/camel/AggregationStrategy.java
|
{
"start": 3300,
"end": 9652
}
|
interface ____ {
/**
* Aggregates an old and new exchange together to create a single combined exchange
* <p/>
* Important: In the aggregate method, do not create a new exchange instance to return, instead return either the
* old or new exchange from the input parameters; favor returning the old exchange whenever possible.
*
* @param oldExchange the oldest exchange (is <tt>null</tt> on first aggregation as we only have the new exchange)
* @param newExchange the newest exchange (can be <tt>null</tt> if there was no data possible to acquire)
* @return a combined composite of the two exchanges, return either the old or new exchange from the
* input parameters; favor returning the old exchange whenever possible)
*/
Exchange aggregate(Exchange oldExchange, Exchange newExchange);
/**
* Aggregates an old and new exchange together to create a single combined exchange.
* <p/>
* Important: In the aggregate method, do not create a new exchange instance to return, instead return either the
* old or new exchange from the input parameters; favor returning the old exchange whenever possible.
* <p/>
* Important: Only Multicast, Recipient List, and Splitter EIP supports this method with access to the input
* exchange. All other EIPs does not and uses the {@link #aggregate(Exchange, Exchange)} method instead.
*
* @param oldExchange the oldest exchange (is <tt>null</tt> on first aggregation as we only have the new
* exchange)
* @param newExchange the newest exchange (can be <tt>null</tt> if there was no data possible to acquire)
* @param inputExchange the input exchange (input to the EIP)
* @return a combined composite of the two exchanges, return either the old or new exchange from the
* input parameters; favor returning the old exchange whenever possible)
*/
default Exchange aggregate(Exchange oldExchange, Exchange newExchange, Exchange inputExchange) {
return aggregate(oldExchange, newExchange);
}
/**
* Indicates if this aggregation strategy uses pre-completion mode.
*
* @return <tt>true</tt> if this strategy uses pre-completion mode, or <tt>false</tt> otherwise.
*/
default boolean canPreComplete() {
return false;
}
/**
* Determines if the aggregation should complete the current group, and start a new group, or the aggregation should
* continue using the current group. This callback will only be called if {@link #canPreComplete()} returns
* <tt>true</tt>.
*
* @param oldExchange the oldest exchange (is <tt>null</tt> on first aggregation as we only have the new exchange)
* @param newExchange the newest exchange (can be <tt>null</tt> if there was no data possible to acquire)
* @return <tt>true</tt> to complete current group and start a new group, or <tt>false</tt> to keep
* using current
*/
default boolean preComplete(Exchange oldExchange, Exchange newExchange) {
return false;
}
/**
* The aggregated {@link Exchange} has completed
*
* <b>Important: </b> This method must <b>not</b> throw any exceptions.
*
* @param exchange the current aggregated exchange, or the original {@link org.apache.camel.Exchange} if no
* aggregation has been done before the completion occurred
*/
default void onCompletion(Exchange exchange) {
}
/**
* The aggregated {@link Exchange} has completed
*
* <b>Important: </b> This method must <b>not</b> throw any exceptions.
*
* @param exchange the current aggregated exchange, or the original {@link org.apache.camel.Exchange} if no
* aggregation has been done before the completion occurred
* @param inputExchange the input exchange (input to the EIP)
*/
default void onCompletion(Exchange exchange, Exchange inputExchange) {
onCompletion(exchange);
}
/**
* A timeout occurred.
* <p/>
* <b>Important: </b> This method must <b>not</b> throw any exceptions.
*
* @param exchange the current aggregated exchange, or the original {@link Exchange} if no aggregation has been done
* before the timeout occurred
* @param index the index, may be <tt>-1</tt> if not possible to determine the index
* @param total the total, may be <tt>-1</tt> if not possible to determine the total
* @param timeout the timeout value in millis, may be <tt>-1</tt> if not possible to determine the timeout
*/
default void timeout(Exchange exchange, int index, int total, long timeout) {
}
/**
* Callback when the aggregated {@link Exchange} fails to add in the
* {@link org.apache.camel.spi.OptimisticLockingAggregationRepository} because of an
* {@link org.apache.camel.spi.OptimisticLockingAggregationRepository.OptimisticLockingException}.
* <p/>
* Please note that when aggregating {@link Exchange}'s to be careful not to modify and return the
* {@code oldExchange} from the
* {@link AggregationStrategy#aggregate(org.apache.camel.Exchange, org.apache.camel.Exchange)} method. If you are
* using the default MemoryAggregationRepository this will mean you have modified the value of an object already
* referenced/stored by the MemoryAggregationRepository. This makes it impossible for optimistic locking to work
* correctly with the MemoryAggregationRepository.
* <p/>
* You should instead return either the new {@code newExchange} or a completely new instance of {@link Exchange}.
* This is due to the nature of how the underlying {@link java.util.concurrent.ConcurrentHashMap} performs CAS
* operations on the value identity.
*
* @see java.util.concurrent.ConcurrentHashMap
*/
default void onOptimisticLockFailure(Exchange oldExchange, Exchange newExchange) {
LoggerFactory.getLogger(getClass()).trace(
"onOptimisticLockFailure with AggregationStrategy: {}, oldExchange: {}, newExchange: {}", this, oldExchange,
newExchange);
}
}
|
AggregationStrategy
|
java
|
google__guava
|
android/guava-tests/test/com/google/common/collect/BenchmarkHelpers.java
|
{
"start": 7836,
"end": 8502
}
|
enum ____ implements MapsImplEnum {
TreeMapImpl {
@Override
public <K extends Comparable<K>, V> SortedMap<K, V> create(Map<K, V> map) {
SortedMap<K, V> result = Maps.newTreeMap();
result.putAll(map);
return result;
}
},
ConcurrentSkipListImpl {
@Override
public <K extends Comparable<K>, V> SortedMap<K, V> create(Map<K, V> map) {
return new ConcurrentSkipListMap<>(map);
}
},
ImmutableSortedMapImpl {
@Override
public <K extends Comparable<K>, V> SortedMap<K, V> create(Map<K, V> map) {
return ImmutableSortedMap.copyOf(map);
}
};
}
|
SortedMapImpl
|
java
|
elastic__elasticsearch
|
x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/agg/OrAggFilter.java
|
{
"start": 377,
"end": 703
}
|
class ____ extends AggFilter {
public OrAggFilter(AggFilter left, AggFilter right) {
this(left.name() + "_|_" + right.name(), left, right);
}
public OrAggFilter(String name, AggFilter left, AggFilter right) {
super(name, Scripts.or(left.scriptTemplate(), right.scriptTemplate()));
}
}
|
OrAggFilter
|
java
|
apache__flink
|
flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/factories/TestUpdateDeleteTableFactory.java
|
{
"start": 19969,
"end": 26025
}
|
class ____ extends SupportsRowLevelUpdateSink
implements SupportsRowLevelDelete {
private final ObjectIdentifier tableIdentifier;
private final ResolvedCatalogTable resolvedCatalogTable;
private final RowLevelDeleteMode deleteMode;
protected final String dataId;
private final List<String> requireColumnsForDelete;
private boolean isDelete;
protected int[] requiredColumnIndices;
public SupportsRowLevelModificationSink(
ObjectIdentifier tableIdentifier,
ResolvedCatalogTable resolvedCatalogTable,
RowLevelDeleteMode deleteMode,
RowLevelUpdateMode updateMode,
String dataId,
List<String> requireColumnsForDelete,
List<String> requireColumnsForUpdate,
boolean onlyRequireUpdatedColumns) {
this(
tableIdentifier,
resolvedCatalogTable,
deleteMode,
updateMode,
dataId,
requireColumnsForDelete,
requireColumnsForUpdate,
onlyRequireUpdatedColumns,
false,
false);
}
public SupportsRowLevelModificationSink(
ObjectIdentifier tableIdentifier,
ResolvedCatalogTable resolvedCatalogTable,
RowLevelDeleteMode deleteMode,
RowLevelUpdateMode updateMode,
String dataId,
List<String> requireColumnsForDelete,
List<String> requireColumnsForUpdate,
boolean onlyRequireUpdatedColumns,
boolean isDelete,
boolean isUpdate) {
super(
tableIdentifier,
resolvedCatalogTable,
updateMode,
dataId,
requireColumnsForUpdate,
onlyRequireUpdatedColumns,
isUpdate);
this.tableIdentifier = tableIdentifier;
this.resolvedCatalogTable = resolvedCatalogTable;
this.deleteMode = deleteMode;
this.dataId = dataId;
this.requireColumnsForDelete = requireColumnsForDelete;
this.isDelete = isDelete;
}
@Override
public ChangelogMode getChangelogMode(ChangelogMode requestedMode) {
return ChangelogMode.all();
}
@Override
public SinkRuntimeProvider getSinkRuntimeProvider(Context context) {
if (isUpdate) {
return super.getSinkRuntimeProvider(context);
} else {
return new DataStreamSinkProvider() {
@Override
public DataStreamSink<?> consumeDataStream(
ProviderContext providerContext, DataStream<RowData> dataStream) {
if (isDelete) {
return dataStream
.addSink(
new DeleteDataSinkFunction(
dataId,
getPrimaryKeyFieldGetter(
resolvedCatalogTable
.getResolvedSchema(),
requiredColumnIndices),
getAllFieldGetter(
resolvedCatalogTable
.getResolvedSchema()),
deleteMode))
.setParallelism(1);
} else {
// otherwise, do nothing
return dataStream.sinkTo(new DiscardingSink<>());
}
}
};
}
}
@Override
public DynamicTableSink copy() {
return new SupportsRowLevelModificationSink(
tableIdentifier,
resolvedCatalogTable,
deleteMode,
updateMode,
dataId,
requireColumnsForDelete,
requireColumnsForUpdate,
onlyRequireUpdatedColumns,
isDelete,
isUpdate);
}
@Override
public String asSummaryString() {
return "SupportsRowLevelModificationSink";
}
@Override
public RowLevelDeleteInfo applyRowLevelDelete(
@Nullable RowLevelModificationScanContext context) {
checkScanContext(context, tableIdentifier);
this.isDelete = true;
return new RowLevelDeleteInfo() {
@Override
public Optional<List<Column>> requiredColumns() {
List<Column> requiredCols = null;
if (requireColumnsForDelete != null) {
requiredCols =
getRequiredColumns(
requireColumnsForDelete,
resolvedCatalogTable.getResolvedSchema());
}
requiredColumnIndices =
getRequiredColumnIndexes(resolvedCatalogTable, requiredCols);
return Optional.ofNullable(requiredCols);
}
@Override
public RowLevelDeleteMode getRowLevelDeleteMode() {
return deleteMode;
}
};
}
}
/** The sink for delete existing data. */
private static
|
SupportsRowLevelModificationSink
|
java
|
apache__camel
|
components/camel-quickfix/src/main/java/org/apache/camel/component/quickfixj/QuickfixjConfiguration.java
|
{
"start": 1024,
"end": 2554
}
|
class ____ {
private Map<Object, Object> defaultSettings;
private Map<SessionID, Map<Object, Object>> sessionSettings;
public QuickfixjConfiguration() {
}
public Map<Object, Object> getDefaultSettings() {
return defaultSettings;
}
public void setDefaultSettings(Map<Object, Object> defaultSettings) {
this.defaultSettings = defaultSettings;
}
public Map<SessionID, Map<Object, Object>> getSessionSettings() {
return sessionSettings;
}
public void setSessionSettings(Map<SessionID, Map<Object, Object>> sessionSettings) {
this.sessionSettings = sessionSettings;
}
public void addSessionSetting(SessionID sessionID, Map<Object, Object> settings) {
if (sessionSettings == null) {
sessionSettings = new HashMap<>();
}
sessionSettings.put(sessionID, settings);
}
public SessionSettings createSessionSettings() throws ConfigError {
SessionSettings settings = new SessionSettings();
if (defaultSettings != null && !defaultSettings.isEmpty()) {
settings.set(new Dictionary("defaults", defaultSettings));
}
if (sessionSettings != null && !sessionSettings.isEmpty()) {
for (Map.Entry<SessionID, Map<Object, Object>> sessionSetting : sessionSettings.entrySet()) {
settings.set(sessionSetting.getKey(), new Dictionary("session", sessionSetting.getValue()));
}
}
return settings;
}
}
|
QuickfixjConfiguration
|
java
|
apache__kafka
|
streams/src/main/java/org/apache/kafka/streams/state/internals/ChangeLoggingTimestampedWindowBytesStore.java
|
{
"start": 1138,
"end": 1857
}
|
class ____ extends ChangeLoggingWindowBytesStore {
ChangeLoggingTimestampedWindowBytesStore(final WindowStore<Bytes, byte[]> bytesStore,
final boolean retainDuplicates) {
super(bytesStore, retainDuplicates, WindowKeySchema::toStoreKeyBinary);
}
@Override
void log(final Bytes key,
final byte[] valueAndTimestamp) {
internalContext.logChange(
name(),
key,
rawValue(valueAndTimestamp),
valueAndTimestamp != null ? timestamp(valueAndTimestamp) : internalContext.recordContext().timestamp(),
wrapped().getPosition()
);
}
}
|
ChangeLoggingTimestampedWindowBytesStore
|
java
|
google__dagger
|
javatests/dagger/internal/codegen/MissingBindingValidationTest.java
|
{
"start": 33198,
"end": 33523
}
|
interface ____ {",
" String string();",
" Object needsString();",
"}");
Source childModule =
CompilerTests.javaSource(
"ChildModule",
"import dagger.Module;",
"import dagger.Provides;",
"",
"@Module",
"
|
Child
|
java
|
apache__camel
|
components/camel-bindy/src/test/java/org/apache/camel/dataformat/bindy/model/car/Car.java
|
{
"start": 1083,
"end": 4965
}
|
class ____ {
@DataField(pos = 1)
private String stockid;
@DataField(pos = 2)
private String make;
@DataField(pos = 3)
private String model;
@DataField(pos = 4)
private String deriv;
@DataField(pos = 5)
private String series;
@DataField(pos = 6)
private String registration;
@DataField(pos = 7)
private String chassis;
@DataField(pos = 8)
private String engine;
@DataField(pos = 9)
private int year;
@DataField(pos = 10, precision = 1)
private double klms;
@DataField(pos = 11)
private String body;
@DataField(pos = 12)
private Colour colour;
@DataField(pos = 13)
private String enginesize;
@DataField(pos = 14)
private String trans;
@DataField(pos = 15)
private String fuel;
@DataField(pos = 16)
private String options;
@DataField(pos = 17)
private String desc;
@DataField(pos = 18)
private String status;
@DataField(pos = 19, precision = 1)
private double price;
@DataField(pos = 20)
private String nvic;
public String getStockid() {
return stockid;
}
public void setStockid(String stockid) {
this.stockid = stockid;
}
public String getMake() {
return make;
}
public void setMake(String make) {
this.make = make;
}
public String getModel() {
return model;
}
public void setModel(String model) {
this.model = model;
}
public String getDeriv() {
return deriv;
}
public void setDeriv(String deriv) {
this.deriv = deriv;
}
public String getSeries() {
return series;
}
public void setSeries(String series) {
this.series = series;
}
public String getRegistration() {
return registration;
}
public void setRegistration(String registration) {
this.registration = registration;
}
public String getChassis() {
return chassis;
}
public void setChassis(String chassis) {
this.chassis = chassis;
}
public String getEngine() {
return engine;
}
public void setEngine(String engine) {
this.engine = engine;
}
public int getYear() {
return year;
}
public void setYear(int year) {
this.year = year;
}
public double getKlms() {
return klms;
}
public void setKlms(double klms) {
this.klms = klms;
}
public String getBody() {
return body;
}
public void setBody(String body) {
this.body = body;
}
public Colour getColour() {
return colour;
}
public void setColour(Colour colour) {
this.colour = colour;
}
public String getEnginesize() {
return enginesize;
}
public void setEnginesize(String enginesize) {
this.enginesize = enginesize;
}
public String getTrans() {
return trans;
}
public void setTrans(String trans) {
this.trans = trans;
}
public String getFuel() {
return fuel;
}
public void setFuel(String fuel) {
this.fuel = fuel;
}
public String getOptions() {
return options;
}
public void setOptions(String options) {
this.options = options;
}
public String getDesc() {
return desc;
}
public void setDesc(String desc) {
this.desc = desc;
}
public String getStatus() {
return status;
}
public void setStatus(String status) {
this.status = status;
}
public double getPrice() {
return price;
}
public void setPrice(double price) {
this.price = price;
}
public String getNvic() {
return nvic;
}
public void setNvic(String nvic) {
this.nvic = nvic;
}
public
|
Car
|
java
|
apache__camel
|
components/camel-servicenow/camel-servicenow-component/src/main/java/org/apache/camel/component/servicenow/releases/helsinki/HelsinkiServiceNowScorecardProcessor.java
|
{
"start": 1446,
"end": 3925
}
|
class ____ extends AbstractServiceNowProcessor {
HelsinkiServiceNowScorecardProcessor(ServiceNowEndpoint endpoint) {
super(endpoint);
addDispatcher(ACTION_RETRIEVE, ACTION_SUBJECT_PERFORMANCE_ANALYTICS, this::retrievePerformanceAnalytics);
}
/*
* This method retrieves Performance Analytics scorecard details.
*
* Method:
* - GET
*
* URL Format:
* - /api/now/pa/scorecards
*/
private void retrievePerformanceAnalytics(Exchange exchange) throws Exception {
final Message in = exchange.getIn();
final Class<?> responseModel = getResponseModel(in);
final String apiVersion = getApiVersion(in);
Response response = client.reset()
.types(MediaType.APPLICATION_JSON_TYPE)
.path("now")
.path(apiVersion)
.path("pa")
.path("scorecards")
.query(ServiceNowParams.SYSPARM_UUID, in)
.query(ServiceNowParams.SYSPARM_BREAKDOWN, in)
.query(ServiceNowParams.SYSPARM_INCLUDE_SCORES, in)
.query(ServiceNowParams.SYSPARM_INCLUDE_AGGREGATES, in)
.query(ServiceNowParams.SYSPARM_INCLUDE_AVAILABLE_BREAKDOWNS, in)
.query(ServiceNowParams.SYSPARM_INCLUDE_AVAILABLE_AGGREGATES, in)
.query(ServiceNowParams.SYSPARM_DISPLAY_VALUE, in)
.query(ServiceNowParams.SYSPARM_EXCLUDE_REFERENCE_LINK, in)
.query(ServiceNowParams.SYSPARM_FAVORITES, in)
.query(ServiceNowParams.SYSPARM_KEY, in)
.query(ServiceNowParams.SYSPARM_TARGET, in)
.query(ServiceNowParams.SYSPARM_DISPLAY, in)
.query(ServiceNowParams.SYSPARM_CONTAINS, in)
.query(ServiceNowParams.SYSPARM_TAGS, in)
.query(ServiceNowParams.SYSPARM_PER_PAGE, in)
.query(ServiceNowParams.SYSPARM_PAGE, in)
.query(ServiceNowParams.SYSPARM_SORT_BY, in)
.query(ServiceNowParams.SYSPARM_SORT_DIR, in)
.query(ServiceNowParams.SYSPARM_ELEMENTS_FILTER, in)
.query(ServiceNowParams.SYSPARM_BREAKDOWN_RELATION, in)
.query(ServiceNowParams.SYSPARM_INCLUDE_SCORE_NOTES, in)
.query(responseModel)
.invoke(HttpMethod.GET);
setBodyAndHeaders(in, responseModel, response);
}
}
|
HelsinkiServiceNowScorecardProcessor
|
java
|
apache__flink
|
flink-datastream-api/src/main/java/org/apache/flink/datastream/api/extension/window/strategy/WindowStrategy.java
|
{
"start": 2785,
"end": 9092
}
|
enum ____ {
PROCESSING,
EVENT
}
// ============== global window ================
/**
* Creates a global window strategy. Note that the global window can be used in both
* GlobalStream, KeyedStream, NonKeyedStream.
*
* @return A global window strategy.
*/
public static WindowStrategy global() {
return new GlobalWindowStrategy();
}
// ============== tumbling time window ================
/**
* Create a tumbling time window strategy with the event time default time type. Note that
* tumbling time windows can be used in KeyedStream and GlobalStream. If tumbling time window is
* used in a GlobalStream, it will convert the GlobalStream into a KeyedStream with a Key of
* zero, and then use the converted KeyedStream to execute the window.
*
* @param windowSize the size of Window.
* @return A tumbling time window strategy.
*/
public static WindowStrategy tumbling(Duration windowSize) {
return new TumblingTimeWindowStrategy(windowSize);
}
/**
* Create a tumbling time window strategy. Note that tumbling time windows can be used in
* KeyedStream and GlobalStream. If tumbling time window is used in a GlobalStream, it will
* convert the GlobalStream into a KeyedStream with a Key of zero, and then use the converted
* KeyedStream to execute the window.
*
* @param windowSize the size of Window.
* @param timeType the time type of Window.
* @return A tumbling time window strategy.
*/
public static WindowStrategy tumbling(Duration windowSize, TimeType timeType) {
return new TumblingTimeWindowStrategy(windowSize, timeType);
}
/**
* Create a tumbling time window strategy. Note that tumbling time windows can be used in
* KeyedStream and GlobalStream. If tumbling time window is used in a GlobalStream, it will
* convert the GlobalStream into a KeyedStream with a Key of zero, and then use the converted
* KeyedStream to execute the window.
*
* @param windowSize the size of Window.
* @param timeType the time type of Window.
* @param allowedLateness the allowed lateness of Window.
* @return A tumbling time window strategy.
*/
public static WindowStrategy tumbling(
Duration windowSize, TimeType timeType, Duration allowedLateness) {
return new TumblingTimeWindowStrategy(windowSize, timeType, allowedLateness);
}
// ============== sliding time window ================
/**
* Create a sliding time window strategy with the event time default time type. Note that
* sliding time windows can be used in KeyedStream and GlobalStream. If sliding time window is
* used in a GlobalStream, it will convert the GlobalStream into a KeyedStream with a Key of
* zero, and then use the converted KeyedStream to execute the window.
*
* @param windowSize the size of Window.
* @param windowSlideInterval the slide interval of Window.
* @return A sliding time window strategy.
*/
public static WindowStrategy sliding(Duration windowSize, Duration windowSlideInterval) {
return new SlidingTimeWindowStrategy(windowSize, windowSlideInterval);
}
/**
* Create a sliding time window strategy. Note that sliding time windows can be used in
* KeyedStream and GlobalStream. If sliding time window is used in a GlobalStream, it will
* convert the GlobalStream into a KeyedStream with a Key of zero, and then use the converted
* KeyedStream to execute the window.
*
* @param windowSize the size of Window.
* @param windowSlideInterval the slide interval of Window.
* @param timeType the time type of Window.
* @return A sliding time window strategy.
*/
public static WindowStrategy sliding(
Duration windowSize, Duration windowSlideInterval, TimeType timeType) {
return new SlidingTimeWindowStrategy(windowSize, windowSlideInterval, timeType);
}
/**
* Create a sliding time window strategy. Note that sliding time windows can be used in
* KeyedStream and GlobalStream. If sliding time window is used in a GlobalStream, it will
* convert the GlobalStream into a KeyedStream with a Key of zero, and then use the converted
* KeyedStream to execute the window.
*
* @param windowSize the size of Window.
* @param windowSlideInterval the slide interval of Window.
* @param timeType the time type of Window.
* @param allowedLateness the allowed lateness of Window.
* @return A sliding time window strategy.
*/
public static WindowStrategy sliding(
Duration windowSize,
Duration windowSlideInterval,
TimeType timeType,
Duration allowedLateness) {
return new SlidingTimeWindowStrategy(
windowSize, windowSlideInterval, timeType, allowedLateness);
}
// ============== session window ================
/**
* Create a session time window strategy with the event time default time type. Note that
* session time windows can be used in KeyedStream and GlobalStream. If session time window is
* used in a GlobalStream, it will convert the GlobalStream into a KeyedStream with a Key of
* zero, and then use the converted KeyedStream to execute the window.
*
* @param sessionGap the timeout of session.
* @return A session window strategy.
*/
public static WindowStrategy session(Duration sessionGap) {
return new SessionWindowStrategy(sessionGap);
}
/**
* Create a session time window strategy. Note that session time windows can be used in
* KeyedStream and GlobalStream. If session time window is used in a GlobalStream, it will
* convert the GlobalStream into a KeyedStream with a Key of zero, and then use the converted
* KeyedStream to execute the window.
*
* @param sessionGap the timeout of session.
* @param timeType the time type of Window.
* @return A session window strategy.
*/
public static WindowStrategy session(Duration sessionGap, TimeType timeType) {
return new SessionWindowStrategy(sessionGap, timeType);
}
}
|
TimeType
|
java
|
apache__camel
|
components/camel-influxdb2/src/generated/java/org/apache/camel/component/influxdb2/InfluxDb2ComponentConfigurer.java
|
{
"start": 736,
"end": 2809
}
|
class ____ extends PropertyConfigurerSupport implements GeneratedPropertyConfigurer, PropertyConfigurerGetter {

    /** Returns true when {@code key} equals either the all-lowercase or the camelCase spelling. */
    private static boolean isProperty(String key, String lower, String camel) {
        return lower.equals(key) || camel.equals(key);
    }

    /**
     * Applies a single named option to the given {@code InfluxDb2Component}.
     *
     * @return {@code true} when the option name was recognised and set, {@code false} otherwise.
     */
    @Override
    public boolean configure(CamelContext camelContext, Object obj, String name, Object value, boolean ignoreCase) {
        InfluxDb2Component target = (InfluxDb2Component) obj;
        String key = ignoreCase ? name.toLowerCase() : name;
        if (isProperty(key, "autowiredenabled", "autowiredEnabled")) {
            target.setAutowiredEnabled(property(camelContext, boolean.class, value));
            return true;
        }
        if (isProperty(key, "influxdbclient", "influxDBClient")) {
            target.setInfluxDBClient(property(camelContext, com.influxdb.client.InfluxDBClient.class, value));
            return true;
        }
        if (isProperty(key, "lazystartproducer", "lazyStartProducer")) {
            target.setLazyStartProducer(property(camelContext, boolean.class, value));
            return true;
        }
        return false;
    }

    /** Options whose values are resolved by autowiring from the registry. */
    @Override
    public String[] getAutowiredNames() {
        return new String[]{"influxDBClient"};
    }

    /** Returns the declared type of the named option, or {@code null} for unknown names. */
    @Override
    public Class<?> getOptionType(String name, boolean ignoreCase) {
        String key = ignoreCase ? name.toLowerCase() : name;
        if (isProperty(key, "autowiredenabled", "autowiredEnabled")) {
            return boolean.class;
        }
        if (isProperty(key, "influxdbclient", "influxDBClient")) {
            return com.influxdb.client.InfluxDBClient.class;
        }
        if (isProperty(key, "lazystartproducer", "lazyStartProducer")) {
            return boolean.class;
        }
        return null;
    }

    /** Returns the current value of the named option, or {@code null} for unknown names. */
    @Override
    public Object getOptionValue(Object obj, String name, boolean ignoreCase) {
        InfluxDb2Component target = (InfluxDb2Component) obj;
        String key = ignoreCase ? name.toLowerCase() : name;
        if (isProperty(key, "autowiredenabled", "autowiredEnabled")) {
            return target.isAutowiredEnabled();
        }
        if (isProperty(key, "influxdbclient", "influxDBClient")) {
            return target.getInfluxDBClient();
        }
        if (isProperty(key, "lazystartproducer", "lazyStartProducer")) {
            return target.isLazyStartProducer();
        }
        return null;
    }
}
|
InfluxDb2ComponentConfigurer
|
java
|
apache__flink
|
flink-core/src/main/java/org/apache/flink/configuration/StructuredOptionsSplitter.java
|
{
"start": 6679,
"end": 7290
}
|
class ____ {
    // The category of this token.
    private final TokenType tokenType;
    // The raw text captured for this token.
    private final String string;
    // Offset of the token in the parsed input — presumably used for error messages; confirm with callers.
    private final int position;

    private Token(TokenType type, String text, int pos) {
        this.tokenType = type;
        this.string = text;
        this.position = pos;
    }

    public TokenType getTokenType() {
        return this.tokenType;
    }

    public String getString() {
        return this.string;
    }

    public int getPosition() {
        return this.position;
    }
}
    private StructuredOptionsSplitter() {} // static utility class — prevent instantiation
}
|
Token
|
java
|
micronaut-projects__micronaut-core
|
http-server-netty/src/test/groovy/io/micronaut/http/server/netty/filters/FilterExceptionHandlerException.java
|
{
"start": 239,
"end": 528
}
|
class ____ implements ExceptionHandler<FilterExceptionException, HttpResponse<?>> {

    // Message must stay exactly this — presumably asserted by the tests exercising this handler.
    private static final String FAILURE_MESSAGE = "from exception handler";

    /**
     * Deliberately fails instead of producing a response, exercising the server's behaviour
     * when an exception handler itself throws.
     */
    @Override
    public HttpResponse<?> handle(HttpRequest request, FilterExceptionException exception) {
        throw new RuntimeException(FAILURE_MESSAGE);
    }
}
|
FilterExceptionHandlerException
|
java
|
apache__camel
|
dsl/camel-yaml-dsl/camel-yaml-dsl-deserializers/src/generated/java/org/apache/camel/dsl/yaml/deserializers/ModelDeserializers.java
|
{
"start": 492767,
"end": 498526
}
|
class ____ extends YamlDeserializerBase<JaxbDataFormat> {

    public JaxbDataFormatDeserializer() {
        super(JaxbDataFormat.class);
    }

    @Override
    protected JaxbDataFormat newInstance() {
        return new JaxbDataFormat();
    }

    /**
     * Applies one YAML property to the target {@link JaxbDataFormat}. Keys are normalized from
     * dash-case to camelCase before matching; a {@code false} return signals an unrecognised
     * key to the caller.
     */
    @Override
    protected boolean setProperty(JaxbDataFormat target, String propertyKey,
            String propertyName, Node node) {
        String key = org.apache.camel.util.StringHelper.dashToCamelCase(propertyKey);
        switch (key) {
            case "accessExternalSchemaProtocols":
                target.setAccessExternalSchemaProtocols(asText(node));
                break;
            case "contentTypeHeader":
                target.setContentTypeHeader(asText(node));
                break;
            case "contextPath":
                target.setContextPath(asText(node));
                break;
            case "contextPathIsClassName":
                target.setContextPathIsClassName(asText(node));
                break;
            case "encoding":
                target.setEncoding(asText(node));
                break;
            case "filterNonXmlChars":
                target.setFilterNonXmlChars(asText(node));
                break;
            case "fragment":
                target.setFragment(asText(node));
                break;
            case "id":
                target.setId(asText(node));
                break;
            case "ignoreJAXBElement":
                target.setIgnoreJAXBElement(asText(node));
                break;
            case "jaxbProviderProperties":
                target.setJaxbProviderProperties(asText(node));
                break;
            case "mustBeJAXBElement":
                target.setMustBeJAXBElement(asText(node));
                break;
            case "namespacePrefix":
                target.setNamespacePrefix(asText(node));
                break;
            case "noNamespaceSchemaLocation":
                target.setNoNamespaceSchemaLocation(asText(node));
                break;
            case "objectFactory":
                target.setObjectFactory(asText(node));
                break;
            case "partClass":
                target.setPartClass(asText(node));
                break;
            case "partNamespace":
                target.setPartNamespace(asText(node));
                break;
            case "prettyPrint":
                target.setPrettyPrint(asText(node));
                break;
            case "schema":
                target.setSchema(asText(node));
                break;
            case "schemaLocation":
                target.setSchemaLocation(asText(node));
                break;
            case "schemaSeverityLevel":
                target.setSchemaSeverityLevel(asText(node));
                break;
            case "xmlStreamWriterWrapper":
                target.setXmlStreamWriterWrapper(asText(node));
                break;
            default:
                return false;
        }
        return true;
    }
}
@YamlType(
nodes = "joor",
inline = true,
types = org.apache.camel.model.language.JoorExpression.class,
order = org.apache.camel.dsl.yaml.common.YamlDeserializerResolver.ORDER_LOWEST - 1,
displayName = "jOOR",
description = "Evaluates a jOOR (Java compiled once at runtime) expression.",
deprecated = true,
properties = {
@YamlProperty(name = "expression", type = "string", required = true, description = "The expression value in your chosen language syntax", displayName = "Expression"),
@YamlProperty(name = "id", type = "string", description = "Sets the id of this node", displayName = "Id"),
@YamlProperty(name = "preCompile", type = "boolean", defaultValue = "true", description = "Whether the expression should be pre compiled once during initialization phase. If this is turned off, then the expression is reloaded and compiled on each evaluation.", displayName = "Pre Compile"),
@YamlProperty(name = "resultType", type = "string", description = "Sets the
|
JaxbDataFormatDeserializer
|
java
|
spring-projects__spring-boot
|
buildSrc/src/main/java/org/springframework/boot/build/antora/CopyAntoraContent.java
|
{
"start": 1161,
"end": 1774
}
|
class ____ extends DefaultTask {

    // The (single-file) Antora content that this task copies to the output location.
    private FileCollection source;

    @Inject
    public CopyAntoraContent() {
    }

    @InputFiles
    public FileCollection getSource() {
        return this.source;
    }

    public void setSource(FileCollection source) {
        this.source = source;
    }

    @OutputFile
    public abstract RegularFileProperty getOutputFile();

    /**
     * Copies the single source file to the configured output file, replacing any existing file.
     * Fails if {@code source} does not resolve to exactly one file.
     */
    @TaskAction
    void copyAntoraContent() throws IllegalStateException, IOException {
        Path from = this.source.getSingleFile().toPath();
        Path to = getOutputFile().getAsFile().get().toPath();
        Files.copy(from, to, StandardCopyOption.REPLACE_EXISTING);
    }
}
|
CopyAntoraContent
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.