language stringclasses 1 value | repo stringclasses 60 values | path stringlengths 22 294 | class_span dict | source stringlengths 13 1.16M | target stringlengths 1 113 |
|---|---|---|---|---|---|
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/internal/util/collections/BoundedConcurrentHashMap.java | {
"start": 59841,
"end": 60360
} | class ____ extends AbstractCollection<V> {
@Override
public Iterator<V> iterator() {
return new ValueIterator();
}
@Override
public int size() {
return BoundedConcurrentHashMap.this.size();
}
@Override
public boolean isEmpty() {
return BoundedConcurrentHashMap.this.isEmpty();
}
@Override
public boolean contains(Object o) {
return BoundedConcurrentHashMap.this.containsValue( o );
}
@Override
public void clear() {
BoundedConcurrentHashMap.this.clear();
}
}
final | Values |
java | ReactiveX__RxJava | src/main/java/io/reactivex/rxjava3/schedulers/SchedulerRunnableIntrospection.java | {
"start": 1622,
"end": 1825
} | interface ____ {
/**
* Returns the wrapped action.
*
* @return the wrapped action. Cannot be null.
*/
@NonNull
Runnable getWrappedRunnable();
}
| SchedulerRunnableIntrospection |
java | quarkusio__quarkus | independent-projects/arc/tests/src/test/java/io/quarkus/arc/test/sealed/DependentSealedTest.java | {
"start": 672,
"end": 728
} | class ____ extends MyDependent {
}
}
| MyDependentSubclass |
java | spring-projects__spring-framework | spring-context/src/test/java/org/springframework/aop/aspectj/autoproxy/AspectJAutoProxyCreatorAndLazyInitTargetSourceTests.java | {
"start": 1048,
"end": 1562
} | class ____ {
@Test
void testAdrian() {
ClassPathXmlApplicationContext ctx =
new ClassPathXmlApplicationContext(getClass().getSimpleName() + "-context.xml", getClass());
ITestBean adrian = (ITestBean) ctx.getBean("adrian");
assertThat(LazyTestBean.instantiations).isEqualTo(0);
assertThat(adrian).isNotNull();
adrian.getAge();
assertThat(adrian.getAge()).isEqualTo(68);
assertThat(LazyTestBean.instantiations).isEqualTo(1);
ctx.close();
}
}
| AspectJAutoProxyCreatorAndLazyInitTargetSourceTests |
java | apache__flink | flink-runtime/src/test/java/org/apache/flink/runtime/scheduler/adaptive/allocator/SharedSlotTest.java | {
"start": 1589,
"end": 10840
} | class ____ {
@Test
void testConstructorAssignsPayload() {
final TestingPhysicalSlot physicalSlot = TestingPhysicalSlot.builder().build();
new SharedSlot(new SlotRequestId(), physicalSlot, false, () -> {});
assertThat(physicalSlot.getPayload()).isNotNull();
}
@Test
void testConstructorFailsIfSlotAlreadyHasAssignedPayload() {
assertThatThrownBy(
() -> {
final TestingPhysicalSlot physicalSlot =
TestingPhysicalSlot.builder().build();
physicalSlot.tryAssignPayload(new TestingPhysicalSlotPayload());
new SharedSlot(new SlotRequestId(), physicalSlot, false, () -> {});
})
.isInstanceOf(IllegalStateException.class);
}
@Test
void testAllocateLogicalSlot() {
final TestingPhysicalSlot physicalSlot = TestingPhysicalSlot.builder().build();
final SharedSlot sharedSlot =
new SharedSlot(new SlotRequestId(), physicalSlot, false, () -> {});
final LogicalSlot logicalSlot = sharedSlot.allocateLogicalSlot();
assertThat(logicalSlot.getAllocationId()).isEqualTo(physicalSlot.getAllocationId());
assertThat(logicalSlot.getLocality()).isEqualTo(Locality.UNKNOWN);
assertThat(logicalSlot.getPayload()).isNull();
assertThat(logicalSlot.getTaskManagerLocation())
.isEqualTo(physicalSlot.getTaskManagerLocation());
assertThat(logicalSlot.getTaskManagerGateway())
.isEqualTo(physicalSlot.getTaskManagerGateway());
}
@Test
void testAllocateLogicalSlotIssuesUniqueSlotRequestIds() {
final TestingPhysicalSlot physicalSlot = TestingPhysicalSlot.builder().build();
final SharedSlot sharedSlot =
new SharedSlot(new SlotRequestId(), physicalSlot, false, () -> {});
final LogicalSlot logicalSlot1 = sharedSlot.allocateLogicalSlot();
final LogicalSlot logicalSlot2 = sharedSlot.allocateLogicalSlot();
assertThat(logicalSlot1.getSlotRequestId()).isNotEqualTo(logicalSlot2.getSlotRequestId());
}
@Test
void testReturnLogicalSlotRejectsAliveSlots() {
assertThatThrownBy(
() -> {
final TestingPhysicalSlot physicalSlot =
TestingPhysicalSlot.builder().build();
final SharedSlot sharedSlot =
new SharedSlot(
new SlotRequestId(), physicalSlot, false, () -> {});
final LogicalSlot logicalSlot = sharedSlot.allocateLogicalSlot();
sharedSlot.returnLogicalSlot(logicalSlot);
})
.isInstanceOf(IllegalStateException.class);
}
@Test
void testReturnLogicalSlotRejectsUnknownSlot() {
assertThatThrownBy(
() -> {
final TestingPhysicalSlot physicalSlot =
TestingPhysicalSlot.builder().build();
final SharedSlot sharedSlot =
new SharedSlot(
new SlotRequestId(), physicalSlot, false, () -> {});
final LogicalSlot logicalSlot =
new TestingLogicalSlotBuilder().createTestingLogicalSlot();
logicalSlot.releaseSlot(new Exception("test"));
sharedSlot.returnLogicalSlot(logicalSlot);
})
.isInstanceOf(IllegalStateException.class);
}
@Test
void testReturnLogicalSlotTriggersExternalReleaseOnLastSlot() {
final TestingPhysicalSlot physicalSlot = TestingPhysicalSlot.builder().build();
final AtomicBoolean externalReleaseInitiated = new AtomicBoolean(false);
final SharedSlot sharedSlot =
new SharedSlot(
new SlotRequestId(),
physicalSlot,
false,
() -> externalReleaseInitiated.set(true));
final LogicalSlot logicalSlot1 = sharedSlot.allocateLogicalSlot();
final LogicalSlot logicalSlot2 = sharedSlot.allocateLogicalSlot();
// this implicitly returns the slot
logicalSlot1.releaseSlot(new Exception("test"));
assertThat(externalReleaseInitiated).isFalse();
logicalSlot2.releaseSlot(new Exception("test"));
assertThat(externalReleaseInitiated).isTrue();
}
@Test
void testReleaseDoesNotTriggersExternalRelease() {
final TestingPhysicalSlot physicalSlot = TestingPhysicalSlot.builder().build();
final AtomicBoolean externalReleaseInitiated = new AtomicBoolean(false);
final SharedSlot sharedSlot =
new SharedSlot(
new SlotRequestId(),
physicalSlot,
false,
() -> externalReleaseInitiated.set(true));
sharedSlot.release(new Exception("test"));
assertThat(externalReleaseInitiated).isFalse();
}
@Test
void testReleaseAlsoReleasesLogicalSlots() {
final TestingPhysicalSlot physicalSlot = TestingPhysicalSlot.builder().build();
final SharedSlot sharedSlot =
new SharedSlot(new SlotRequestId(), physicalSlot, false, () -> {});
final LogicalSlot logicalSlot = sharedSlot.allocateLogicalSlot();
sharedSlot.release(new Exception("test"));
assertThat(logicalSlot.isAlive()).isFalse();
}
@Test
void testReleaseForbidsSubsequentLogicalSlotAllocations() {
assertThatThrownBy(
() -> {
final TestingPhysicalSlot physicalSlot =
TestingPhysicalSlot.builder().build();
final SharedSlot sharedSlot =
new SharedSlot(
new SlotRequestId(), physicalSlot, false, () -> {});
sharedSlot.release(new Exception("test"));
sharedSlot.allocateLogicalSlot();
})
.isInstanceOf(IllegalStateException.class);
}
@Test
void testCanReturnLogicalSlotDuringRelease() {
final TestingPhysicalSlot physicalSlot = TestingPhysicalSlot.builder().build();
final SharedSlot sharedSlot =
new SharedSlot(new SlotRequestId(), physicalSlot, false, () -> {});
final LogicalSlot logicalSlot1 = sharedSlot.allocateLogicalSlot();
final LogicalSlot logicalSlot2 = sharedSlot.allocateLogicalSlot();
// both slots try to release the other one, simulating that the failure of one execution due
// to the release also fails others
logicalSlot1.tryAssignPayload(
new TestLogicalSlotPayload(
cause -> {
if (logicalSlot2.isAlive()) {
logicalSlot2.releaseSlot(cause);
}
}));
logicalSlot2.tryAssignPayload(
new TestLogicalSlotPayload(
cause -> {
if (logicalSlot1.isAlive()) {
logicalSlot1.releaseSlot(cause);
}
}));
sharedSlot.release(new Exception("test"));
// if all logical slots were released, and the sharedSlot no longer allows the allocation of
// logical slots, then the slot release was completed
assertThat(logicalSlot1.isAlive()).isFalse();
assertThat(logicalSlot2.isAlive()).isFalse();
assertThatThrownBy(sharedSlot::allocateLogicalSlot)
.withFailMessage(
"Allocation of logical slot should have failed because the slot was released.")
.isInstanceOf(IllegalStateException.class);
}
@Test
void testCannotAllocateLogicalSlotDuringRelease() {
assertThatThrownBy(
() -> {
final TestingPhysicalSlot physicalSlot =
TestingPhysicalSlot.builder().build();
final SharedSlot sharedSlot =
new SharedSlot(
new SlotRequestId(), physicalSlot, false, () -> {});
final LogicalSlot logicalSlot = sharedSlot.allocateLogicalSlot();
logicalSlot.tryAssignPayload(
new TestLogicalSlotPayload(
ignored -> sharedSlot.allocateLogicalSlot()));
sharedSlot.release(new Exception("test"));
})
.isInstanceOf(IllegalStateException.class);
}
private static | SharedSlotTest |
java | elastic__elasticsearch | modules/aggregations/src/test/java/org/elasticsearch/aggregations/bucket/timeseries/TimeSeriesAggregationBuilderTests.java | {
"start": 609,
"end": 1032
} | class ____ extends AggregationBuilderTestCase<TimeSeriesAggregationBuilder> {
@Override
protected TimeSeriesAggregationBuilder createTestAggregatorBuilder() {
// Size set large enough tests not intending to hit the size limit shouldn't see it.
return new TimeSeriesAggregationBuilder(randomAlphaOfLength(10), randomBoolean(), randomIntBetween(1000, 100_000));
}
}
| TimeSeriesAggregationBuilderTests |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/cluster/metadata/DataStreamAction.java | {
"start": 1860,
"end": 9240
} | enum ____ {
ADD_BACKING_INDEX((byte) 0, DataStreamAction.ADD_BACKING_INDEX),
REMOVE_BACKING_INDEX((byte) 1, DataStreamAction.REMOVE_BACKING_INDEX);
private final byte value;
private final String fieldName;
Type(byte value, ParseField field) {
this.value = value;
this.fieldName = field.getPreferredName();
}
public byte value() {
return value;
}
public static Type fromValue(byte value) {
return switch (value) {
case 0 -> ADD_BACKING_INDEX;
case 1 -> REMOVE_BACKING_INDEX;
default -> throw new IllegalArgumentException("no data stream action type for [" + value + "]");
};
}
}
private final Type type;
private String dataStream;
private String index;
private boolean failureStore = false;
public static DataStreamAction addBackingIndex(String dataStream, String index) {
return new DataStreamAction(Type.ADD_BACKING_INDEX, dataStream, index, false);
}
public static DataStreamAction addFailureStoreIndex(String dataStream, String index) {
return new DataStreamAction(Type.ADD_BACKING_INDEX, dataStream, index, true);
}
public static DataStreamAction removeBackingIndex(String dataStream, String index) {
return new DataStreamAction(Type.REMOVE_BACKING_INDEX, dataStream, index, false);
}
public static DataStreamAction removeFailureStoreIndex(String dataStream, String index) {
return new DataStreamAction(Type.REMOVE_BACKING_INDEX, dataStream, index, true);
}
public DataStreamAction(StreamInput in) throws IOException {
this.type = Type.fromValue(in.readByte());
this.dataStream = in.readString();
this.index = in.readString();
this.failureStore = in.getTransportVersion().onOrAfter(TransportVersions.V_8_14_0) && in.readBoolean();
}
private DataStreamAction(Type type, String dataStream, String index, boolean failureStore) {
if (false == Strings.hasText(dataStream)) {
throw new IllegalArgumentException("[data_stream] is required");
}
if (false == Strings.hasText(index)) {
throw new IllegalArgumentException("[index] is required");
}
this.type = Objects.requireNonNull(type, "[type] must not be null");
this.dataStream = dataStream;
this.index = index;
this.failureStore = failureStore;
}
DataStreamAction(Type type) {
this.type = type;
}
public String getDataStream() {
return dataStream;
}
public void setDataStream(String datastream) {
this.dataStream = datastream;
}
public String getIndex() {
return index;
}
public void setIndex(String index) {
this.index = index;
}
public boolean isFailureStore() {
return failureStore;
}
public void setFailureStore(boolean failureStore) {
this.failureStore = failureStore;
}
public Type getType() {
return type;
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
builder.startObject(type.fieldName);
builder.field(DATA_STREAM.getPreferredName(), dataStream);
builder.field(INDEX.getPreferredName(), index);
if (failureStore) {
builder.field(FAILURE_STORE.getPreferredName(), failureStore);
}
builder.endObject();
builder.endObject();
return builder;
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeByte(type.value());
out.writeString(dataStream);
out.writeString(index);
if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_14_0)) {
out.writeBoolean(failureStore);
}
}
public static DataStreamAction fromXContent(XContentParser parser) throws IOException {
return PARSER.apply(parser, null);
}
private static final ObjectParser<DataStreamAction, Void> ADD_BACKING_INDEX_PARSER = parser(
ADD_BACKING_INDEX.getPreferredName(),
() -> new DataStreamAction(Type.ADD_BACKING_INDEX)
);
private static final ObjectParser<DataStreamAction, Void> REMOVE_BACKING_INDEX_PARSER = parser(
REMOVE_BACKING_INDEX.getPreferredName(),
() -> new DataStreamAction(Type.REMOVE_BACKING_INDEX)
);
static {
ADD_BACKING_INDEX_PARSER.declareField(
DataStreamAction::setDataStream,
XContentParser::text,
DATA_STREAM,
ObjectParser.ValueType.STRING
);
ADD_BACKING_INDEX_PARSER.declareField(DataStreamAction::setIndex, XContentParser::text, INDEX, ObjectParser.ValueType.STRING);
ADD_BACKING_INDEX_PARSER.declareField(
DataStreamAction::setFailureStore,
XContentParser::booleanValue,
FAILURE_STORE,
ObjectParser.ValueType.BOOLEAN
);
REMOVE_BACKING_INDEX_PARSER.declareField(
DataStreamAction::setDataStream,
XContentParser::text,
DATA_STREAM,
ObjectParser.ValueType.STRING
);
REMOVE_BACKING_INDEX_PARSER.declareField(DataStreamAction::setIndex, XContentParser::text, INDEX, ObjectParser.ValueType.STRING);
REMOVE_BACKING_INDEX_PARSER.declareField(
DataStreamAction::setFailureStore,
XContentParser::booleanValue,
FAILURE_STORE,
ObjectParser.ValueType.BOOLEAN
);
}
private static ObjectParser<DataStreamAction, Void> parser(String name, Supplier<DataStreamAction> supplier) {
ObjectParser<DataStreamAction, Void> parser = new ObjectParser<>(name, supplier);
return parser;
}
public static final ConstructingObjectParser<DataStreamAction, Void> PARSER = new ConstructingObjectParser<>(
"data_stream_action",
a -> {
// Take the first action and error if there is more than one action
DataStreamAction action = null;
for (Object o : a) {
if (o != null) {
if (action == null) {
action = (DataStreamAction) o;
} else {
throw new IllegalArgumentException("too many data stream operations declared on operation entry");
}
}
}
return action;
}
);
static {
PARSER.declareObject(optionalConstructorArg(), ADD_BACKING_INDEX_PARSER, ADD_BACKING_INDEX);
PARSER.declareObject(optionalConstructorArg(), REMOVE_BACKING_INDEX_PARSER, REMOVE_BACKING_INDEX);
}
@Override
public boolean equals(Object obj) {
if (obj == null || obj.getClass() != getClass()) {
return false;
}
DataStreamAction other = (DataStreamAction) obj;
return Objects.equals(type, other.type)
&& Objects.equals(dataStream, other.dataStream)
&& Objects.equals(index, other.index)
&& Objects.equals(failureStore, other.failureStore);
}
@Override
public int hashCode() {
return Objects.hash(type, dataStream, index, failureStore);
}
}
| Type |
java | apache__flink | flink-filesystems/flink-gs-fs-hadoop/src/main/java/org/apache/flink/fs/gs/GSFileSystemFactory.java | {
"start": 7115,
"end": 8207
} | class ____ implements ConfigUtils.ConfigContext {
@Override
public Optional<String> getenv(String name) {
return Optional.ofNullable(System.getenv(name));
}
@Override
public org.apache.hadoop.conf.Configuration loadHadoopConfigFromDir(String configDir) {
org.apache.hadoop.conf.Configuration hadoopConfig =
new org.apache.hadoop.conf.Configuration();
hadoopConfig.addResource(new Path(configDir, "core-default.xml"));
hadoopConfig.addResource(new Path(configDir, "core-site.xml"));
hadoopConfig.reloadConfiguration();
return hadoopConfig;
}
@Override
public GoogleCredentials loadStorageCredentialsFromFile(String credentialsPath) {
try (FileInputStream credentialsStream = new FileInputStream(credentialsPath)) {
return GoogleCredentials.fromStream(credentialsStream);
} catch (IOException ex) {
throw new RuntimeException(ex);
}
}
}
}
| RuntimeConfigContext |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/jpa/graphs/FetchGraphTest.java | {
"start": 5458,
"end": 5575
} | class ____ {
@Id
@GeneratedValue(strategy = GenerationType.IDENTITY)
Integer id;
String name;
}
}
| FinanceEntity |
java | quarkusio__quarkus | extensions/quartz/deployment/src/test/java/io/quarkus/quartz/test/timezone/ScheduledMethodTimeZoneTest.java | {
"start": 3221,
"end": 4093
} | class ____ {
static final CountDownLatch LATCH = new CountDownLatch(2);
static final CountDownLatch TIME_ZONE_1_LATCH = new CountDownLatch(1);
static final CountDownLatch TIME_ZONE_2_LATCH = new CountDownLatch(1);
@Scheduled(every = "1s")
void checkEverySecond() {
LATCH.countDown();
}
@Scheduled(identity = "simpleJobs1", cron = "{simpleJobs1.cron}", timeZone = "{simpleJobs1.timeZone}")
void checkEverySecondCronTimeZone1(ScheduledExecution execution) {
// this method should not be executed in the test
TIME_ZONE_1_LATCH.countDown();
}
@Scheduled(identity = "simpleJobs2", cron = "{simpleJobs2.cron}", timeZone = "{simpleJobs2.timeZone}")
void checkEverySecondCronTimeZone2() {
TIME_ZONE_2_LATCH.countDown();
}
}
}
| Jobs |
java | apache__camel | components/camel-jsonpath/src/test/java/org/apache/camel/jsonpath/SpringJsonPathCustomReadMapperBeanTest.java | {
"start": 1230,
"end": 2390
} | class ____ extends CamelSpringTestSupport {
@Override
protected AbstractApplicationContext createApplicationContext() {
return new ClassPathXmlApplicationContext("org/apache/camel/jsonpath/SpringJsonPathCustomReadMapperBeanTest.xml");
}
@Test
public void testJsonPathSplitNumbers() throws Exception {
// /CAMEL-17956
MockEndpoint m = getMockEndpoint("mock:result");
m.expectedMessageCount(1);
template.requestBody("direct:start", new File("src/test/resources/bignumbers.json"), String.class);
Object resultFromMock = m.getReceivedExchanges().get(0).getMessage().getBody();
// assertTrue(resultFromMock instanceof Map);
// Map<String,Object> resultMap = (Map)resultFromMock;
// assertTrue(resultMap.get("principalAmountOnValueDate") instanceof BigDecimal);
assertTrue(resultFromMock instanceof String);
assertTrue(resultFromMock.toString().contains("121002700.0"));
assertTrue(resultFromMock.toString().contains("-91000000.0"));
MockEndpoint.assertIsSatisfied(context);
}
}
| SpringJsonPathCustomReadMapperBeanTest |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/DefaultCharsetTest.java | {
"start": 9098,
"end": 9556
} | class ____ {
void f(String s, File f) throws Exception {
new FileReader(s);
new FileReader(f);
}
}
""")
.expectUnchanged()
.setArgs("-XDandroidCompatible=true")
.doTest();
}
@Test
public void androidWriter() {
refactoringTest()
.addInputLines(
"in/Test.java",
"""
import java.io.*;
| Test |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/DelegationTokenAuthenticator.java | {
"start": 2147,
"end": 3261
} | class ____ implements Authenticator {
private static Logger LOG =
LoggerFactory.getLogger(DelegationTokenAuthenticator.class);
private static final String CONTENT_TYPE = "Content-Type";
private static final String APPLICATION_JSON_MIME = "application/json";
private static final String HTTP_GET = "GET";
private static final String HTTP_PUT = "PUT";
public static final String OP_PARAM = "op";
private static final String OP_PARAM_EQUALS = OP_PARAM + "=";
public static final String DELEGATION_TOKEN_HEADER =
"X-Hadoop-Delegation-Token";
public static final String DELEGATION_PARAM = "delegation";
public static final String TOKEN_PARAM = "token";
public static final String RENEWER_PARAM = "renewer";
public static final String SERVICE_PARAM = "service";
public static final String DELEGATION_TOKEN_JSON = "Token";
public static final String DELEGATION_TOKEN_URL_STRING_JSON = "urlString";
public static final String RENEW_DELEGATION_TOKEN_JSON = "long";
/**
* DelegationToken operations.
*/
@InterfaceAudience.Private
public | DelegationTokenAuthenticator |
java | alibaba__fastjson | src/test/java/com/alibaba/json/bvt/bug/Issue408.java | {
"start": 3600,
"end": 4227
} | class ____ {
private VO[] volist;
private Long longid0;
private Long longid1;
public VO[] getVolist() {
return volist;
}
public void setVolist(VO[] volist) {
this.volist = volist;
}
public Long getLongid1() {
return longid1;
}
public void setLongid1(Long longid1) {
this.longid1 = longid1;
}
public Long getLongid0() {
return longid0;
}
public void setLongid0(Long longid0) {
this.longid0 = longid0;
}
}
public static | VOList |
java | quarkusio__quarkus | integration-tests/main/src/main/java/io/quarkus/it/arc/interceptor/Simple.java | {
"start": 607,
"end": 740
} | interface ____ {
@Nonbinding
String name();
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.METHOD)
@ | Simple |
java | spring-projects__spring-security | oauth2/oauth2-authorization-server/src/main/java/org/springframework/security/oauth2/server/authorization/oidc/authentication/OidcUserInfoAuthenticationContext.java | {
"start": 1628,
"end": 3024
} | class ____ implements OAuth2AuthenticationContext {
private final Map<Object, Object> context;
private OidcUserInfoAuthenticationContext(Map<Object, Object> context) {
this.context = Collections.unmodifiableMap(new HashMap<>(context));
}
@SuppressWarnings("unchecked")
@Nullable
@Override
public <V> V get(Object key) {
return hasKey(key) ? (V) this.context.get(key) : null;
}
@Override
public boolean hasKey(Object key) {
Assert.notNull(key, "key cannot be null");
return this.context.containsKey(key);
}
/**
* Returns the {@link OAuth2AccessToken OAuth 2.0 Access Token}.
* @return the {@link OAuth2AccessToken}
*/
public OAuth2AccessToken getAccessToken() {
return get(OAuth2AccessToken.class);
}
/**
* Returns the {@link OAuth2Authorization authorization}.
* @return the {@link OAuth2Authorization}
*/
public OAuth2Authorization getAuthorization() {
return get(OAuth2Authorization.class);
}
/**
* Constructs a new {@link Builder} with the provided
* {@link OidcUserInfoAuthenticationToken}.
* @param authentication the {@link OidcUserInfoAuthenticationToken}
* @return the {@link Builder}
*/
public static Builder with(OidcUserInfoAuthenticationToken authentication) {
return new Builder(authentication);
}
/**
* A builder for {@link OidcUserInfoAuthenticationContext}.
*/
public static final | OidcUserInfoAuthenticationContext |
java | apache__flink | flink-python/src/main/java/org/apache/flink/table/runtime/operators/python/AbstractEmbeddedStatelessFunctionOperator.java | {
"start": 1769,
"end": 4625
} | class ____
extends AbstractEmbeddedPythonFunctionOperator<RowData>
implements OneInputStreamOperator<RowData, RowData>, BoundedOneInput {
private static final long serialVersionUID = 1L;
/** The offsets of user-defined function inputs. */
protected final int[] udfInputOffsets;
/** The input logical type. */
protected final RowType inputType;
/** The user-defined function input logical type. */
protected final RowType udfInputType;
/** The user-defined function output logical type. */
protected final RowType udfOutputType;
/** The GenericRowData reused holding the execution result of python udf. */
protected transient GenericRowData reuseResultRowData;
/** The collector used to collect records. */
protected transient StreamRecordRowDataWrappingCollector rowDataWrapper;
protected transient PythonTypeUtils.DataConverter[] userDefinedFunctionInputConverters;
protected transient Object[] userDefinedFunctionInputArgs;
protected transient PythonTypeUtils.DataConverter[] userDefinedFunctionOutputConverters;
public AbstractEmbeddedStatelessFunctionOperator(
Configuration config,
RowType inputType,
RowType udfInputType,
RowType udfOutputType,
int[] udfInputOffsets) {
super(config);
this.inputType = Preconditions.checkNotNull(inputType);
this.udfInputType = Preconditions.checkNotNull(udfInputType);
this.udfOutputType = Preconditions.checkNotNull(udfOutputType);
this.udfInputOffsets = Preconditions.checkNotNull(udfInputOffsets);
}
@Override
public void open() throws Exception {
super.open();
rowDataWrapper = new StreamRecordRowDataWrappingCollector(output);
reuseResultRowData = new GenericRowData(udfOutputType.getFieldCount());
RowType userDefinedFunctionInputType =
new RowType(
Arrays.stream(udfInputOffsets)
.mapToObj(i -> inputType.getFields().get(i))
.collect(Collectors.toList()));
userDefinedFunctionInputConverters =
userDefinedFunctionInputType.getFields().stream()
.map(RowType.RowField::getType)
.map(PythonTypeUtils::toDataConverter)
.toArray(PythonTypeUtils.DataConverter[]::new);
userDefinedFunctionInputArgs = new Object[udfInputOffsets.length];
userDefinedFunctionOutputConverters =
udfOutputType.getFields().stream()
.map(RowType.RowField::getType)
.map(PythonTypeUtils::toDataConverter)
.toArray(PythonTypeUtils.DataConverter[]::new);
}
}
| AbstractEmbeddedStatelessFunctionOperator |
java | lettuce-io__lettuce-core | src/main/java/io/lettuce/core/protocol/CommandArgs.java | {
"start": 1929,
"end": 10160
} | class ____<K, V> {
static final byte[] CRLF = "\r\n".getBytes(StandardCharsets.UTF_8);
protected final RedisCodec<K, V> codec;
final List<SingularArgument> singularArguments = new ArrayList<>(10);
/**
* @param codec Codec used to encode/decode keys and values, must not be {@code null}.
*/
public CommandArgs(RedisCodec<K, V> codec) {
LettuceAssert.notNull(codec, "RedisCodec must not be null");
this.codec = codec;
}
/**
*
* @return the number of arguments.
*/
public int count() {
return singularArguments.size();
}
/**
* Adds a key argument.
*
* @param key the key
* @return the command args.
*/
public CommandArgs<K, V> addKey(K key) {
singularArguments.add(KeyArgument.of(key, codec));
return this;
}
/**
* Add multiple key arguments.
*
* @param keys must not be {@code null}.
* @return the command args.
*/
public CommandArgs<K, V> addKeys(Iterable<K> keys) {
LettuceAssert.notNull(keys, "Keys must not be null");
for (K key : keys) {
addKey(key);
}
return this;
}
/**
* Add multiple key arguments.
*
* @param keys must not be {@code null}.
* @return the command args.
*/
@SafeVarargs
public final CommandArgs<K, V> addKeys(K... keys) {
LettuceAssert.notNull(keys, "Keys must not be null");
for (K key : keys) {
addKey(key);
}
return this;
}
/**
* Add a value argument.
*
* @param value the value
* @return the command args.
*/
public CommandArgs<K, V> addValue(V value) {
singularArguments.add(ValueArgument.of(value, codec));
return this;
}
/**
* Add multiple value arguments.
*
* @param values must not be {@code null}.
* @return the command args.
*/
public CommandArgs<K, V> addValues(Iterable<V> values) {
LettuceAssert.notNull(values, "Values must not be null");
for (V value : values) {
addValue(value);
}
return this;
}
/**
* Add multiple value arguments.
*
* @param values must not be {@code null}.
* @return the command args.
*/
@SafeVarargs
public final CommandArgs<K, V> addValues(V... values) {
LettuceAssert.notNull(values, "Values must not be null");
for (V value : values) {
addValue(value);
}
return this;
}
/**
* Add a map (hash) argument.
*
* @param map the map, must not be {@code null}.
* @return the command args.
*/
public CommandArgs<K, V> add(Map<K, V> map) {
LettuceAssert.notNull(map, "Map must not be null");
for (Map.Entry<K, V> entry : map.entrySet()) {
addKey(entry.getKey()).addValue(entry.getValue());
}
return this;
}
/**
* Add a string argument. The argument is represented as bulk string.
*
* @param s the string.
* @return the command args.
*/
public CommandArgs<K, V> add(String s) {
singularArguments.add(StringArgument.of(s));
return this;
}
/**
* Add a string as char-array. The argument is represented as bulk string.
*
* @param cs the string.
* @return the command args.
*/
public CommandArgs<K, V> add(char[] cs) {
singularArguments.add(CharArrayArgument.of(cs));
return this;
}
/**
* Add an 64-bit integer (long) argument.
*
* @param n the argument.
* @return the command args.
*/
public CommandArgs<K, V> add(long n) {
singularArguments.add(IntegerArgument.of(n));
return this;
}
/**
* Add a double argument.
*
* @param n the double argument.
* @return the command args.
*/
public CommandArgs<K, V> add(double n) {
singularArguments.add(DoubleArgument.of(n));
return this;
}
/**
* Add a byte-array argument. The argument is represented as bulk string.
*
* @param value the byte-array.
* @return the command args.
*/
public CommandArgs<K, V> add(byte[] value) {
singularArguments.add(BytesArgument.of(value));
return this;
}
/**
* Add a {@link CommandKeyword} argument. The argument is represented as bulk string.
*
* @param keyword must not be {@code null}.
* @return the command args.
*/
public CommandArgs<K, V> add(CommandKeyword keyword) {
LettuceAssert.notNull(keyword, "CommandKeyword must not be null");
singularArguments.add(ProtocolKeywordArgument.of(keyword));
return this;
}
/**
* Add a {@link CommandType} argument. The argument is represented as bulk string.
*
* @param type must not be {@code null}.
* @return the command args.
*/
public CommandArgs<K, V> add(CommandType type) {
LettuceAssert.notNull(type, "CommandType must not be null");
singularArguments.add(ProtocolKeywordArgument.of(type));
return this;
}
/**
* Add a {@link ProtocolKeyword} argument. The argument is represented as bulk string.
*
* @param keyword the keyword, must not be {@code null}
* @return the command args.
*/
public CommandArgs<K, V> add(ProtocolKeyword keyword) {
LettuceAssert.notNull(keyword, "CommandKeyword must not be null");
singularArguments.add(ProtocolKeywordArgument.of(keyword));
return this;
}
/**
* Add all arguments from {@link CommandArgs}
*
* @param args the args, must not be {@code null}
* @return the command args.
* @since 6.2
*/
public CommandArgs<K, V> addAll(CommandArgs<?, ?> args) {
LettuceAssert.notNull(args, "CommandArgs must not be null");
this.singularArguments.addAll(args.singularArguments);
return this;
}
@Override
public String toString() {
final StringBuilder sb = new StringBuilder();
sb.append(getClass().getSimpleName());
ByteBuf buffer = UnpooledByteBufAllocator.DEFAULT.buffer(singularArguments.size() * 10);
encode(buffer);
buffer.resetReaderIndex();
byte[] bytes = new byte[buffer.readableBytes()];
buffer.readBytes(bytes);
sb.append(" [buffer=").append(new String(bytes));
sb.append(']');
buffer.release();
return sb.toString();
}
/**
* Returns a command string representation of {@link CommandArgs} with annotated key and value parameters.
*
* {@code args.addKey("mykey").add(2.0)} will return {@code key<mykey> 2.0}.
*
* @return the command string representation.
*/
public String toCommandString() {
return LettuceStrings.collectionToDelimitedString(singularArguments, " ", "", "");
}
/**
* Returns the first integer argument.
*
* @return the first integer argument or {@code null}.
*/
@Deprecated
public Long getFirstInteger() {
return CommandArgsAccessor.getFirstInteger(this);
}
/**
* Returns the first string argument.
*
* @return the first string argument or {@code null}.
*/
@Deprecated
public String getFirstString() {
return CommandArgsAccessor.getFirstString(this);
}
/**
* Returns the first key argument in its byte-encoded representation.
*
* @return the first key argument in its byte-encoded representation or {@code null}.
*/
public ByteBuffer getFirstEncodedKey() {
return CommandArgsAccessor.encodeFirstKey(this);
}
/**
* Encode the {@link CommandArgs} and write the arguments to the {@link ByteBuf}.
*
* @param buf the target buffer.
*/
public void encode(ByteBuf buf) {
buf.touch("CommandArgs.encode(…)");
for (SingularArgument singularArgument : singularArguments) {
singularArgument.encode(buf);
}
}
/**
* Single argument wrapper that can be encoded.
*/
static abstract | CommandArgs |
java | spring-projects__spring-framework | spring-test/src/main/java/org/springframework/test/json/JsonCompareMode.java | {
"start": 765,
"end": 874
} | enum ____ {
/**
* Strict checking.
*/
STRICT,
/**
* Lenient checking.
*/
LENIENT
}
| JsonCompareMode |
java | micronaut-projects__micronaut-core | aop/src/main/java/io/micronaut/aop/internal/intercepted/InterceptedMethodUtil.java | {
"start": 1549,
"end": 6798
} | class ____ {
private InterceptedMethodUtil() {
}
/**
* Find possible {@link InterceptedMethod} implementation.
*
* @param context The {@link MethodInvocationContext}
* @param conversionService The {@link ConversionService}
* @return The {@link InterceptedMethod}
* @since 4.0.0
*/
@NonNull
public static InterceptedMethod of(@NonNull MethodInvocationContext<?, ?> context, @NonNull ConversionService conversionService) {
if (context.isSuspend()) {
KotlinInterceptedMethodImpl kotlinInterceptedMethod = KotlinInterceptedMethodImpl.of(context);
if (kotlinInterceptedMethod != null) {
return kotlinInterceptedMethod;
}
return new SynchronousInterceptedMethod(context);
} else {
ReturnType<?> returnType = context.getReturnType();
Class<?> returnTypeClass = returnType.getType();
if (returnTypeClass == void.class || returnTypeClass == String.class) {
// Micro Optimization
return new SynchronousInterceptedMethod(context);
} else if (CompletionStage.class.isAssignableFrom(returnTypeClass) || Future.class.isAssignableFrom(returnTypeClass)) {
return new CompletionStageInterceptedMethod(context, conversionService);
} else if (PublisherInterceptedMethod.isConvertibleToPublisher(returnTypeClass)) {
if (ReactorInterceptedMethod.REACTOR_AVAILABLE) {
return new ReactorInterceptedMethod(context, conversionService);
}
return new PublisherInterceptedMethod(context, conversionService);
} else {
return new SynchronousInterceptedMethod(context);
}
}
}
/**
* Resolve interceptor binding annotations from the metadata.
*
* @param annotationMetadata The annotation metadata
* @param interceptorKind The interceptor kind
* @return the annotation values
*/
public static io.micronaut.core.annotation.AnnotationValue<?>[] resolveInterceptorBinding(
AnnotationMetadata annotationMetadata,
InterceptorKind interceptorKind) {
final List<AnnotationValue<InterceptorBinding>> interceptorBindings
= annotationMetadata.getAnnotationValuesByType(InterceptorBinding.class);
if (!interceptorBindings.isEmpty()) {
return interceptorBindings
.stream()
.filter(av -> {
final InterceptorKind kind = av.enumValue("kind", InterceptorKind.class)
.orElse(InterceptorKind.AROUND);
return kind == interceptorKind;
})
.toArray(io.micronaut.core.annotation.AnnotationValue[]::new);
}
return AnnotationUtil.ZERO_ANNOTATION_VALUES;
}
/**
* Does the given metadata have AOP advice declared.
*
* @param annotationMetadata The annotation metadata
* @return True if it does
*/
public static boolean hasAroundStereotype(@Nullable AnnotationMetadata annotationMetadata) {
return hasInterceptorBinding(annotationMetadata,
false,
Around.class,
InterceptorKind.AROUND);
}
/**
* Does the given metadata have introduction declared.
*
* @param annotationMetadata The annotation metadata
* @return True if it does
*/
public static boolean hasIntroductionStereotype(@Nullable AnnotationMetadata annotationMetadata) {
return hasInterceptorBinding(annotationMetadata,
false,
Introduction.class,
InterceptorKind.INTRODUCTION);
}
/**
* Does the given metadata have declared AOP advice.
*
* @param annotationMetadata The annotation metadata
* @return True if it does
*/
public static boolean hasDeclaredAroundAdvice(@Nullable AnnotationMetadata annotationMetadata) {
return hasInterceptorBinding(annotationMetadata,
true,
Around.class,
InterceptorKind.AROUND);
}
private static boolean hasInterceptorBinding(AnnotationMetadata annotationMetadata,
boolean declared,
Class<? extends Annotation> interceptorAnnotation,
InterceptorKind kind) {
List<AnnotationValue<InterceptorBinding>> annotationsValues;
if (declared) {
if (annotationMetadata.hasDeclaredStereotype(interceptorAnnotation)) {
return true;
}
annotationsValues = annotationMetadata.getDeclaredAnnotationValuesByType(InterceptorBinding.class);
} else {
if (annotationMetadata.hasStereotype(interceptorAnnotation)) {
return true;
}
annotationsValues = annotationMetadata.getAnnotationValuesByType(InterceptorBinding.class);
}
return annotationsValues
.stream().anyMatch(av -> av.enumValue("kind", InterceptorKind.class).orElse(InterceptorKind.AROUND) == kind);
}
}
| InterceptedMethodUtil |
java | reactor__reactor-core | reactor-core/src/test/java/reactor/core/publisher/MonoToCompletableFutureTest.java | {
"start": 998,
"end": 2414
} | class ____ {
@Test
public void normal() throws Exception {
CompletableFuture<Integer> f = Mono.just(1)
.toFuture();
assertThat(f.get()).isEqualTo(1);
}
@Test
public void error() {
CompletableFuture<Integer> f =
Mono.<Integer>error(new IllegalStateException("test")).toFuture();
assertThat(f.isDone()).isTrue();
assertThat(f.isCompletedExceptionally()).isTrue();
assertThatExceptionOfType(ExecutionException.class)
.isThrownBy(f::get)
.withCauseExactlyInstanceOf(IllegalStateException.class)
.withMessage("java.lang.IllegalStateException: test");
}
@Test
public void empty() throws Exception {
CompletableFuture<Integer> f = Mono.<Integer>empty().toFuture();
assertThat(f.get()).isNull();
}
@Test
public void monoSourceIsntCancelled() {
AtomicBoolean flag = new AtomicBoolean();
assertThat(Mono.just("value")
.doOnCancel(() -> flag.set(true))
.toFuture()
).isCompletedWithValue("value");
assertThat(flag).as("cancelled").isFalse();
}
@Test
public void sourceCanBeCancelledExplicitlyByOnNext() {
AtomicBoolean flag = new AtomicBoolean();
assertThat(Flux.just("value")
.doOnCancel(() -> flag.set(true))
.subscribeWith(new MonoToCompletableFuture<>(true))
).isCompletedWithValue("value");
assertThat(flag).as("cancelled").isTrue();
}
}
| MonoToCompletableFutureTest |
java | mapstruct__mapstruct | processor/src/test/java/org/mapstruct/ap/test/factories/CustomMapImpl.java | {
"start": 257,
"end": 537
} | class ____<K, V> extends HashMap<K, V> implements CustomMap<K, V> {
private final String typeProp;
public CustomMapImpl(String typeProp) {
this.typeProp = typeProp;
}
@Override
public String getTypeProp() {
return typeProp;
}
}
| CustomMapImpl |
java | apache__camel | core/camel-management-api/src/main/java/org/apache/camel/api/management/mbean/ManagedDelayerMBean.java | {
"start": 973,
"end": 1637
} | interface ____ extends ManagedProcessorMBean {
@ManagedAttribute(description = "Delay")
Long getDelay();
@ManagedOperation(description = "Set a constant delay in millis")
void constantDelay(Integer millis);
@ManagedAttribute(description = "Number of exchanges currently delayed")
int getDelayedCount();
@ManagedAttribute(description = "Enables asynchronous delay which means the thread will not block while delaying")
Boolean isAsyncDelayed();
@ManagedAttribute(description = "Whether or not the caller should run the task when it was rejected by the thread pool")
Boolean isCallerRunsWhenRejected();
}
| ManagedDelayerMBean |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/boot/models/annotations/internal/CompositeTypeRegistrationsAnnotation.java | {
"start": 756,
"end": 2046
} | class ____
implements CompositeTypeRegistrations, RepeatableContainer<CompositeTypeRegistration> {
private org.hibernate.annotations.CompositeTypeRegistration[] value;
/**
* Used in creating dynamic annotation instances (e.g. from XML)
*/
public CompositeTypeRegistrationsAnnotation(ModelsContext modelContext) {
}
/**
* Used in creating annotation instances from JDK variant
*/
public CompositeTypeRegistrationsAnnotation(
CompositeTypeRegistrations annotation,
ModelsContext modelContext) {
this.value = extractJdkValue(
annotation,
HibernateAnnotations.COMPOSITE_TYPE_REGISTRATIONS,
"value",
modelContext
);
}
/**
* Used in creating annotation instances from Jandex variant
*/
public CompositeTypeRegistrationsAnnotation(
Map<String, Object> attributeValues,
ModelsContext modelContext) {
this.value = (CompositeTypeRegistration[]) attributeValues.get( "value" );
}
@Override
public Class<? extends Annotation> annotationType() {
return CompositeTypeRegistrations.class;
}
@Override
public org.hibernate.annotations.CompositeTypeRegistration[] value() {
return value;
}
public void value(org.hibernate.annotations.CompositeTypeRegistration[] value) {
this.value = value;
}
}
| CompositeTypeRegistrationsAnnotation |
java | apache__flink | flink-core/src/main/java/org/apache/flink/util/MutableURLClassLoader.java | {
"start": 1041,
"end": 1583
} | class ____ extends URLClassLoader {
static {
ClassLoader.registerAsParallelCapable();
}
public MutableURLClassLoader(URL[] urls, ClassLoader parent) {
super(urls, parent);
}
@Override
public void addURL(URL url) {
super.addURL(url);
}
/**
* Copy the classloader for each job and these jobs can add their jar files to the classloader
* independently.
*
* @return the copied classloader
*/
public abstract MutableURLClassLoader copy();
}
| MutableURLClassLoader |
java | quarkusio__quarkus | independent-projects/tools/devtools-common/src/main/java/io/quarkus/platform/descriptor/loader/json/ResourceLoader.java | {
"start": 145,
"end": 569
} | interface ____ {
<T> T loadResourceAsPath(String name, ResourcePathConsumer<T> consumer) throws IOException;
default <T> T loadResource(String name, ResourceInputStreamConsumer<T> consumer) throws IOException {
return this.loadResourceAsPath(name, p -> {
try (InputStream is = Files.newInputStream(p)) {
return consumer.consume(is);
}
});
}
}
| ResourceLoader |
java | elastic__elasticsearch | x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsCartesianPointDocValuesAndSourceEvaluator.java | {
"start": 1180,
"end": 3960
} | class ____ implements EvalOperator.ExpressionEvaluator {
private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(SpatialIntersectsCartesianPointDocValuesAndSourceEvaluator.class);
private final Source source;
private final EvalOperator.ExpressionEvaluator left;
private final EvalOperator.ExpressionEvaluator right;
private final DriverContext driverContext;
private Warnings warnings;
public SpatialIntersectsCartesianPointDocValuesAndSourceEvaluator(Source source,
EvalOperator.ExpressionEvaluator left, EvalOperator.ExpressionEvaluator right,
DriverContext driverContext) {
this.source = source;
this.left = left;
this.right = right;
this.driverContext = driverContext;
}
@Override
public Block eval(Page page) {
try (LongBlock leftBlock = (LongBlock) left.eval(page)) {
try (BytesRefBlock rightBlock = (BytesRefBlock) right.eval(page)) {
return eval(page.getPositionCount(), leftBlock, rightBlock);
}
}
}
@Override
public long baseRamBytesUsed() {
long baseRamBytesUsed = BASE_RAM_BYTES_USED;
baseRamBytesUsed += left.baseRamBytesUsed();
baseRamBytesUsed += right.baseRamBytesUsed();
return baseRamBytesUsed;
}
public BooleanBlock eval(int positionCount, LongBlock leftBlock, BytesRefBlock rightBlock) {
try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) {
position: for (int p = 0; p < positionCount; p++) {
boolean allBlocksAreNulls = true;
if (!leftBlock.isNull(p)) {
allBlocksAreNulls = false;
}
if (!rightBlock.isNull(p)) {
allBlocksAreNulls = false;
}
if (allBlocksAreNulls) {
result.appendNull();
continue position;
}
try {
SpatialIntersects.processCartesianPointDocValuesAndSource(result, p, leftBlock, rightBlock);
} catch (IllegalArgumentException | IOException e) {
warnings().registerException(e);
result.appendNull();
}
}
return result.build();
}
}
@Override
public String toString() {
return "SpatialIntersectsCartesianPointDocValuesAndSourceEvaluator[" + "left=" + left + ", right=" + right + "]";
}
@Override
public void close() {
Releasables.closeExpectNoException(left, right);
}
private Warnings warnings() {
if (warnings == null) {
this.warnings = Warnings.createWarnings(
driverContext.warningsMode(),
source.source().getLineNumber(),
source.source().getColumnNumber(),
source.text()
);
}
return warnings;
}
static | SpatialIntersectsCartesianPointDocValuesAndSourceEvaluator |
java | apache__camel | components/camel-test/camel-test-main-junit5/src/test/java/org/apache/camel/test/main/junit5/annotation/WithMainClassTest.java | {
"start": 2312,
"end": 2512
} | class ____ {
@Test
void shouldSupportSuperNestedTest() throws Exception {
shouldSupportNestedTest();
}
}
}
@Nested
| SuperNestedTest |
java | FasterXML__jackson-databind | src/main/java/tools/jackson/databind/cfg/ConfigOverrides.java | {
"start": 453,
"end": 862
} | class ____
implements java.io.Serializable,
Snapshottable<ConfigOverrides>
{
private static final long serialVersionUID = 3L;
/**
* Convenience value used as the default root setting.
* Note that although in a way it would make sense use "ALWAYS" for both,
* problems arise in some cases where default is seen as explicit setting,
* overriding possible per- | ConfigOverrides |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/token/delegation/TestDelegationToken.java | {
"start": 2811,
"end": 3408
} | class ____
extends AbstractDelegationTokenIdentifier
implements Writable {
public TestDelegationTokenIdentifier() {
}
public TestDelegationTokenIdentifier(Text owner, Text renewer, Text realUser) {
super(owner, renewer, realUser);
}
@Override
public Text getKind() {
return KIND;
}
@Override
public void write(DataOutput out) throws IOException {
super.write(out);
}
@Override
public void readFields(DataInput in) throws IOException {
super.readFields(in);
}
}
public static | TestDelegationTokenIdentifier |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/boot/models/SourceModelTestHelper.java | {
"start": 15971,
"end": 16230
} | class ____ : " + className );
}
try (final InputStream classFileStream = classUrl.openStream() ) {
indexer.index( classFileStream );
}
catch (IOException e) {
throw new RuntimeException( e );
}
} );
return indexer.complete();
}
}
| file |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/test/LambdaTestUtils.java | {
"start": 38675,
"end": 39268
} | class ____<T>
implements PrivilegedExceptionAction<T> {
private final Callable<T> callable;
/**
* Constructor.
* @param callable a non-null callable/closure.
*/
public PrivilegedOperation(final Callable<T> callable) {
this.callable = Preconditions.checkNotNull(callable);
}
@Override
public T run() throws Exception {
return callable.call();
}
}
/**
* VoidCaller variant of {@link PrivilegedOperation}: converts
* a void-returning closure to an action which {@code doAs} can call.
*/
public static | PrivilegedOperation |
java | mapstruct__mapstruct | processor/src/test/java/org/mapstruct/ap/test/bugs/_895/MultiArrayMapper.java | {
"start": 678,
"end": 922
} | class ____ {
private List<byte[]> bytes;
public List<byte[]> getBytes() {
return bytes;
}
public void setBytes(List<byte[]> bytes) {
this.bytes = bytes;
}
}
}
| WithListOfByteArray |
java | spring-projects__spring-framework | spring-test/src/main/java/org/springframework/test/web/servlet/MockMvcBuilderSupport.java | {
"start": 1516,
"end": 3643
} | class ____ {
/**
* Delegates to {@link #createMockMvc(Filter[], MockServletConfig, WebApplicationContext, RequestBuilder, List, List, List)}
* for creation of the {@link MockMvc} instance and configures that instance
* with the supplied {@code defaultResponseCharacterEncoding}.
* @since 5.3.10
*/
protected final MockMvc createMockMvc(Filter[] filters, MockServletConfig servletConfig,
WebApplicationContext webAppContext, @Nullable RequestBuilder defaultRequestBuilder,
@Nullable Charset defaultResponseCharacterEncoding,
List<ResultMatcher> globalResultMatchers, List<ResultHandler> globalResultHandlers,
@Nullable List<DispatcherServletCustomizer> dispatcherServletCustomizers) {
MockMvc mockMvc = createMockMvc(
filters, servletConfig, webAppContext, defaultRequestBuilder,
globalResultMatchers, globalResultHandlers, dispatcherServletCustomizers);
mockMvc.setDefaultResponseCharacterEncoding(defaultResponseCharacterEncoding);
return mockMvc;
}
protected final MockMvc createMockMvc(Filter[] filters, MockServletConfig servletConfig,
WebApplicationContext webAppContext, @Nullable RequestBuilder defaultRequestBuilder,
List<ResultMatcher> globalResultMatchers, List<ResultHandler> globalResultHandlers,
@Nullable List<DispatcherServletCustomizer> dispatcherServletCustomizers) {
TestDispatcherServlet dispatcherServlet = new TestDispatcherServlet(webAppContext);
if (dispatcherServletCustomizers != null) {
for (DispatcherServletCustomizer customizers : dispatcherServletCustomizers) {
customizers.customize(dispatcherServlet);
}
}
try {
dispatcherServlet.init(servletConfig);
}
catch (ServletException ex) {
// should never happen...
throw new MockMvcBuildException("Failed to initialize TestDispatcherServlet", ex);
}
MockMvc mockMvc = new MockMvc(dispatcherServlet, filters);
mockMvc.setDefaultRequest(defaultRequestBuilder);
mockMvc.setGlobalResultMatchers(globalResultMatchers);
mockMvc.setGlobalResultHandlers(globalResultHandlers);
return mockMvc;
}
@SuppressWarnings("serial")
private static | MockMvcBuilderSupport |
java | quarkusio__quarkus | independent-projects/arc/tests/src/test/java/io/quarkus/arc/test/observers/illegal/ObserverDisposesTest.java | {
"start": 571,
"end": 1096
} | class ____ {
@RegisterExtension
public ArcTestContainer container = ArcTestContainer.builder()
.beanClasses(Observer.class)
.shouldFail()
.build();
@Test
public void trigger() {
Throwable error = container.getFailure();
assertNotNull(error);
assertInstanceOf(DefinitionException.class, error);
assertTrue(error.getMessage().contains("Disposer method must not have an @Observes parameter"));
}
@Dependent
static | ObserverDisposesTest |
java | apache__flink | flink-table/flink-table-api-java/src/test/java/org/apache/flink/table/typeutils/FieldInfoUtilsTest.java | {
"start": 1572,
"end": 3282
} | class ____ {
private static Stream<TypeInformation> parameters() throws Exception {
return Stream.of(
new RowTypeInfo(
new TypeInformation[] {Types.INT, Types.LONG, Types.SQL_TIMESTAMP},
new String[] {"f0", "f1", "f2"}),
new PojoTypeInfo(
MyPojo.class,
Arrays.asList(
new PojoField(MyPojo.class.getDeclaredField("f0"), Types.INT),
new PojoField(MyPojo.class.getDeclaredField("f1"), Types.LONG),
new PojoField(
MyPojo.class.getDeclaredField("f2"),
Types.SQL_TIMESTAMP))));
}
@ParameterizedTest(name = "{0}")
@MethodSource("parameters")
void testByNameModeReorder(TypeInformation typeInfo) {
FieldInfoUtils.TypeInfoSchema schema =
FieldInfoUtils.getFieldsInfo(
typeInfo, new Expression[] {$("f2"), $("f1"), $("f0")});
assertThat(schema.getFieldNames()).isEqualTo(new String[] {"f2", "f1", "f0"});
}
@ParameterizedTest(name = "{0}")
@MethodSource("parameters")
void testByNameModeReorderAndRename(TypeInformation typeInfo) {
FieldInfoUtils.TypeInfoSchema schema =
FieldInfoUtils.getFieldsInfo(
typeInfo,
new Expression[] {$("f1").as("aa"), $("f0").as("bb"), $("f2").as("cc")});
assertThat(schema.getFieldNames()).isEqualTo(new String[] {"aa", "bb", "cc"});
}
/** Test Pojo class. */
public static | FieldInfoUtilsTest |
java | apache__camel | components/camel-thymeleaf/src/test/java/org/apache/camel/component/thymeleaf/ThymeleafAbstractBaseTest.java | {
"start": 7508,
"end": 8005
} | class ____ implements Processor {
@Override
public void process(Exchange exchange) {
exchange.getIn().setHeader(ThymeleafConstants.THYMELEAF_TEMPLATE, stringTemplate());
exchange.getIn().setHeader(LAST_NAME, "Doe");
exchange.getIn().setHeader(FIRST_NAME, JANE);
exchange.getIn().setHeader(ITEM, "Widgets for Dummies");
exchange.setProperty(ORDER_NUMBER, "7");
}
}
protected static | TemplateHeaderProcessor |
java | mapstruct__mapstruct | processor/src/main/java/org/mapstruct/ap/spi/util/IntrospectorUtils.java | {
"start": 544,
"end": 1660
} | class ____ {
private IntrospectorUtils() {
}
/**
* Utility method to take a string and convert it to normal Java variable
* name capitalization. This normally means converting the first
* character from upper case to lower case, but in the (unusual) special
* case when there is more than one character and both the first and
* second characters are upper case, we leave it alone.
* <p>
* Thus "FooBah" becomes "fooBah" and "X" becomes "x", but "URL" stays
* as "URL".
*
* @param name The string to be decapitalized.
*
* @return The decapitalized version of the string.
*/
public static String decapitalize(String name) {
if ( name == null || name.isEmpty() ) {
return name;
}
if ( name.length() > 1 && Character.isUpperCase( name.charAt( 1 ) ) &&
Character.isUpperCase( name.charAt( 0 ) ) ) {
return name;
}
char[] chars = name.toCharArray();
chars[0] = Character.toLowerCase( chars[0] );
return new String( chars );
}
}
| IntrospectorUtils |
java | spring-projects__spring-boot | module/spring-boot-web-server/src/testFixtures/java/org/springframework/boot/web/server/servlet/AbstractServletWebServerFactoryTests.java | {
"start": 76873,
"end": 77345
} | class ____ implements Filter {
private ClassLoader contextClassLoader;
@Override
public void init(FilterConfig filterConfig) throws ServletException {
this.contextClassLoader = Thread.currentThread().getContextClassLoader();
}
@Override
public void doFilter(ServletRequest request, ServletResponse response, FilterChain chain)
throws IOException, ServletException {
chain.doFilter(request, response);
}
}
}
| ThreadContextClassLoaderCapturingFilter |
java | apache__camel | components/camel-dns/src/generated/java/org/apache/camel/component/dns/types/DnsRecordConverterLoader.java | {
"start": 885,
"end": 2191
} | class ____ implements TypeConverterLoader, CamelContextAware {
private CamelContext camelContext;
public DnsRecordConverterLoader() {
}
@Override
public void setCamelContext(CamelContext camelContext) {
this.camelContext = camelContext;
}
@Override
public CamelContext getCamelContext() {
return camelContext;
}
@Override
public void load(TypeConverterRegistry registry) throws TypeConverterLoaderException {
registerConverters(registry);
}
private void registerConverters(TypeConverterRegistry registry) {
addTypeConverter(registry, org.xbill.DNS.Record.class, java.lang.String.class, true,
(type, exchange, value) -> {
Object answer = org.apache.camel.component.dns.types.DnsRecordConverter.toRecord((java.lang.String) value);
if (true && answer == null) {
answer = Void.class;
}
return answer;
});
}
private static void addTypeConverter(TypeConverterRegistry registry, Class<?> toType, Class<?> fromType, boolean allowNull, SimpleTypeConverter.ConversionMethod method) {
registry.addTypeConverter(toType, fromType, new SimpleTypeConverter(allowNull, method));
}
}
| DnsRecordConverterLoader |
java | google__dagger | dagger-compiler/main/java/dagger/internal/codegen/binding/FrameworkType.java | {
"start": 1307,
"end": 5360
} | enum ____ {
/** A {@link javax.inject.Provider}. */
PROVIDER {
@Override
public XCodeBlock to(
RequestKind requestKind,
XCodeBlock from) {
switch (requestKind) {
case INSTANCE:
return XCodeBlock.of("%L.get()", from);
case LAZY:
return XCodeBlock.of(
"%T.lazy(%L)",
XTypeNames.DOUBLE_CHECK,
from);
case PROVIDER:
return from;
case PROVIDER_OF_LAZY:
return XCodeBlock.of("%T.create(%L)", XTypeNames.PROVIDER_OF_LAZY, from);
case PRODUCER:
return XCodeBlock.of("%T.producerFromProvider(%L)", XTypeNames.PRODUCERS, from);
case FUTURE:
return XCodeBlock.of(
"%T.immediateFuture(%L)",
XTypeNames.FUTURES,
to(
RequestKind.INSTANCE,
from));
case PRODUCED:
return XCodeBlock.of(
"%T.successful(%L)",
XTypeNames.PRODUCED,
to(
RequestKind.INSTANCE,
from));
default:
throw new IllegalArgumentException(
String.format("Cannot request a %s from a %s", requestKind, this));
}
}
@Override
public XExpression to(
RequestKind requestKind,
XExpression from,
XProcessingEnv processingEnv) {
XCodeBlock codeBlock =
to(
requestKind,
from.codeBlock());
switch (requestKind) {
case INSTANCE:
return XExpression.create(from.type().unwrapType(), codeBlock);
case PROVIDER:
return from;
case PROVIDER_OF_LAZY:
return XExpression.create(
from.type().rewrapType(XTypeNames.LAZY).wrapType(XTypeNames.DAGGER_PROVIDER),
codeBlock);
case FUTURE:
return XExpression.create(
from.type().rewrapType(XTypeNames.LISTENABLE_FUTURE), codeBlock);
default:
return XExpression.create(
from.type().rewrapType(RequestKinds.frameworkClassName(requestKind)), codeBlock);
}
}
},
/** A {@link dagger.producers.Producer}. */
PRODUCER_NODE {
@Override
public XCodeBlock to(
RequestKind requestKind,
XCodeBlock from) {
switch (requestKind) {
case FUTURE:
return XCodeBlock.of("%L.get()", from);
case PRODUCER:
return from;
default:
throw new IllegalArgumentException(
String.format("Cannot request a %s from a %s", requestKind, this));
}
}
@Override
public XExpression to(
RequestKind requestKind,
XExpression from,
XProcessingEnv processingEnv) {
switch (requestKind) {
case FUTURE:
return XExpression.create(
from.type().rewrapType(XTypeNames.LISTENABLE_FUTURE),
to(
requestKind,
from.codeBlock()));
case PRODUCER:
return from;
default:
throw new IllegalArgumentException(
String.format("Cannot request a %s from a %s", requestKind, this));
}
}
};
/** Returns the framework type appropriate for fields for a given binding type. */
public static FrameworkType forBindingType(BindingType bindingType) {
switch (bindingType) {
case PROVISION:
return PROVIDER;
case PRODUCTION:
return PRODUCER_NODE;
case MEMBERS_INJECTION:
}
throw new AssertionError(bindingType);
}
/** Returns the framework type that exactly matches the given request kind, if one exists. */
public static Optional<FrameworkType> forRequestKind(RequestKind requestKind) {
switch (requestKind) {
case PROVIDER:
return Optional.of(FrameworkType.PROVIDER);
case PRODUCER:
return Optional.of(FrameworkType.PRODUCER_NODE);
default:
return Optional.empty();
}
}
/** The | FrameworkType |
java | micronaut-projects__micronaut-core | inject-java/src/test/groovy/io/micronaut/inject/constructor/arrayinjection/AImpl.java | {
"start": 711,
"end": 741
} | class ____ implements A {
}
| AImpl |
java | spring-projects__spring-framework | spring-test/src/test/java/org/springframework/test/context/support/ContextLoaderUtilsContextHierarchyTests.java | {
"start": 25477,
"end": 25682
} | interface ____ {
@AliasFor(annotation = ContextConfiguration.class)
String[] locations() default "A.xml";
}
@ContextConfigWithOverrides(locations = "one.xml")
private static | ContextConfigWithOverrides |
java | grpc__grpc-java | netty/src/main/java/io/grpc/netty/ProtocolNegotiators.java | {
"start": 23371,
"end": 25434
} | class ____ implements ProtocolNegotiator {
public ClientTlsProtocolNegotiator(SslContext sslContext,
ObjectPool<? extends Executor> executorPool, Optional<Runnable> handshakeCompleteRunnable,
X509TrustManager x509ExtendedTrustManager, String sni) {
this.sslContext = Preconditions.checkNotNull(sslContext, "sslContext");
this.executorPool = executorPool;
if (this.executorPool != null) {
this.executor = this.executorPool.getObject();
}
this.handshakeCompleteRunnable = handshakeCompleteRunnable;
this.x509ExtendedTrustManager = x509ExtendedTrustManager;
this.sni = sni;
}
private final SslContext sslContext;
private final ObjectPool<? extends Executor> executorPool;
private final Optional<Runnable> handshakeCompleteRunnable;
private final X509TrustManager x509ExtendedTrustManager;
private final String sni;
private Executor executor;
@Override
public AsciiString scheme() {
return Utils.HTTPS;
}
@Override
public ChannelHandler newHandler(GrpcHttp2ConnectionHandler grpcHandler) {
ChannelHandler gnh = new GrpcNegotiationHandler(grpcHandler);
ChannelLogger negotiationLogger = grpcHandler.getNegotiationLogger();
String authority;
if ("".equals(sni)) {
authority = null;
} else if (sni != null) {
authority = sni;
} else {
authority = grpcHandler.getAuthority();
}
ChannelHandler cth = new ClientTlsHandler(gnh, sslContext,
authority, this.executor, negotiationLogger, handshakeCompleteRunnable, this,
x509ExtendedTrustManager);
return new WaitUntilActiveHandler(cth, negotiationLogger);
}
@Override
public void close() {
if (this.executorPool != null && this.executor != null) {
this.executorPool.returnObject(this.executor);
}
}
@VisibleForTesting
boolean hasX509ExtendedTrustManager() {
return x509ExtendedTrustManager != null;
}
}
static final | ClientTlsProtocolNegotiator |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/DoubleBraceInitializationTest.java | {
"start": 12628,
"end": 13064
} | class ____ {
private Supplier<Map<String, Object>> test() {
return () -> ImmutableMap.of();
}
}
""")
.doTest();
}
@Test
public void statement() {
testHelper
.addInputLines(
"Test.java",
"""
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
| Test |
java | elastic__elasticsearch | test/framework/src/test/java/org/elasticsearch/test/ReachabilityCheckerTests.java | {
"start": 540,
"end": 2057
} | class ____ extends ESTestCase {
public void testSuccess() {
final var reachabilityChecker = new ReachabilityChecker();
var target = reachabilityChecker.register(createTarget());
reachabilityChecker.checkReachable();
target = null;
reachabilityChecker.ensureUnreachable();
assertNull(target);
}
public void testBecomesUnreachable() {
final var reachabilityChecker = new ReachabilityChecker();
var target = reachabilityChecker.register(createTarget());
reachabilityChecker.checkReachable();
target = null;
assertThat(
expectThrows(AssertionError.class, reachabilityChecker::checkReachable).getMessage(),
Matchers.startsWith("became unreachable: test object")
);
assertNull(target);
}
public void testStaysReachable() {
final var reachabilityChecker = new ReachabilityChecker();
var target = reachabilityChecker.register(createTarget());
reachabilityChecker.checkReachable();
assertThat(
expectThrows(AssertionError.class, () -> reachabilityChecker.ensureUnreachable(500)).getMessage(),
Matchers.startsWith("still reachable: test object")
);
assertNotNull(target);
}
private static Object createTarget() {
return new Object() {
@Override
public String toString() {
return "test object";
}
};
}
}
| ReachabilityCheckerTests |
java | apache__camel | components/camel-http/src/generated/java/org/apache/camel/component/http/HttpConvertersLoader.java | {
"start": 880,
"end": 3392
} | class ____ implements TypeConverterLoader, CamelContextAware {
private CamelContext camelContext;
public HttpConvertersLoader() {
}
@Override
public void setCamelContext(CamelContext camelContext) {
this.camelContext = camelContext;
}
@Override
public CamelContext getCamelContext() {
return camelContext;
}
@Override
public void load(TypeConverterRegistry registry) throws TypeConverterLoaderException {
registerConverters(registry);
}
private void registerConverters(TypeConverterRegistry registry) {
addTypeConverter(registry, org.apache.hc.core5.util.TimeValue.class, java.lang.String.class, false,
(type, exchange, value) -> {
Object answer = org.apache.camel.component.http.HttpConverters.toTimeValue((java.lang.String) value);
if (false && answer == null) {
answer = Void.class;
}
return answer;
});
addTypeConverter(registry, org.apache.hc.core5.util.TimeValue.class, long.class, false,
(type, exchange, value) -> {
Object answer = org.apache.camel.component.http.HttpConverters.toTimeValue((long) value);
if (false && answer == null) {
answer = Void.class;
}
return answer;
});
addTypeConverter(registry, org.apache.hc.core5.util.Timeout.class, java.lang.String.class, false,
(type, exchange, value) -> {
Object answer = org.apache.camel.component.http.HttpConverters.toTimeout((java.lang.String) value);
if (false && answer == null) {
answer = Void.class;
}
return answer;
});
addTypeConverter(registry, org.apache.hc.core5.util.Timeout.class, long.class, false,
(type, exchange, value) -> {
Object answer = org.apache.camel.component.http.HttpConverters.toTimeout((long) value);
if (false && answer == null) {
answer = Void.class;
}
return answer;
});
}
private static void addTypeConverter(TypeConverterRegistry registry, Class<?> toType, Class<?> fromType, boolean allowNull, SimpleTypeConverter.ConversionMethod method) {
registry.addTypeConverter(toType, fromType, new SimpleTypeConverter(allowNull, method));
}
}
| HttpConvertersLoader |
java | apache__camel | dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/SedaEndpointBuilderFactory.java | {
"start": 1468,
"end": 1597
} | interface ____ {
/**
* Builder for endpoint consumers for the SEDA component.
*/
public | SedaEndpointBuilderFactory |
java | elastic__elasticsearch | x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseParser.java | {
"start": 144961,
"end": 147916
} | class ____ extends ParserRuleContext {
public ValueExpressionContext valueExpression() {
return getRuleContext(ValueExpressionContext.class, 0);
}
public IdentifierContext identifier() {
return getRuleContext(IdentifierContext.class, 0);
}
public TerminalNode AS() {
return getToken(SqlBaseParser.AS, 0);
}
public NamedValueExpressionContext(ParserRuleContext parent, int invokingState) {
super(parent, invokingState);
}
@Override
public int getRuleIndex() {
return RULE_namedValueExpression;
}
@Override
public void enterRule(ParseTreeListener listener) {
if (listener instanceof SqlBaseListener) ((SqlBaseListener) listener).enterNamedValueExpression(this);
}
@Override
public void exitRule(ParseTreeListener listener) {
if (listener instanceof SqlBaseListener) ((SqlBaseListener) listener).exitNamedValueExpression(this);
}
@Override
public <T> T accept(ParseTreeVisitor<? extends T> visitor) {
if (visitor instanceof SqlBaseVisitor) return ((SqlBaseVisitor<? extends T>) visitor).visitNamedValueExpression(this);
else return visitor.visitChildren(this);
}
}
public final NamedValueExpressionContext namedValueExpression() throws RecognitionException {
NamedValueExpressionContext _localctx = new NamedValueExpressionContext(_ctx, getState());
enterRule(_localctx, 50, RULE_namedValueExpression);
int _la;
try {
enterOuterAlt(_localctx, 1);
{
setState(501);
valueExpression(0);
setState(506);
_errHandler.sync(this);
_la = _input.LA(1);
if ((((_la) & ~0x3f) == 0 && ((1L << _la) & -6012133270006397760L) != 0)
|| ((((_la - 70)) & ~0x3f) == 0 && ((1L << (_la - 70)) & -5764607520692920591L) != 0)
|| _la == BACKQUOTED_IDENTIFIER) {
{
setState(503);
_errHandler.sync(this);
_la = _input.LA(1);
if (_la == AS) {
{
setState(502);
match(AS);
}
}
setState(505);
identifier();
}
}
}
} catch (RecognitionException re) {
_localctx.exception = re;
_errHandler.reportError(this, re);
_errHandler.recover(this, re);
} finally {
exitRule();
}
return _localctx;
}
@SuppressWarnings("CheckReturnValue")
public static | NamedValueExpressionContext |
java | apache__flink | flink-core/src/main/java/org/apache/flink/util/AbstractAutoCloseableRegistry.java | {
"start": 1176,
"end": 1452
} | class ____ registries that allow to register instances of {@link
* Closeable}, which are all closed if this registry is closed.
*
* <p>Registering to an already closed registry will throw an exception and close the provided
* {@link Closeable}
*
* <p>All methods in this | for |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/mapping/collections/nature/elemental/ElementalBagTest.java | {
"start": 2079,
"end": 2581
} | class ____ {
@Id
private Integer id;
@ElementCollection
private Collection<String> phones = new ArrayList<>();
//Getters and setters are omitted for brevity
//end::ex-collection-elemental-model[]
private Person() {
// used by Hibernate
}
public Person(Integer id) {
this.id = id;
this.phones = new ArrayList<>();
}
public Collection<String> getPhones() {
return phones;
}
//tag::ex-collection-elemental-model[]
}
//end::ex-collection-elemental-model[]
}
| Person |
java | quarkusio__quarkus | extensions/amazon-lambda/common-runtime/src/main/java/io/quarkus/amazon/lambda/runtime/JacksonInputReader.java | {
"start": 160,
"end": 470
} | class ____ implements LambdaInputReader {
final private ObjectReader reader;
public JacksonInputReader(ObjectReader reader) {
this.reader = reader;
}
@Override
public Object readValue(InputStream is) throws IOException {
return reader.readValue(is);
}
}
| JacksonInputReader |
java | apache__flink | flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/catalog/UnknownCatalogTest.java | {
"start": 12123,
"end": 12389
} | class ____ extends GenericInMemoryCatalog {
public NullDefaultDatabaseCatalog(String name) {
super(name);
}
@Override
public String getDefaultDatabase() {
return null;
}
}
}
| NullDefaultDatabaseCatalog |
java | elastic__elasticsearch | x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalService.java | {
"start": 52168,
"end": 54621
} | class ____ {
public static InferenceServiceConfiguration get() {
return configuration.getOrCompute();
}
private static final LazyInitializable<InferenceServiceConfiguration, RuntimeException> configuration = new LazyInitializable<>(
() -> {
var configurationMap = new HashMap<String, SettingsConfiguration>();
configurationMap.put(
MODEL_ID,
new SettingsConfiguration.Builder(supportedTaskTypes).setDescription(
"The name of the model to use for the inference task."
)
.setLabel("Model ID")
.setRequired(true)
.setSensitive(false)
.setUpdatable(false)
.setType(SettingsConfigurationFieldType.STRING)
.build()
);
configurationMap.put(
NUM_ALLOCATIONS,
new SettingsConfiguration.Builder(supportedTaskTypes).setDefaultValue(1)
.setDescription("The total number of allocations this model is assigned across machine learning nodes.")
.setLabel("Number Allocations")
.setRequired(true)
.setSensitive(false)
.setUpdatable(true)
.setType(SettingsConfigurationFieldType.INTEGER)
.build()
);
configurationMap.put(
NUM_THREADS,
new SettingsConfiguration.Builder(supportedTaskTypes).setDefaultValue(2)
.setDescription("Sets the number of threads used by each model allocation during inference.")
.setLabel("Number Threads")
.setRequired(true)
.setSensitive(false)
.setUpdatable(false)
.setType(SettingsConfigurationFieldType.INTEGER)
.build()
);
return new InferenceServiceConfiguration.Builder().setService(NAME)
.setName(SERVICE_NAME)
.setTaskTypes(supportedTaskTypes)
.setConfigurations(configurationMap)
.build();
}
);
}
}
| Configuration |
java | alibaba__druid | core/src/test/java/com/alibaba/druid/bvt/sql/oracle/select/OracleSelectTest121.java | {
"start": 932,
"end": 6588
} | class ____ extends MysqlTest {
public void test_0() throws Exception {
String sql = "SELECT DATE_TYPE , NVL ( TIME_0 , ? ) TIME_0 , NVL ( TIME_1 , ? ) TIME_1 , NVL ( TIME_2 , ? ) TIME_2 , NVL ( TIME_3 , ? ) TIME_3 , NVL ( TIME_4 , ? ) TIME_4 , NVL ( TIME_5 , ? ) TIME_5 , NVL ( TIME_6 , ? ) TIME_6 , NVL ( TIME_7 , ? ) TIME_7 , NVL ( TIME_8 , ? ) TIME_8 , NVL ( TIME_9 , ? ) TIME_9 , NVL ( TIME_10 , ? ) TIME_10 , NVL ( TIME_11 , ? ) TIME_11 , NVL ( TIME_12 , ? ) TIME_12 , NVL ( TIME_13 , ? ) TIME_13 , NVL ( TIME_14 , ? ) TIME_14 , NVL ( TIME_15 , ? ) TIME_15 , NVL ( TIME_16 , ? ) TIME_16 , NVL ( TIME_17 , ? ) TIME_17 , NVL ( TIME_18 , ? ) TIME_18 , NVL ( TIME_19 , ? ) TIME_19 , NVL ( TIME_20 , ? ) TIME_20 , NVL ( TIME_21 , ? ) TIME_21 , NVL ( TIME_22 , ? ) TIME_22 , NVL ( TIME_23 , ? ) TIME_23 , NVL ( TIME_ALL , ? ) TIME_ALL \n" +
"FROM ( \n" +
" SELECT DATE_TYPE , NVL ( HOUR , ? ) HOUR , SUM ( CNT ) CNT \n" +
" FROM ( \n" +
" SELECT ? DATE_TYPE , TO_NUMBER ( TO_CHAR ( ACCIDENT_TIME , ? ) ) HOUR , COUNT ( * ) CNT \n" +
" FROM PROD_AVAMS.ACCIDENT_INFO \n" +
" WHERE ACCIDENT_TIME BETWEEN TO_DATE ( :1 , ? ) \n" +
" AND TO_DATE ( :2 , ? ) \n" +
" GROUP BY TO_NUMBER ( TO_CHAR ( ACCIDENT_TIME , ? ) ) \n" +
" UNION ALL \n" +
" SELECT ? DATE_TYPE , TO_NUMBER ( TO_CHAR ( ACCIDENT_TIME , ? ) ) HOUR , COUNT ( * ) CNT \n" +
" FROM ACCIDENT_INFO \n" +
" WHERE ( ACCIDENT_TIME BETWEEN TO_DATE ( :3 , ? ) \n" +
" AND TO_DATE ( :4 , ? ) ) \n" +
" GROUP BY TO_NUMBER ( TO_CHAR ( ACCIDENT_TIME , ? ) ) \n" +
" ) T1 \n" +
" GROUP BY GROUPING SETS ( ( DATE_TYPE , HOUR ) , DATE_TYPE ) \n" +
") T2 \n" +
"PIVOT ( MAX ( CNT ) FOR HOUR IN ( ? \"TIME_0\" , ? \"TIME_1\" , ? \"TIME_2\" , ? \"TIME_3\" , ? \"TIME_4\" , ? \"TIME_5\" , ? \"TIME_6\" , ? \"TIME_7\" , ? \"TIME_8\" , ? \"TIME_9\" , ? \"TIME_10\" , ? \"TIME_11\" , ? \"TIME_12\" , ? \"TIME_13\" , ? \"TIME_14\" , ? \"TIME_15\" , ? \"TIME_16\" , ? \"TIME_17\" , ? \"TIME_18\" , ? \"TIME_19\" , ? \"TIME_20\" , ? \"TIME_21\" , ? \"TIME_22\" , ? \"TIME_23\" , ? \"TIME_ALL\" ) \n" +
") \n" +
"ORDER BY DATE_TYPE";
List<SQLStatement> statementList = SQLUtils.parseStatements(sql, JdbcConstants.ORACLE);
assertEquals(1, statementList.size());
SQLSelectStatement stmt = (SQLSelectStatement) statementList.get(0);
assertEquals("SELECT DATE_TYPE, NVL(TIME_0, ?) AS TIME_0\n" +
"\t, NVL(TIME_1, ?) AS TIME_1\n" +
"\t, NVL(TIME_2, ?) AS TIME_2\n" +
"\t, NVL(TIME_3, ?) AS TIME_3\n" +
"\t, NVL(TIME_4, ?) AS TIME_4\n" +
"\t, NVL(TIME_5, ?) AS TIME_5\n" +
"\t, NVL(TIME_6, ?) AS TIME_6\n" +
"\t, NVL(TIME_7, ?) AS TIME_7\n" +
"\t, NVL(TIME_8, ?) AS TIME_8\n" +
"\t, NVL(TIME_9, ?) AS TIME_9\n" +
"\t, NVL(TIME_10, ?) AS TIME_10\n" +
"\t, NVL(TIME_11, ?) AS TIME_11\n" +
"\t, NVL(TIME_12, ?) AS TIME_12\n" +
"\t, NVL(TIME_13, ?) AS TIME_13\n" +
"\t, NVL(TIME_14, ?) AS TIME_14\n" +
"\t, NVL(TIME_15, ?) AS TIME_15\n" +
"\t, NVL(TIME_16, ?) AS TIME_16\n" +
"\t, NVL(TIME_17, ?) AS TIME_17\n" +
"\t, NVL(TIME_18, ?) AS TIME_18\n" +
"\t, NVL(TIME_19, ?) AS TIME_19\n" +
"\t, NVL(TIME_20, ?) AS TIME_20\n" +
"\t, NVL(TIME_21, ?) AS TIME_21\n" +
"\t, NVL(TIME_22, ?) AS TIME_22\n" +
"\t, NVL(TIME_23, ?) AS TIME_23\n" +
"\t, NVL(TIME_ALL, ?) AS TIME_ALL\n" +
"FROM (\n" +
"\tSELECT DATE_TYPE, NVL(HOUR, ?) AS HOUR\n" +
"\t\t, SUM(CNT) AS CNT\n" +
"\tFROM (\n" +
"\t\tSELECT ? AS DATE_TYPE, TO_NUMBER(TO_CHAR(ACCIDENT_TIME, ?)) AS HOUR\n" +
"\t\t\t, COUNT(*) AS CNT\n" +
"\t\tFROM PROD_AVAMS.ACCIDENT_INFO\n" +
"\t\tWHERE ACCIDENT_TIME BETWEEN TO_DATE(:1, ?) AND TO_DATE(:2, ?)\n" +
"\t\tGROUP BY TO_NUMBER(TO_CHAR(ACCIDENT_TIME, ?))\n" +
"\t\tUNION ALL\n" +
"\t\tSELECT ? AS DATE_TYPE, TO_NUMBER(TO_CHAR(ACCIDENT_TIME, ?)) AS HOUR\n" +
"\t\t\t, COUNT(*) AS CNT\n" +
"\t\tFROM ACCIDENT_INFO\n" +
"\t\tWHERE (ACCIDENT_TIME BETWEEN TO_DATE(:3, ?) AND TO_DATE(:4, ?))\n" +
"\t\tGROUP BY TO_NUMBER(TO_CHAR(ACCIDENT_TIME, ?))\n" +
"\t) T1\n" +
"\tGROUP BY GROUPING SETS ((DATE_TYPE, HOUR), DATE_TYPE)\n" +
")\n" +
"PIVOT (MAX(CNT) FOR HOUR IN (? AS \"TIME_0\", ? AS \"TIME_1\", ? AS \"TIME_2\", ? AS \"TIME_3\", ? AS \"TIME_4\", ? AS \"TIME_5\", ? AS \"TIME_6\", ? AS \"TIME_7\", ? AS \"TIME_8\", ? AS \"TIME_9\", ? AS \"TIME_10\", ? AS \"TIME_11\", ? AS \"TIME_12\", ? AS \"TIME_13\", ? AS \"TIME_14\", ? AS \"TIME_15\", ? AS \"TIME_16\", ? AS \"TIME_17\", ? AS \"TIME_18\", ? AS \"TIME_19\", ? AS \"TIME_20\", ? AS \"TIME_21\", ? AS \"TIME_22\", ? AS \"TIME_23\", ? AS \"TIME_ALL\")) T2\n" +
"ORDER BY DATE_TYPE", stmt.toString());
}
}
| OracleSelectTest121 |
java | spring-projects__spring-framework | spring-core/src/main/java/org/springframework/util/ConcurrentLruCache.java | {
"start": 8617,
"end": 9391
} | enum ____ {
/*
* No drain operation currently running.
*/
IDLE {
@Override
boolean shouldDrainBuffers(boolean delayable) {
return !delayable;
}
},
/*
* A drain operation is required due to a pending write modification.
*/
REQUIRED {
@Override
boolean shouldDrainBuffers(boolean delayable) {
return true;
}
},
/*
* A drain operation is in progress.
*/
PROCESSING {
@Override
boolean shouldDrainBuffers(boolean delayable) {
return false;
}
};
/**
* Determine whether the buffers should be drained.
* @param delayable if a drain should be delayed until required
* @return if a drain should be attempted
*/
abstract boolean shouldDrainBuffers(boolean delayable);
}
private | DrainStatus |
java | apache__dubbo | dubbo-common/src/test/java/org/apache/dubbo/common/utils/CIDRUtilsTest.java | {
"start": 958,
"end": 2343
} | class ____ {
@Test
void testIpv4() throws UnknownHostException {
CIDRUtils cidrUtils = new CIDRUtils("192.168.1.0/26");
Assertions.assertTrue(cidrUtils.isInRange("192.168.1.63"));
Assertions.assertFalse(cidrUtils.isInRange("192.168.1.65"));
cidrUtils = new CIDRUtils("192.168.1.192/26");
Assertions.assertTrue(cidrUtils.isInRange("192.168.1.199"));
Assertions.assertFalse(cidrUtils.isInRange("192.168.1.190"));
}
@Test
void testIpv6() throws UnknownHostException {
CIDRUtils cidrUtils = new CIDRUtils("234e:0:4567::3d/64");
Assertions.assertTrue(cidrUtils.isInRange("234e:0:4567::3e"));
Assertions.assertTrue(cidrUtils.isInRange("234e:0:4567::ffff:3e"));
Assertions.assertFalse(cidrUtils.isInRange("234e:1:4567::3d"));
Assertions.assertFalse(cidrUtils.isInRange("234e:0:4567:1::3d"));
cidrUtils = new CIDRUtils("3FFE:FFFF:0:CC00::/54");
Assertions.assertTrue(cidrUtils.isInRange("3FFE:FFFF:0:CC00::dd"));
Assertions.assertTrue(cidrUtils.isInRange("3FFE:FFFF:0:CC00:0000:eeee:0909:dd"));
Assertions.assertTrue(cidrUtils.isInRange("3FFE:FFFF:0:CC0F:0000:eeee:0909:dd"));
Assertions.assertFalse(cidrUtils.isInRange("3EFE:FFFE:0:C107::dd"));
Assertions.assertFalse(cidrUtils.isInRange("1FFE:FFFE:0:CC00::dd"));
}
}
| CIDRUtilsTest |
java | elastic__elasticsearch | x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/mapper/GeoShapeWithDocValuesFieldMapper.java | {
"start": 14633,
"end": 14889
} | class ____ extends AbstractShapeGeometryFieldMapper.AbstractShapeGeometryFieldType.BoundsBlockLoader {
GeoBoundsBlockLoader(String fieldName) {
super(fieldName);
}
}
}
public static | GeoBoundsBlockLoader |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/runtime/filecache/FileCache.java | {
"start": 12663,
"end": 13946
} | class ____ implements Runnable {
private final JobID jobID;
DeleteProcess(JobID jobID) {
this.jobID = jobID;
}
@Override
public void run() {
try {
synchronized (lock) {
Set<ExecutionAttemptID> jobRefs = jobRefHolders.get(jobID);
if (jobRefs != null && jobRefs.isEmpty()) {
// abort the copy
for (Future<Path> fileFuture : entries.get(jobID).values()) {
fileFuture.cancel(true);
}
// remove job specific entries in maps
entries.remove(jobID);
jobRefHolders.remove(jobID);
// remove the job wide temp directories
for (File storageDirectory : storageDirectories) {
File tempDir = new File(storageDirectory, jobID.toString());
FileUtils.deleteDirectory(tempDir);
}
}
}
} catch (IOException e) {
LOG.error("Could not delete file from local file cache.", e);
}
}
}
}
| DeleteProcess |
java | alibaba__nacos | auth/src/test/java/com/alibaba/nacos/auth/mock/MockAuthPluginService.java | {
"start": 1064,
"end": 1972
} | class ____ implements AuthPluginService {
public static final String TEST_PLUGIN = "test";
public static final String IDENTITY_TEST_KEY = "identity-test-key";
@Override
public Collection<String> identityNames() {
return Collections.singletonList(IDENTITY_TEST_KEY);
}
@Override
public boolean enableAuth(ActionTypes action, String type) {
return true;
}
@Override
public String getAuthServiceName() {
return TEST_PLUGIN;
}
@Override
public AuthResult validateAuthority(IdentityContext identityContext, Permission permission) {
return AuthResult.failureResult(401, "mock auth failed");
}
@Override
public AuthResult validateIdentity(IdentityContext identityContext, Resource resource) {
return AuthResult.failureResult(403, "mock auth failed");
}
}
| MockAuthPluginService |
java | spring-cloud__spring-cloud-gateway | spring-cloud-gateway-server-webflux/src/main/java/org/springframework/cloud/gateway/filter/factory/AddResponseHeaderGatewayFilterFactory.java | {
"start": 3631,
"end": 4094
} | class ____ extends AbstractNameValueGatewayFilterFactory.NameValueConfig {
private boolean override = true;
public boolean isOverride() {
return override;
}
public Config setOverride(boolean override) {
this.override = override;
return this;
}
@Override
public String toString() {
return new ToStringCreator(this).append(NAME_KEY, name)
.append(VALUE_KEY, value)
.append(OVERRIDE_KEY, override)
.toString();
}
}
}
| Config |
java | apache__maven | compat/maven-model/src/test/java/org/apache/maven/model/pom/PomMemoryAnalyzer.java | {
"start": 1288,
"end": 2480
} | class ____ analyzes Maven POM files to identify memory usage patterns and potential memory optimizations.
* This analyzer focuses on identifying duplicate strings and their memory impact across different paths in the POM structure.
*
* <p>The analyzer processes POM files recursively, tracking string occurrences and their locations within the POM structure.
* It can identify areas where string deduplication could provide significant memory savings.</p>
*
* <p>Usage example:</p>
* <pre>
* PomMemoryAnalyzer analyzer = new PomMemoryAnalyzer();
* Model model = reader.read(Files.newInputStream(pomPath));
* analyzer.analyzePom(model);
* analyzer.printAnalysis();
* </pre>
*
* <p>The analysis output includes:</p>
* <ul>
* <li>Total memory usage per POM path</li>
* <li>Potential memory savings through string deduplication</li>
* <li>Most frequent string values and their occurrence counts</li>
* <li>Statistics grouped by POM element types</li>
* </ul>
*
* <p>This tool is particularly useful for identifying memory optimization opportunities
* in large Maven multi-module projects where POM files may contain significant
* duplicate content.</p>
*/
public | that |
java | spring-cloud__spring-cloud-gateway | spring-cloud-gateway-server-webflux/src/main/java/org/springframework/cloud/gateway/support/BodyInserterContext.java | {
"start": 1052,
"end": 1745
} | class ____ implements BodyInserter.Context {
private final ExchangeStrategies exchangeStrategies;
public BodyInserterContext() {
this.exchangeStrategies = ExchangeStrategies.withDefaults();
}
public BodyInserterContext(ExchangeStrategies exchangeStrategies) {
this.exchangeStrategies = exchangeStrategies; // TODO: support custom strategies
}
@Override
public List<HttpMessageWriter<?>> messageWriters() {
return exchangeStrategies.messageWriters();
}
@Override
public Optional<ServerHttpRequest> serverRequest() {
return Optional.empty();
}
@Override
public Map<String, Object> hints() {
return Collections.emptyMap(); // TODO: support hints
}
}
| BodyInserterContext |
java | spring-projects__spring-framework | spring-context/src/test/java/org/springframework/context/aot/RuntimeHintsBeanFactoryInitializationAotProcessorTests.java | {
"start": 6671,
"end": 6907
} | class ____ implements RuntimeHintsRegistrar {
@Override
public void registerHints(RuntimeHints hints, ClassLoader classLoader) {
hints.resources().registerResourceBundle("sample");
}
}
public static | SampleRuntimeHintsRegistrar |
java | apache__camel | components/camel-caffeine/src/test/java/org/apache/camel/component/caffeine/cache/CaffeineCacheRemovalListenerProducerTest.java | {
"start": 1491,
"end": 9335
} | class ____ extends CaffeineCacheTestSupport {
// ****************************
// Clear
// ****************************
@Test
void testCacheClear() throws Exception {
MockEndpoint mock = getMockEndpoint("mock:result");
mock.expectedMinimumMessageCount(1);
mock.expectedBodiesReceived((Object) null);
mock.expectedHeaderReceived(CaffeineConstants.ACTION_HAS_RESULT, false);
mock.expectedHeaderReceived(CaffeineConstants.ACTION_SUCCEEDED, true);
fluentTemplate().withHeader(CaffeineConstants.ACTION, CaffeineConstants.ACTION_CLEANUP).to("direct://start").send();
MockEndpoint.assertIsSatisfied(context);
}
// ****************************
// Put
// ****************************
@Test
void testCachePut() {
final String key = generateRandomString();
final String val = generateRandomString();
MockEndpoint mock = getMockEndpoint("mock:result");
mock.expectedMinimumMessageCount(1);
mock.expectedBodiesReceived(val);
mock.expectedHeaderReceived(CaffeineConstants.ACTION_HAS_RESULT, false);
mock.expectedHeaderReceived(CaffeineConstants.ACTION_SUCCEEDED, true);
fluentTemplate().withHeader(CaffeineConstants.ACTION, CaffeineConstants.ACTION_PUT)
.withHeader(CaffeineConstants.KEY, key).withBody(val).to("direct://start").send();
assertNotNull(getTestRemovalListenerCache().getIfPresent(key));
assertEquals(val, getTestRemovalListenerCache().getIfPresent(key));
}
@Test
void testCachePutAll() throws Exception {
final Map<String, String> map = generateRandomMapOfString(3);
final Set<String> keys = map.keySet().stream().limit(2).collect(Collectors.toSet());
fluentTemplate().withHeader(CaffeineConstants.ACTION, CaffeineConstants.ACTION_PUT_ALL).withBody(map)
.to("direct://start").send();
MockEndpoint mock1 = getMockEndpoint("mock:result");
mock1.expectedMinimumMessageCount(1);
mock1.expectedHeaderReceived(CaffeineConstants.ACTION_HAS_RESULT, false);
mock1.expectedHeaderReceived(CaffeineConstants.ACTION_SUCCEEDED, true);
final Map<String, String> elements = getTestRemovalListenerCache().getAllPresent(keys);
keys.forEach(k -> {
assertTrue(elements.containsKey(k));
assertEquals(map.get(k), elements.get(k));
});
MockEndpoint.assertIsSatisfied(context);
}
// ****************************
// Get
// ****************************
@Test
void testCacheGet() throws Exception {
final Cache<Object, Object> cache = getTestRemovalListenerCache();
final String key = generateRandomString();
final String val = generateRandomString();
cache.put(key, val);
MockEndpoint mock = getMockEndpoint("mock:result");
mock.expectedMinimumMessageCount(1);
mock.expectedBodiesReceived(val);
mock.expectedHeaderReceived(CaffeineConstants.ACTION_HAS_RESULT, true);
mock.expectedHeaderReceived(CaffeineConstants.ACTION_SUCCEEDED, true);
fluentTemplate().withHeader(CaffeineConstants.ACTION, CaffeineConstants.ACTION_GET)
.withHeader(CaffeineConstants.KEY, key).withBody(val).to("direct://start").send();
MockEndpoint.assertIsSatisfied(context);
}
@Test
void testCacheGetAll() throws Exception {
final Cache<Object, Object> cache = getTestRemovalListenerCache();
final Map<String, String> map = generateRandomMapOfString(3);
final Set<String> keys = map.keySet().stream().limit(2).collect(Collectors.toSet());
cache.putAll(map);
MockEndpoint mock = getMockEndpoint("mock:result");
mock.expectedMinimumMessageCount(1);
mock.expectedHeaderReceived(CaffeineConstants.ACTION_HAS_RESULT, true);
mock.expectedHeaderReceived(CaffeineConstants.ACTION_SUCCEEDED, true);
fluentTemplate().withHeader(CaffeineConstants.ACTION, CaffeineConstants.ACTION_GET_ALL)
.withHeader(CaffeineConstants.KEYS, keys).to("direct://start").send();
MockEndpoint.assertIsSatisfied(context);
final Map<String, String> elements = mock.getExchanges().get(0).getIn().getBody(Map.class);
keys.forEach(k -> {
assertTrue(elements.containsKey(k));
assertEquals(map.get(k), elements.get(k));
});
}
//
// ****************************
// INVALIDATE
// ****************************
@Test
void testCacheInvalidate() throws Exception {
final Cache<Object, Object> cache = getTestRemovalListenerCache();
final String key = generateRandomString();
final String val = generateRandomString();
cache.put(key, val);
MockEndpoint mock = getMockEndpoint("mock:result");
mock.expectedMinimumMessageCount(1);
mock.expectedHeaderReceived(CaffeineConstants.ACTION_HAS_RESULT, false);
mock.expectedHeaderReceived(CaffeineConstants.ACTION_SUCCEEDED, true);
fluentTemplate().withHeader(CaffeineConstants.ACTION, CaffeineConstants.ACTION_INVALIDATE)
.withHeader(CaffeineConstants.KEY, key).to("direct://start").send();
MockEndpoint.assertIsSatisfied(context);
assertNull(cache.getIfPresent(key));
}
@Test
void testCacheInvalidateAll() throws Exception {
final Cache<Object, Object> cache = getTestRemovalListenerCache();
final Map<String, String> map = generateRandomMapOfString(3);
final Set<String> keys = map.keySet().stream().limit(2).collect(Collectors.toSet());
cache.putAll(map);
MockEndpoint mock = getMockEndpoint("mock:result");
mock.expectedMinimumMessageCount(1);
mock.expectedHeaderReceived(CaffeineConstants.ACTION_HAS_RESULT, false);
mock.expectedHeaderReceived(CaffeineConstants.ACTION_SUCCEEDED, true);
fluentTemplate().withHeader(CaffeineConstants.ACTION, CaffeineConstants.ACTION_INVALIDATE_ALL)
.withHeader(CaffeineConstants.KEYS, keys).to("direct://start").send();
MockEndpoint.assertIsSatisfied(context);
final Map<String, String> elements = getTestRemovalListenerCache().getAllPresent(keys);
keys.forEach(k -> {
assertFalse(elements.containsKey(k));
});
}
@Test
void testStats() {
final Map<String, String> map = generateRandomMapOfString(3);
final Set<String> keys = map.keySet().stream().limit(2).collect(Collectors.toSet());
fluentTemplate().withHeader(CaffeineConstants.ACTION, CaffeineConstants.ACTION_PUT_ALL).withBody(map)
.to("direct://start").send();
MockEndpoint mock1 = getMockEndpoint("mock:result");
mock1.expectedMinimumMessageCount(1);
mock1.expectedHeaderReceived(CaffeineConstants.ACTION_HAS_RESULT, false);
mock1.expectedHeaderReceived(CaffeineConstants.ACTION_SUCCEEDED, true);
final Map<String, String> elements = getTestRemovalListenerCache().getAllPresent(keys);
keys.forEach(k -> {
assertTrue(elements.containsKey(k));
assertEquals(map.get(k), elements.get(k));
});
}
// ****************************
// Route
// ****************************
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
public void configure() {
from("direct://start")
.to("caffeine-cache://cacheRl")
.to("log:org.apache.camel.component.caffeine?level=INFO&showAll=true&multiline=true")
.to("mock:result");
}
};
}
}
| CaffeineCacheRemovalListenerProducerTest |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/collection/delayedOperation/BagDelayedOperationNoCascadeTest.java | {
"start": 1559,
"end": 5712
} | class ____ {
private Long parentId;
@BeforeEach
public void setup(SessionFactoryScope scope) {
// start by cleaning up in case a test fails
if ( parentId != null ) {
cleanup( scope );
}
Parent parent = new Parent();
Child child1 = new Child( "Sherman" );
Child child2 = new Child( "Yogi" );
parent.addChild( child1 );
parent.addChild( child2 );
scope.inTransaction(
session -> {
session.persist( child1 );
session.persist( child2 );
session.persist( parent );
}
);
parentId = parent.getId();
}
@AfterEach
public void cleanup(SessionFactoryScope scope) {
scope.getSessionFactory().getSchemaManager().truncate();
parentId = null;
}
@Test
@JiraKey(value = "HHH-5855")
public void testSimpleAddManaged(SessionFactoryScope scope) {
// Add 2 Child entities
Child c1 = new Child( "Darwin" );
Child c2 = new Child( "Comet" );
scope.inTransaction(
session -> {
session.persist( c1 );
session.persist( c2 );
}
);
// Add a managed Child and commit
scope.inTransaction(
session -> {
Parent p = session.get( Parent.class, parentId );
assertFalse( Hibernate.isInitialized( p.getChildren() ) );
// get the first Child so it is managed; add to collection
p.addChild( session.get( Child.class, c1.getId() ) );
// collection should still be uninitialized
assertFalse( Hibernate.isInitialized( p.getChildren() ) );
}
);
scope.inTransaction(
session -> {
Parent p = session.get( Parent.class, parentId );
assertFalse( Hibernate.isInitialized( p.getChildren() ) );
assertEquals( 3, p.getChildren().size() );
}
);
// Add the other managed Child, merge and commit.
scope.inTransaction(
session -> {
Parent p = session.get( Parent.class, parentId );
assertFalse( Hibernate.isInitialized( p.getChildren() ) );
// get the second Child so it is managed; add to collection
p.addChild( session.get( Child.class, c2.getId() ) );
// collection should still be uninitialized
assertFalse( Hibernate.isInitialized( p.getChildren() ) );
session.merge( p );
}
);
scope.inTransaction(
session -> {
Parent p = session.get( Parent.class, parentId );
assertFalse( Hibernate.isInitialized( p.getChildren() ) );
assertEquals( 4, p.getChildren().size() );
}
);
}
@Test
@JiraKey(value = "HHH-11209")
public void testMergeInitializedBagAndRemerge(SessionFactoryScope scope) {
Parent parent = scope.fromTransaction(
session -> {
Parent p = session.get( Parent.class, parentId );
assertFalse( Hibernate.isInitialized( p.getChildren() ) );
// initialize
Hibernate.initialize( p.getChildren() );
assertTrue( Hibernate.isInitialized( p.getChildren() ) );
return p;
}
);
Parent modifiedParent = scope.fromTransaction(
session -> {
Parent p = (Parent) session.merge( parent );
Child c = new Child( "Zeke" );
c.setParent( p );
session.persist( c );
assertFalse( Hibernate.isInitialized( p.getChildren() ) );
p.getChildren().size();
p.getChildren().add( c );
return p;
}
);
// Merge detached Parent with initialized children
Parent p = scope.fromTransaction(
session -> {
Parent mergedParent = (Parent) session.merge( modifiedParent );
// after merging, p#children will be uninitialized
assertFalse( Hibernate.isInitialized( mergedParent.getChildren() ) );
assertTrue( ( (AbstractPersistentCollection) mergedParent.getChildren() ).hasQueuedOperations() );
return mergedParent;
}
);
assertFalse( ( (AbstractPersistentCollection) p.getChildren() ).hasQueuedOperations() );
// Merge detached Parent, now with uninitialized children no queued operations
scope.inTransaction(
session -> {
Parent mergedParent = (Parent) session.merge( p );
assertFalse( Hibernate.isInitialized( mergedParent.getChildren() ) );
assertFalse( ( (AbstractPersistentCollection) mergedParent.getChildren() ).hasQueuedOperations() );
}
);
}
@Entity(name = "Parent")
public static | BagDelayedOperationNoCascadeTest |
java | redisson__redisson | redisson/src/main/java/org/redisson/api/RTimeSeriesAsync.java | {
"start": 914,
"end": 13935
} | interface ____<V, L> extends RExpirableAsync {
/**
* Adds element to this time-series collection
* by specified <code>timestamp</code>.
*
* @param timestamp object timestamp
* @param object object itself
* @return void
*/
RFuture<Void> addAsync(long timestamp, V object);
/**
* Adds element with <code>label</code> to this time-series collection
* by specified <code>timestamp</code>.
*
* @param timestamp object timestamp
* @param object object itself
* @param label object label
*/
RFuture<Void> addAsync(long timestamp, V object, L label);
/**
* Adds all elements contained in the specified map to this time-series collection.
* Map contains of timestamp mapped by object.
*
* @param objects - map of elements to add
* @return void
*/
RFuture<Void> addAllAsync(Map<Long, V> objects);
/**
* Adds all entries collection to this time-series collection.
*
* @param entries collection of time series entries
* @return void
*/
RFuture<Void> addAllAsync(Collection<TimeSeriesEntry<V, L>> entries);
/**
* Use {@link #addAsync(long, Object, Duration)} instead
*
* @param timestamp - object timestamp
* @param object - object itself
* @param timeToLive - time to live interval
* @param timeUnit - unit of time to live interval
* @return void
*/
@Deprecated
RFuture<Void> addAsync(long timestamp, V object, long timeToLive, TimeUnit timeUnit);
/**
* Adds element to this time-series collection
* by specified <code>timestamp</code>.
*
* @param timestamp object timestamp
* @param object object itself
* @param timeToLive time to live interval
*/
RFuture<Void> addAsync(long timestamp, V object, Duration timeToLive);
/**
* Adds element with <code>label</code> to this time-series collection
* by specified <code>timestamp</code>.
*
* @param timestamp object timestamp
* @param object object itself
* @param label object label
* @param timeToLive time to live interval
* @return void
*/
RFuture<Void> addAsync(long timestamp, V object, L label, Duration timeToLive);
/**
* Use {@link #addAllAsync(Map, Duration)} instead
*
* @param objects - map of elements to add
* @param timeToLive - time to live interval
* @param timeUnit - unit of time to live interval
* @return void
*/
@Deprecated
RFuture<Void> addAllAsync(Map<Long, V> objects, long timeToLive, TimeUnit timeUnit);
/**
* Adds all elements contained in the specified map to this time-series collection.
* Map contains of timestamp mapped by object.
*
* @param objects map of elements to add
* @param timeToLive time to live interval
*/
RFuture<Void> addAllAsync(Map<Long, V> objects, Duration timeToLive);
/**
* Adds all time series entries collection to this time-series collection.
* Specified time to live interval applied to all entries defined in collection.
*
* @param entries collection of time series entries
* @param timeToLive time to live interval
* @return void
*/
RFuture<Void> addAllAsync(Collection<TimeSeriesEntry<V, L>> entries, Duration timeToLive);
/**
* Returns size of this set.
*
* @return size
*/
RFuture<Integer> sizeAsync();
/**
* Returns object by specified <code>timestamp</code> or <code>null</code> if it doesn't exist.
*
* @param timestamp - object timestamp
* @return object
*/
RFuture<V> getAsync(long timestamp);
/**
* Returns time series entry by specified <code>timestamp</code> or <code>null</code> if it doesn't exist.
*
* @param timestamp object timestamp
* @return time series entry
*/
RFuture<TimeSeriesEntry<V, L>> getEntryAsync(long timestamp);
/**
* Removes object by specified <code>timestamp</code>.
*
* @param timestamp - object timestamp
* @return <code>true</code> if an element was removed as a result of this call
*/
RFuture<Boolean> removeAsync(long timestamp);
/**
* Removes and returns object by specified <code>timestamp</code>.
*
* @param timestamp - object timestamp
* @return object or <code>null</code> if it doesn't exist
*/
RFuture<V> getAndRemoveAsync(long timestamp);
/**
* Removes and returns entry by specified <code>timestamp</code>.
*
* @param timestamp - object timestamp
* @return entry or <code>null</code> if it doesn't exist
*/
RFuture<TimeSeriesEntry<V, L>> getAndRemoveEntryAsync(long timestamp);
/**
* Removes and returns the head elements
*
* @param count - elements amount
* @return collection of head elements
*/
RFuture<Collection<V>> pollFirstAsync(int count);
/**
* Removes and returns head entries
*
* @param count - entries amount
* @return collection of head entries
*/
RFuture<Collection<TimeSeriesEntry<V, L>>> pollFirstEntriesAsync(int count);
/**
* Removes and returns the tail elements or {@code null} if this time-series collection is empty.
*
* @param count - elements amount
* @return the tail element or {@code null} if this time-series collection is empty
*/
RFuture<Collection<V>> pollLastAsync(int count);
/**
* Removes and returns tail entries
*
* @param count - entries amount
* @return collection of tail entries
*/
RFuture<Collection<TimeSeriesEntry<V, L>>> pollLastEntriesAsync(int count);
/**
* Removes and returns the head element or {@code null} if this time-series collection is empty.
*
* @return the head element,
* or {@code null} if this time-series collection is empty
*/
RFuture<V> pollFirstAsync();
/**
* Removes and returns head entry or {@code null} if this time-series collection is empty.
*
* @return the head entry,
* or {@code null} if this time-series collection is empty
*/
RFuture<TimeSeriesEntry<V, L>> pollFirstEntryAsync();
/**
* Removes and returns the tail element or {@code null} if this time-series collection is empty.
*
* @return the tail element or {@code null} if this time-series collection is empty
*/
RFuture<V> pollLastAsync();
/**
* Removes and returns the tail entry or {@code null} if this time-series collection is empty.
*
* @return the tail entry or {@code null} if this time-series collection is empty
*/
RFuture<TimeSeriesEntry<V, L>> pollLastEntryAsync();
/**
* Returns the tail element or {@code null} if this time-series collection is empty.
*
* @return the tail element or {@code null} if this time-series collection is empty
*/
RFuture<V> lastAsync();
/**
* Returns the tail entry or {@code null} if this time-series collection is empty.
*
* @return the tail entry or {@code null} if this time-series collection is empty
*/
RFuture<TimeSeriesEntry<V, L>> lastEntryAsync();
/**
* Returns the head element or {@code null} if this time-series collection is empty.
*
* @return the head element or {@code null} if this time-series collection is empty
*/
RFuture<V> firstAsync();
/**
* Returns the head entry or {@code null} if this time-series collection is empty.
*
* @return the head entry or {@code null} if this time-series collection is empty
*/
RFuture<TimeSeriesEntry<V, L>> firstEntryAsync();
/**
* Returns timestamp of the head timestamp or {@code null} if this time-series collection is empty.
*
* @return timestamp or {@code null} if this time-series collection is empty
*/
RFuture<Long> firstTimestampAsync();
/**
* Returns timestamp of the tail element or {@code null} if this time-series collection is empty.
*
* @return timestamp or {@code null} if this time-series collection is empty
*/
RFuture<Long> lastTimestampAsync();
/**
* Returns the tail elements of this time-series collection.
*
* @param count - elements amount
* @return the tail elements
*/
RFuture<Collection<V>> lastAsync(int count);
/**
* Returns the tail entries of this time-series collection.
*
* @param count - entries amount
* @return the tail entries
*/
RFuture<Collection<TimeSeriesEntry<V, L>>> lastEntriesAsync(int count);
/**
* Returns the head elements of this time-series collection.
*
* @param count - elements amount
* @return the head elements
*/
RFuture<Collection<V>> firstAsync(int count);
/**
* Returns the head entries of this time-series collection.
*
* @param count - entries amount
* @return the head entries
*/
RFuture<Collection<TimeSeriesEntry<V, L>>> firstEntriesAsync(int count);
/**
* Removes values within timestamp range. Including boundary values.
*
* @param startTimestamp - start timestamp
* @param endTimestamp - end timestamp
* @return number of removed elements
*/
RFuture<Integer> removeRangeAsync(long startTimestamp, long endTimestamp);
/**
* Returns ordered elements of this time-series collection within timestamp range. Including boundary values.
*
* @param startTimestamp - start timestamp
* @param endTimestamp - end timestamp
* @return elements collection
*/
RFuture<Collection<V>> rangeAsync(long startTimestamp, long endTimestamp);
/**
* Returns ordered elements of this time-series collection within timestamp range. Including boundary values.
*
* @param startTimestamp start timestamp
* @param endTimestamp end timestamp
* @param limit result size limit
* @return elements collection
*/
RFuture<Collection<V>> rangeAsync(long startTimestamp, long endTimestamp, int limit);
/**
* Returns elements of this time-series collection in reverse order within timestamp range. Including boundary values.
*
* @param startTimestamp - start timestamp
* @param endTimestamp - end timestamp
* @return elements collection
*/
RFuture<Collection<V>> rangeReversedAsync(long startTimestamp, long endTimestamp);
/**
* Returns elements of this time-series collection in reverse order within timestamp range. Including boundary values.
*
* @param startTimestamp start timestamp
* @param endTimestamp end timestamp
* @param limit result size limit
* @return elements collection
*/
RFuture<Collection<V>> rangeReversedAsync(long startTimestamp, long endTimestamp, int limit);
/**
* Returns ordered entries of this time-series collection within timestamp range. Including boundary values.
*
* @param startTimestamp - start timestamp
* @param endTimestamp - end timestamp
* @return elements collection
*/
RFuture<Collection<TimeSeriesEntry<V, L>>> entryRangeAsync(long startTimestamp, long endTimestamp);
/**
* Returns ordered entries of this time-series collection within timestamp range. Including boundary values.
*
* @param startTimestamp start timestamp
* @param endTimestamp end timestamp
* @param limit result size limit
* @return elements collection
*/
RFuture<Collection<TimeSeriesEntry<V, L>>> entryRangeAsync(long startTimestamp, long endTimestamp, int limit);
/**
* Returns entries of this time-series collection in reverse order within timestamp range. Including boundary values.
*
* @param startTimestamp - start timestamp
* @param endTimestamp - end timestamp
* @return elements collection
*/
RFuture<Collection<TimeSeriesEntry<V, L>>> entryRangeReversedAsync(long startTimestamp, long endTimestamp);
/**
* Returns entries of this time-series collection in reverse order within timestamp range. Including boundary values.
*
* @param startTimestamp start timestamp
* @param endTimestamp end timestamp
* @param limit result size limit
* @return elements collection
*/
RFuture<Collection<TimeSeriesEntry<V, L>>> entryRangeReversedAsync(long startTimestamp, long endTimestamp, int limit);
/**
* Adds object event listener
*
* @see org.redisson.api.listener.TrackingListener
* @see org.redisson.api.listener.ScoredSortedSetAddListener
* @see org.redisson.api.listener.ScoredSortedSetRemoveListener
* @see org.redisson.api.ExpiredObjectListener
* @see org.redisson.api.DeletedObjectListener
*
* @param listener object event listener
* @return listener id
*/
@Override
RFuture<Integer> addListenerAsync(ObjectListener listener);
}
| RTimeSeriesAsync |
java | apache__flink | flink-formats/flink-csv/src/test/java/org/apache/flink/formats/csv/CsvRowDataSerDeSchemaTest.java | {
"start": 3232,
"end": 23358
} | class ____ {
@Test
void testSerializeDeserialize() throws Exception {
testNullableField(BIGINT(), "null", null);
testNullableField(STRING(), "null", null);
testNullableField(STRING(), "\"This is a test.\"", "This is a test.");
testNullableField(STRING(), "\"This is a test\n\r.\"", "This is a test\n\r.");
testNullableField(BOOLEAN(), "true", true);
testNullableField(BOOLEAN(), "null", null);
testNullableField(TINYINT(), "124", (byte) 124);
testNullableField(SMALLINT(), "10000", (short) 10000);
testNullableField(INT(), "1234567", 1234567);
testNullableField(BIGINT(), "12345678910", 12345678910L);
testNullableField(FLOAT(), "0.33333334", 0.33333334f);
testNullableField(DOUBLE(), "0.33333333332", 0.33333333332d);
testNullableField(
DECIMAL(38, 25),
"1234.0000000000000000000000001",
new BigDecimal("1234.0000000000000000000000001"));
testNullableField(
DECIMAL(38, 0),
"123400000000000000000000000001",
new BigDecimal("123400000000000000000000000001"));
testNullableField(DATE(), "2018-10-12", Date.valueOf("2018-10-12"));
testNullableField(TIME(0), "12:12:12", Time.valueOf("12:12:12"));
testNullableField(
TIMESTAMP(0),
"\"2018-10-12 12:12:12\"",
LocalDateTime.parse("2018-10-12T12:12:12"));
testNullableField(
TIMESTAMP(0),
"\"2018-10-12 12:12:12.123\"",
LocalDateTime.parse("2018-10-12T12:12:12.123"));
testNullableField(TIMESTAMP_LTZ(0), "\"1970-01-01 00:02:03Z\"", Instant.ofEpochSecond(123));
testNullableField(
TIMESTAMP_LTZ(0), "\"1970-01-01 00:02:03.456Z\"", Instant.ofEpochMilli(123456));
testNullableField(
ROW(FIELD("f0", STRING()), FIELD("f1", INT()), FIELD("f2", BOOLEAN())),
"Hello;42;false",
Row.of("Hello", 42, false));
testNullableField(ARRAY(STRING()), "a;b;c", new String[] {"a", "b", "c"});
testNullableField(ARRAY(TINYINT()), "12;4;null", new Byte[] {12, 4, null});
testNullableField(BYTES(), "awML", new byte[] {107, 3, 11});
testNullableField(TIME(3), "12:12:12.232", LocalTime.parse("12:12:12.232"));
testNullableField(TIME(2), "12:12:12.23", LocalTime.parse("12:12:12.23"));
testNullableField(TIME(1), "12:12:12.2", LocalTime.parse("12:12:12.2"));
testNullableField(TIME(0), "12:12:12", LocalTime.parse("12:12:12"));
}
@Test
void testSerializeDeserializeCustomizedProperties() throws Exception {
Consumer<CsvRowDataSerializationSchema.Builder> serConfig =
(serSchemaBuilder) ->
serSchemaBuilder
.setEscapeCharacter('*')
.setQuoteCharacter('\'')
.setArrayElementDelimiter(":")
.setFieldDelimiter(';');
Consumer<CsvRowDataDeserializationSchema.Builder> deserConfig =
(deserSchemaBuilder) ->
deserSchemaBuilder
.setEscapeCharacter('*')
.setQuoteCharacter('\'')
.setArrayElementDelimiter(":")
.setFieldDelimiter(';');
testFieldDeserialization(STRING(), "123*'4**", "123'4*", deserConfig, ";");
testField(STRING(), "'123''4**'", "'123''4**'", serConfig, deserConfig, ";");
testFieldDeserialization(STRING(), "'a;b*'c'", "a;b'c", deserConfig, ";");
testField(STRING(), "'a;b''c'", "a;b'c", serConfig, deserConfig, ";");
testFieldDeserialization(INT(), " 12 ", 12, deserConfig, ";");
testField(INT(), "12", 12, serConfig, deserConfig, ";");
testFieldDeserialization(
ROW(FIELD("f0", STRING()), FIELD("f1", STRING())),
"1:hello",
Row.of("1", "hello"),
deserConfig,
";");
testField(
ROW(FIELD("f0", STRING()), FIELD("f1", STRING())),
"'1:hello'",
Row.of("1", "hello"),
serConfig,
deserConfig,
";");
testField(
ROW(FIELD("f0", STRING()), FIELD("f1", STRING())),
"'1:hello world'",
Row.of("1", "hello world"),
serConfig,
deserConfig,
";");
testField(
STRING(),
"null",
"null",
serConfig,
deserConfig,
";"); // string because null literal has not been set
testFieldDeserialization(
TIME(3), "12:12:12.232", LocalTime.parse("12:12:12.232"), deserConfig, ";");
testFieldDeserialization(
TIME(3), "12:12:12.232342", LocalTime.parse("12:12:12.232"), deserConfig, ";");
testFieldDeserialization(
TIME(3), "12:12:12.23", LocalTime.parse("12:12:12.23"), deserConfig, ";");
testFieldDeserialization(
TIME(2), "12:12:12.23", LocalTime.parse("12:12:12.23"), deserConfig, ";");
testFieldDeserialization(
TIME(2), "12:12:12.232312", LocalTime.parse("12:12:12.23"), deserConfig, ";");
testFieldDeserialization(
TIME(2), "12:12:12.2", LocalTime.parse("12:12:12.2"), deserConfig, ";");
testFieldDeserialization(
TIME(1), "12:12:12.2", LocalTime.parse("12:12:12.2"), deserConfig, ";");
testFieldDeserialization(
TIME(1), "12:12:12.2235", LocalTime.parse("12:12:12.2"), deserConfig, ";");
testFieldDeserialization(
TIME(1), "12:12:12", LocalTime.parse("12:12:12"), deserConfig, ";");
testFieldDeserialization(
TIME(0), "12:12:12", LocalTime.parse("12:12:12"), deserConfig, ";");
testFieldDeserialization(
TIME(0), "12:12:12.45", LocalTime.parse("12:12:12"), deserConfig, ";");
int precision = 5;
assertThatThrownBy(
() ->
testFieldDeserialization(
TIME(precision),
"12:12:12.45",
LocalTime.parse("12:12:12"),
deserConfig,
";"))
.hasMessage(
"Csv does not support TIME type with precision: 5, it only supports precision 0 ~ 3.");
}
@Test
void testDeserializeParseError() {
assertThatThrownBy(() -> testDeserialization(false, false, "Test,null,Test"))
.isInstanceOf(IOException.class);
}
@Test
void testDeserializeUnsupportedNull() throws Exception {
// unsupported null for integer
assertThat(testDeserialization(true, false, "Test,null,Test"))
.isEqualTo(Row.of("Test", null, "Test"));
}
@Test
void testDeserializeNullRow() throws Exception {
// return null for null input
assertThat(testDeserialization(false, false, null)).isNull();
}
@Test
void testDeserializeIncompleteRow() throws Exception {
// last two columns are missing
assertThat(testDeserialization(true, false, "Test")).isEqualTo(Row.of("Test", null, null));
}
@Test
void testDeserializeMoreColumnsThanExpected() throws Exception {
// one additional string column
assertThat(testDeserialization(true, false, "Test,12,Test,Test")).isNull();
}
@Test
void testDeserializeIgnoreComment() throws Exception {
// # is part of the string
assertThat(testDeserialization(false, false, "#Test,12,Test"))
.isEqualTo(Row.of("#Test", 12, "Test"));
}
@Test
void testDeserializeAllowComment() throws Exception {
// entire row is ignored
assertThat(testDeserialization(true, true, "#Test,12,Test")).isNull();
}
@Test
void testSerializationProperties() throws Exception {
DataType dataType = ROW(FIELD("f0", STRING()), FIELD("f1", INT()), FIELD("f2", STRING()));
RowType rowType = (RowType) dataType.getLogicalType();
CsvRowDataSerializationSchema.Builder serSchemaBuilder =
new CsvRowDataSerializationSchema.Builder(rowType);
assertThat(serialize(serSchemaBuilder, rowData("Test", 12, "Hello")))
.isEqualTo("Test,12,Hello".getBytes());
serSchemaBuilder.setQuoteCharacter('#');
assertThat(serialize(serSchemaBuilder, rowData("Test", 12, "2019-12-26 12:12:12")))
.isEqualTo("Test,12,#2019-12-26 12:12:12#".getBytes());
serSchemaBuilder.disableQuoteCharacter();
assertThat(serialize(serSchemaBuilder, rowData("Test", 12, "2019-12-26 12:12:12")))
.isEqualTo("Test,12,2019-12-26 12:12:12".getBytes());
}
@Test
void testInvalidNesting() {
assertThatThrownBy(
() ->
testNullableField(
ROW(FIELD("f0", ROW(FIELD("f0", STRING())))),
"FAIL",
Row.of(Row.of("FAIL"))))
.isInstanceOf(IllegalArgumentException.class);
}
@Test
void testInvalidType() {
assertThatThrownBy(
() ->
testNullableField(
RAW(Void.class, VoidSerializer.INSTANCE),
"FAIL",
new java.util.Date()))
.isInstanceOf(IllegalArgumentException.class);
}
@Test
void testSerializeDeserializeNestedTypes() throws Exception {
DataType subDataType0 =
ROW(
FIELD("f0c0", STRING()),
FIELD("f0c1", INT()),
FIELD("f0c2", STRING()),
FIELD("f0c3", TIMESTAMP()),
FIELD("f0c4", TIMESTAMP_LTZ()));
DataType subDataType1 =
ROW(
FIELD("f1c0", STRING()),
FIELD("f1c1", INT()),
FIELD("f1c2", STRING()),
FIELD("f0c3", TIMESTAMP()),
FIELD("f0c4", TIMESTAMP_LTZ()));
DataType dataType = ROW(FIELD("f0", subDataType0), FIELD("f1", subDataType1));
RowType rowType = (RowType) dataType.getLogicalType();
// serialization
CsvRowDataSerializationSchema.Builder serSchemaBuilder =
new CsvRowDataSerializationSchema.Builder(rowType);
// deserialization
CsvRowDataDeserializationSchema.Builder deserSchemaBuilder =
new CsvRowDataDeserializationSchema.Builder(rowType, InternalTypeInfo.of(rowType));
RowData normalRow =
GenericRowData.of(
rowData(
"hello",
1,
"This is 1st top column",
LocalDateTime.parse("1970-01-01T01:02:03"),
Instant.ofEpochMilli(1000)),
rowData(
"world",
2,
"This is 2nd top column",
LocalDateTime.parse("1970-01-01T01:02:04"),
Instant.ofEpochMilli(2000)));
testSerDeConsistency(normalRow, serSchemaBuilder, deserSchemaBuilder);
RowData nullRow =
GenericRowData.of(
null,
rowData(
"world",
2,
"This is 2nd top column after null",
LocalDateTime.parse("1970-01-01T01:02:05"),
Instant.ofEpochMilli(3000)));
testSerDeConsistency(nullRow, serSchemaBuilder, deserSchemaBuilder);
}
@Test
void testDeserializationWithDisableQuoteCharacter() throws Exception {
Consumer<CsvRowDataDeserializationSchema.Builder> deserConfig =
(deserSchemaBuilder) ->
deserSchemaBuilder.disableQuoteCharacter().setFieldDelimiter(',');
testFieldDeserialization(STRING(), "\"abc", "\"abc", deserConfig, ",");
}
@Test
void testSerializationWithTypesMismatch() {
DataType dataType = ROW(FIELD("f0", STRING()), FIELD("f1", INT()), FIELD("f2", INT()));
RowType rowType = (RowType) dataType.getLogicalType();
CsvRowDataSerializationSchema.Builder serSchemaBuilder =
new CsvRowDataSerializationSchema.Builder(rowType);
RowData rowData = rowData("Test", 1, "Test");
String errorMessage = "Fail to serialize at field: f2.";
assertThatThrownBy(() -> serialize(serSchemaBuilder, rowData))
.satisfies(anyCauseMatches(errorMessage));
}
@Test
void testDeserializationWithTypesMismatch() {
DataType dataType = ROW(FIELD("f0", STRING()), FIELD("f1", INT()), FIELD("f2", INT()));
RowType rowType = (RowType) dataType.getLogicalType();
CsvRowDataDeserializationSchema.Builder deserSchemaBuilder =
new CsvRowDataDeserializationSchema.Builder(rowType, InternalTypeInfo.of(rowType));
String data = "Test,1,Test";
String errorMessage = "Fail to deserialize at field: f2.";
assertThatThrownBy(() -> deserialize(deserSchemaBuilder, data))
.satisfies(anyCauseMatches(errorMessage));
}
private void testNullableField(DataType fieldType, String string, Object value)
throws Exception {
testField(
fieldType,
string,
value,
(deserSchema) -> deserSchema.setNullLiteral("null"),
(serSchema) -> serSchema.setNullLiteral("null"),
",");
}
private void testField(
DataType fieldType,
String csvValue,
Object value,
Consumer<CsvRowDataSerializationSchema.Builder> serializationConfig,
Consumer<CsvRowDataDeserializationSchema.Builder> deserializationConfig,
String fieldDelimiter)
throws Exception {
RowType rowType =
(RowType)
ROW(FIELD("f0", STRING()), FIELD("f1", fieldType), FIELD("f2", STRING()))
.getLogicalType();
String expectedCsv = "BEGIN" + fieldDelimiter + csvValue + fieldDelimiter + "END";
// deserialization
CsvRowDataDeserializationSchema.Builder deserSchemaBuilder =
new CsvRowDataDeserializationSchema.Builder(rowType, InternalTypeInfo.of(rowType));
deserializationConfig.accept(deserSchemaBuilder);
RowData deserializedRow = deserialize(deserSchemaBuilder, expectedCsv);
// serialization
CsvRowDataSerializationSchema.Builder serSchemaBuilder =
new CsvRowDataSerializationSchema.Builder(rowType);
serializationConfig.accept(serSchemaBuilder);
byte[] serializedRow = serialize(serSchemaBuilder, deserializedRow);
assertThat(new String(serializedRow)).isEqualTo(expectedCsv);
}
@SuppressWarnings("unchecked")
private void testFieldDeserialization(
DataType fieldType,
String csvValue,
Object value,
Consumer<CsvRowDataDeserializationSchema.Builder> deserializationConfig,
String fieldDelimiter)
throws Exception {
DataType dataType =
ROW(FIELD("f0", STRING()), FIELD("f1", fieldType), FIELD("f2", STRING()));
RowType rowType = (RowType) dataType.getLogicalType();
String csv = "BEGIN" + fieldDelimiter + csvValue + fieldDelimiter + "END";
Row expectedRow = Row.of("BEGIN", value, "END");
// deserialization
CsvRowDataDeserializationSchema.Builder deserSchemaBuilder =
new CsvRowDataDeserializationSchema.Builder(rowType, InternalTypeInfo.of(rowType));
deserializationConfig.accept(deserSchemaBuilder);
RowData deserializedRow = deserialize(deserSchemaBuilder, csv);
Row actualRow =
(Row)
DataFormatConverters.getConverterForDataType(dataType)
.toExternal(deserializedRow);
assertThat(actualRow).isEqualTo(expectedRow);
}
@SuppressWarnings("unchecked")
private Row testDeserialization(
boolean allowParsingErrors, boolean allowComments, String string) throws Exception {
DataType dataType = ROW(FIELD("f0", STRING()), FIELD("f1", INT()), FIELD("f2", STRING()));
RowType rowType = (RowType) dataType.getLogicalType();
CsvRowDataDeserializationSchema.Builder deserSchemaBuilder =
new CsvRowDataDeserializationSchema.Builder(rowType, InternalTypeInfo.of(rowType))
.setIgnoreParseErrors(allowParsingErrors)
.setAllowComments(allowComments);
RowData deserializedRow = deserialize(deserSchemaBuilder, string);
return (Row)
DataFormatConverters.getConverterForDataType(dataType).toExternal(deserializedRow);
}
private void testSerDeConsistency(
RowData originalRow,
CsvRowDataSerializationSchema.Builder serSchemaBuilder,
CsvRowDataDeserializationSchema.Builder deserSchemaBuilder)
throws Exception {
RowData deserializedRow =
deserialize(
deserSchemaBuilder, new String(serialize(serSchemaBuilder, originalRow)));
assertThat(originalRow).isEqualTo(deserializedRow);
}
private static byte[] serialize(
CsvRowDataSerializationSchema.Builder serSchemaBuilder, RowData row) throws Exception {
// we serialize and deserialize the schema to test runtime behavior
// when the schema is shipped to the cluster
CsvRowDataSerializationSchema schema =
InstantiationUtil.deserializeObject(
InstantiationUtil.serializeObject(serSchemaBuilder.build()),
CsvRowDataSerDeSchemaTest.class.getClassLoader());
open(schema);
return schema.serialize(row);
}
private static RowData deserialize(
CsvRowDataDeserializationSchema.Builder deserSchemaBuilder, String csv)
throws Exception {
// we serialize and deserialize the schema to test runtime behavior
// when the schema is shipped to the cluster
CsvRowDataDeserializationSchema schema =
InstantiationUtil.deserializeObject(
InstantiationUtil.serializeObject(deserSchemaBuilder.build()),
CsvRowDataSerDeSchemaTest.class.getClassLoader());
open(schema);
return schema.deserialize(csv != null ? csv.getBytes() : null);
}
private static RowData rowData(String str1, int integer, String str2) {
return GenericRowData.of(fromString(str1), integer, fromString(str2));
}
private static RowData rowData(
String str1, int integer, String str2, LocalDateTime localDateTime, Instant instant) {
return GenericRowData.of(
fromString(str1),
integer,
fromString(str2),
fromLocalDateTime(localDateTime),
fromInstant(instant));
}
}
| CsvRowDataSerDeSchemaTest |
java | netty__netty | example/src/main/java/io/netty/example/worldclock/WorldClockServerInitializer.java | {
"start": 1111,
"end": 1853
} | class ____ extends ChannelInitializer<SocketChannel> {
private final SslContext sslCtx;
public WorldClockServerInitializer(SslContext sslCtx) {
this.sslCtx = sslCtx;
}
@Override
public void initChannel(SocketChannel ch) throws Exception {
ChannelPipeline p = ch.pipeline();
if (sslCtx != null) {
p.addLast(sslCtx.newHandler(ch.alloc()));
}
p.addLast(new ProtobufVarint32FrameDecoder());
p.addLast(new ProtobufDecoder(WorldClockProtocol.Locations.getDefaultInstance()));
p.addLast(new ProtobufVarint32LengthFieldPrepender());
p.addLast(new ProtobufEncoder());
p.addLast(new WorldClockServerHandler());
}
}
| WorldClockServerInitializer |
java | spring-projects__spring-framework | spring-oxm/src/test/java/org/springframework/oxm/xstream/FlightSubclass.java | {
"start": 708,
"end": 749
} | class ____ extends Flight {
}
| FlightSubclass |
java | FasterXML__jackson-databind | src/test/java/tools/jackson/databind/jsontype/SubTypeResolutionTest.java | {
"start": 898,
"end": 987
} | class ____ extends DatabindTestUtil
{
// [databind#1964]
static | SubTypeResolutionTest |
java | spring-projects__spring-boot | core/spring-boot-autoconfigure/src/test/java/org/springframework/boot/autoconfigure/condition/ConditionalOnBeanTests.java | {
"start": 23812,
"end": 23929
} | class ____ extends ExampleBean {
CustomExampleBean() {
super("custom subclass");
}
}
static | CustomExampleBean |
java | netty__netty | codec-http/src/main/java/io/netty/handler/codec/http/InvalidLineSeparatorException.java | {
"start": 1271,
"end": 1867
} | class ____ extends DecoderException {
private static final long serialVersionUID = 536224937231200736L;
public InvalidLineSeparatorException() {
super("Line Feed must be preceded by Carriage Return when terminating HTTP start- and header field-lines");
}
public InvalidLineSeparatorException(String message, Throwable cause) {
super(message, cause);
}
public InvalidLineSeparatorException(String message) {
super(message);
}
public InvalidLineSeparatorException(Throwable cause) {
super(cause);
}
}
| InvalidLineSeparatorException |
java | apache__camel | components/camel-fhir/camel-fhir-component/src/generated/java/org/apache/camel/component/fhir/FhirCapabilitiesEndpointConfiguration.java | {
"start": 1016,
"end": 2146
} | class ____ extends FhirConfiguration {
@UriParam
@ApiParam(optional = true, apiMethods = {@ApiMethod(methodName = "ofType", description="See ExtraParameters for a full list of parameters that can be passed, may be NULL")})
private java.util.Map<org.apache.camel.component.fhir.api.ExtraParameters, Object> extraParameters;
@UriParam
@ApiParam(optional = false, apiMethods = {@ApiMethod(methodName = "ofType", description="The model type")})
private Class<org.hl7.fhir.instance.model.api.IBaseConformance> type;
public java.util.Map<org.apache.camel.component.fhir.api.ExtraParameters, Object> getExtraParameters() {
return extraParameters;
}
public void setExtraParameters(java.util.Map<org.apache.camel.component.fhir.api.ExtraParameters, Object> extraParameters) {
this.extraParameters = extraParameters;
}
public Class<org.hl7.fhir.instance.model.api.IBaseConformance> getType() {
return type;
}
public void setType(Class<org.hl7.fhir.instance.model.api.IBaseConformance> type) {
this.type = type;
}
}
| FhirCapabilitiesEndpointConfiguration |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/store/audit/HttpReferrerAuditHeader.java | {
"start": 2838,
"end": 11270
} | class ____ {
/**
* Format of path to build: {@value}.
* the params passed in are (context ID, span ID, op).
* Update
* {@code TestHttpReferrerAuditHeader.SAMPLE_LOG_ENTRY} on changes
*/
public static final String REFERRER_PATH_FORMAT = "/hadoop/1/%3$s/%2$s/";
private static final Logger LOG =
LoggerFactory.getLogger(HttpReferrerAuditHeader.class);
/**
* Log for warning of problems creating headers will only log of
* a problem once per process instance.
* This is to avoid logs being flooded with errors.
*/
private static final LogExactlyOnce WARN_OF_URL_CREATION =
new LogExactlyOnce(LOG);
/**
* Log for warning of an exception raised when building
* the referrer header, including building the evaluated
* attributes.
*/
private static final LogExactlyOnce ERROR_BUILDING_REFERRER_HEADER =
new LogExactlyOnce(LOG);
/** Context ID. */
private final String contextId;
/** operation name. */
private final String operationName;
/** Span ID. */
private final String spanId;
/** optional first path. */
private final String path1;
/** optional second path. */
private final String path2;
/**
* The header as created in the constructor; used in toString().
* A new header is built on demand in {@link #buildHttpReferrer()}
* so that evaluated attributes are dynamically evaluated
* in the correct thread/place.
*/
private final String initialHeader;
/**
* Map of simple attributes.
*/
private final Map<String, String> attributes;
/**
* Parameters dynamically evaluated on the thread just before
* the request is made.
*/
private final Map<String, Supplier<String>> evaluated;
/**
* Elements to filter from the final header.
*/
private final Set<String> filter;
/**
* Instantiate.
* <p>
* All maps/enums passed down are copied into thread safe equivalents.
* as their origin is unknown and cannot be guaranteed to
* not be shared.
* <p>
* Context and operationId are expected to be well formed
* numeric/hex strings, at least adequate to be
* used as individual path elements in a URL.
*/
private HttpReferrerAuditHeader(
final Builder builder) {
this.contextId = requireNonNull(builder.contextId);
this.evaluated = new ConcurrentHashMap<>(builder.evaluated);
this.filter = ImmutableSet.copyOf(builder.filter);
this.operationName = requireNonNull(builder.operationName);
this.path1 = builder.path1;
this.path2 = builder.path2;
this.spanId = requireNonNull(builder.spanId);
// copy the parameters from the builder and extend
attributes = new ConcurrentHashMap<>(builder.attributes);
addAttribute(PARAM_OP, operationName);
addAttribute(PARAM_PATH, path1);
addAttribute(PARAM_PATH2, path2);
addAttribute(PARAM_ID, spanId);
// patch in global context values where not set
Iterable<Map.Entry<String, String>> globalContextValues
= builder.globalContextValues;
if (globalContextValues != null) {
for (Map.Entry<String, String> entry : globalContextValues) {
attributes.putIfAbsent(entry.getKey(), entry.getValue());
}
}
// build the referrer up. so as to find/report problems early
initialHeader = buildHttpReferrer();
}
/**
* Build the referrer string.
* This includes dynamically evaluating all of the evaluated
* attributes.
* If there is an error creating the string it will be logged once
* per entry, and "" returned.
* @return a referrer string or ""
*/
public synchronized String buildHttpReferrer() {
String header;
try {
Map<String, String> requestAttrs = new HashMap<>(attributes);
String queries;
// Update any params which are dynamically evaluated
evaluated.forEach((key, eval) ->
requestAttrs.put(key, eval.get()));
// now build the query parameters from all attributes, static and
// evaluated, stripping out any from the filter
queries = requestAttrs.entrySet().stream()
.filter(e -> !filter.contains(e.getKey()))
.map(e -> e.getKey() + "=" + e.getValue())
.collect(Collectors.joining("&"));
final URI uri = new URI("https", REFERRER_ORIGIN_HOST,
String.format(Locale.ENGLISH, REFERRER_PATH_FORMAT,
contextId, spanId, operationName),
queries,
null);
header = uri.toASCIIString();
} catch (URISyntaxException e) {
WARN_OF_URL_CREATION.warn("Failed to build URI for auditor: " + e, e);
header = "";
} catch (RuntimeException e) {
// do not let failure to build the header stop the request being
// issued.
ERROR_BUILDING_REFERRER_HEADER.warn("Failed to construct referred header {}", e.toString());
LOG.debug("Full stack", e);
header = "";
}
return header;
}
/**
* Add a query parameter if not null/empty
* There's no need to escape here as it is done in the URI
* constructor.
* @param key query key
* @param value query value
*/
private synchronized void addAttribute(String key,
String value) {
if (StringUtils.isNotEmpty(value)) {
attributes.put(key, value);
}
}
/**
* Set an attribute. If the value is non-null/empty,
* it will be used as a query parameter.
*
* @param key key to set
* @param value value.
*/
public void set(final String key, final String value) {
addAttribute(requireNonNull(key), value);
}
public String getContextId() {
return contextId;
}
public String getOperationName() {
return operationName;
}
public String getSpanId() {
return spanId;
}
public String getPath1() {
return path1;
}
public String getPath2() {
return path2;
}
@Override
public String toString() {
return new StringJoiner(", ",
HttpReferrerAuditHeader.class.getSimpleName() + "[", "]")
.add(initialHeader)
.toString();
}
/**
* Perform any escaping to valid path elements in advance of
* new URI() doing this itself. Only path separators need to
* be escaped/converted at this point.
* @param source source string
* @return an escaped path element.
*/
public static String escapeToPathElement(CharSequence source) {
int len = source.length();
StringBuilder r = new StringBuilder(len);
for (int i = 0; i < len; i++) {
char c = source.charAt(i);
String s = Character.toString(c);
switch (c) {
case '/':
case '@':
s = "+";
break;
default:
break;
}
r.append(s);
}
return r.toString();
}
/**
* Strip any quotes from around a header.
* This is needed when processing log entries.
* @param header field.
* @return field without quotes.
*/
public static String maybeStripWrappedQuotes(String header) {
String h = header;
// remove quotes if needed.
while (h.startsWith("\"")) {
h = h.substring(1);
}
while (h.endsWith("\"")) {
h = h.substring(0, h.length() - 1);
}
return h;
}
/**
* Split up the string. Uses httpClient: make sure it is on the classpath.
* Any query param with a name but no value, e.g ?something is
* returned in the map with an empty string as the value.
* @param header URI to parse
* @return a map of parameters.
* @throws URISyntaxException failure to build URI from header.
*/
public static Map<String, String> extractQueryParameters(String header)
throws URISyntaxException {
URI uri = new URI(maybeStripWrappedQuotes(header));
// get the decoded query
List<NameValuePair> params = URLEncodedUtils.parse(uri,
StandardCharsets.UTF_8);
Map<String, String> result = new HashMap<>(params.size());
for (NameValuePair param : params) {
String name = param.getName();
String value = param.getValue();
if (value == null) {
value = "";
}
result.put(name, value);
}
return result;
}
/**
* Get a builder.
* @return a new builder.
*/
public static Builder builder() {
return new Builder();
}
/**
* Builder.
*
* Context and operationId are expected to be well formed
* numeric/hex strings, at least adequate to be
* used as individual path elements in a URL.
*/
public static final | HttpReferrerAuditHeader |
java | elastic__elasticsearch | server/src/test/java/org/elasticsearch/search/aggregations/metrics/PercentilesMethodTests.java | {
"start": 818,
"end": 2757
} | class ____ extends ESTestCase {
public void testValidOrdinals() {
assertThat(PercentilesMethod.TDIGEST.ordinal(), equalTo(0));
assertThat(PercentilesMethod.HDR.ordinal(), equalTo(1));
}
public void testwriteTo() throws Exception {
try (BytesStreamOutput out = new BytesStreamOutput()) {
PercentilesMethod.TDIGEST.writeTo(out);
try (StreamInput in = out.bytes().streamInput()) {
assertThat(in.readVInt(), equalTo(0));
}
}
try (BytesStreamOutput out = new BytesStreamOutput()) {
PercentilesMethod.HDR.writeTo(out);
try (StreamInput in = out.bytes().streamInput()) {
assertThat(in.readVInt(), equalTo(1));
}
}
}
public void testReadFrom() throws Exception {
try (BytesStreamOutput out = new BytesStreamOutput()) {
out.writeVInt(0);
try (StreamInput in = out.bytes().streamInput()) {
assertThat(PercentilesMethod.readFromStream(in), equalTo(PercentilesMethod.TDIGEST));
}
}
try (BytesStreamOutput out = new BytesStreamOutput()) {
out.writeVInt(1);
try (StreamInput in = out.bytes().streamInput()) {
assertThat(PercentilesMethod.readFromStream(in), equalTo(PercentilesMethod.HDR));
}
}
}
public void testInvalidReadFrom() throws Exception {
try (BytesStreamOutput out = new BytesStreamOutput()) {
out.writeVInt(randomIntBetween(2, Integer.MAX_VALUE));
try (StreamInput in = out.bytes().streamInput()) {
PercentilesMethod.readFromStream(in);
fail("Expected IOException");
} catch (IOException e) {
assertThat(e.getMessage(), containsString("Unknown PercentilesMethod ordinal ["));
}
}
}
}
| PercentilesMethodTests |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/filecontroller/LogAggregationFileController.java | {
"start": 4522,
"end": 5464
} | class ____ {
private Class<? extends FileSystem> fsType;
private Path logPath;
FsLogPathKey(Class<? extends FileSystem> fsType, Path logPath) {
this.fsType = fsType;
this.logPath = logPath;
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
FsLogPathKey that = (FsLogPathKey) o;
return Objects.equals(fsType, that.fsType) && Objects.equals(logPath, that.logPath);
}
@Override
public int hashCode() {
return Objects.hash(fsType, logPath);
}
}
private static final ConcurrentHashMap<FsLogPathKey, Boolean> FS_CHMOD_CACHE
= new ConcurrentHashMap<>();
public LogAggregationFileController() {}
/**
* Initialize the log file controller.
* @param conf the Configuration
* @param controllerName the log controller | FsLogPathKey |
java | google__error-prone | core/src/main/java/com/google/errorprone/bugpatterns/time/InstantTemporalUnit.java | {
"start": 2096,
"end": 3703
} | class ____ extends BugChecker implements MethodInvocationTreeMatcher {
private static final String INSTANT = "java.time.Instant";
private static final String TEMPORAL_UNIT = "java.time.temporal.TemporalUnit";
private static final Matcher<ExpressionTree> INSTANT_OF_LONG_TEMPORAL_UNIT =
allOf(
anyOf(
instanceMethod()
.onExactClass(INSTANT)
.named("minus")
.withParameters("long", TEMPORAL_UNIT),
instanceMethod()
.onExactClass(INSTANT)
.named("plus")
.withParameters("long", TEMPORAL_UNIT),
instanceMethod()
.onExactClass(INSTANT)
.named("until")
.withParameters("java.time.temporal.Temporal", TEMPORAL_UNIT)),
Matchers.not(Matchers.packageStartsWith("java.")));
// This definition comes from Instant.isSupported(TemporalUnit)
static final ImmutableSet<ChronoUnit> INVALID_TEMPORAL_UNITS =
Arrays.stream(ChronoUnit.values())
.filter(c -> !c.isTimeBased())
.filter(c -> !c.equals(ChronoUnit.DAYS)) // DAYS is explicitly allowed
.collect(toImmutableEnumSet());
@Override
public Description matchMethodInvocation(MethodInvocationTree tree, VisitorState state) {
if (INSTANT_OF_LONG_TEMPORAL_UNIT.matches(tree, state)) {
if (getInvalidChronoUnit(tree.getArguments().get(1), INVALID_TEMPORAL_UNITS).isPresent()) {
return describeMatch(tree);
}
}
return Description.NO_MATCH;
}
}
| InstantTemporalUnit |
java | apache__camel | components/camel-as2/camel-as2-component/src/test/java/org/apache/camel/component/as2/AS2AsyncMDNServerManagerIT.java | {
"start": 3185,
"end": 21349
} | class ____ extends AbstractAS2ITSupport {
private static final String SERVER_FQDN = "server.example.com";
private static final String ORIGIN_SERVER_NAME = "AS2ClientManagerIntegrationTest Server";
private static final String AS2_VERSION = "1.1";
private static final String REQUEST_URI = "/";
private static final String SUBJECT = "Test Case";
private static final String AS2_NAME = "878051556";
private static final String FROM = "mrAS@example.org";
private static final String MDN_FROM = "as2Test@server.example.com";
private static final String MDN_SUBJECT_PREFIX = "MDN Response:";
private static final String MDN_MESSAGE_TEMPLATE = "TBD";
private static final String EDI_MESSAGE = "UNB+UNOA:1+005435656:1+006415160:1+060515:1434+00000000000778'\n"
+ "UNH+00000000000117+INVOIC:D:97B:UN'\n"
+ "BGM+380+342459+9'\n"
+ "DTM+3:20060515:102'\n"
+ "RFF+ON:521052'\n"
+ "NAD+BY+792820524::16++CUMMINS MID-RANGE ENGINE PLANT'\n"
+ "NAD+SE+005435656::16++GENERAL WIDGET COMPANY'\n"
+ "CUX+1:USD'\n"
+ "LIN+1++157870:IN'\n"
+ "IMD+F++:::WIDGET'\n"
+ "QTY+47:1020:EA'\n"
+ "ALI+US'\n"
+ "MOA+203:1202.58'\n"
+ "PRI+INV:1.179'\n"
+ "LIN+2++157871:IN'\n"
+ "IMD+F++:::DIFFERENT WIDGET'\n"
+ "QTY+47:20:EA'\n"
+ "ALI+JP'\n"
+ "MOA+203:410'\n"
+ "PRI+INV:20.5'\n"
+ "UNS+S'\n"
+ "MOA+39:2137.58'\n"
+ "ALC+C+ABG'\n"
+ "MOA+8:525'\n"
+ "UNT+23+00000000000117'\n"
+ "UNZ+1+00000000000778'\n";
private static final String EDI_MESSAGE_CONTENT_TRANSFER_ENCODING = "7bit";
private static final int PARTNER_TARGET_PORT = 8889;
private static final int RECEIPT_SERVER_PORT = AvailablePortFinder.getNextAvailable();
private static final int RECEIPT_SERVER_PORT2 = AvailablePortFinder.getNextAvailable();
private static final int RECEIPT_SERVER_PORT3 = AvailablePortFinder.getNextAvailable();
private static final int RECEIPT_SERVER_PORT4 = AvailablePortFinder.getNextAvailable();
private static AS2ServerConnection serverConnection;
private static RequestHandler requestHandler;
private static final String SIGNED_RECEIPT_MIC_ALGORITHMS = "sha1,md5";
private static KeyPair serverKP;
private static X509Certificate serverCert;
private static KeyPair clientKeyPair;
private static X509Certificate clientCert;
@BeforeAll
public static void setupTest() throws Exception {
setupKeysAndCertificates();
receiveTestMessages();
}
@AfterAll
public static void teardownTest() {
if (serverConnection != null) {
serverConnection.close();
}
}
// Verify the MDN is receipt returned asynchronously from the server when the request headers includes the
// 'Receipt-Delivery-Option' header specifying the return-URL.
@Test
public void deliveryHeaderMultipartReportTest() throws Exception {
DispositionNotificationMultipartReportEntity reportEntity
= executeRequestWithAsyncResponseHeader("direct://SEND", RECEIPT_SERVER_PORT, "mock:as2RcvRcptMsgs");
verifyMultiPartReportParts(reportEntity);
verifyMultiPartReportEntity(reportEntity);
}
// Verify the MDN is receipt returned asynchronously from the server when the endpoint uri includes the
// 'Receipt-Delivery-Option' path variable specifying the return-URL.
@Test
public void deliveryPathMultipartReportTest() throws Exception {
DispositionNotificationMultipartReportEntity reportEntity
= executeRequestWithAsyncResponsePath("direct://SEND3", "mock:as2RcvRcptMsgs3");
verifyMultiPartReportParts(reportEntity);
verifyMultiPartReportEntity(reportEntity);
}
// Verify the signed MDN receipt returned asynchronously from the server when the request headers includes the
// 'Receipt-Delivery-Option' header specifying the return-URL.
@Test
public void deliveryHeaderMultipartSignedEntityTest() throws Exception {
MultipartSignedEntity responseSignedEntity
= executeRequestWithSignedAsyncResponseHeader("direct://SEND", RECEIPT_SERVER_PORT2, "mock:as2RcvRcptMsgs2");
MimeEntity responseSignedDataEntity = responseSignedEntity.getSignedDataEntity();
assertTrue(responseSignedDataEntity instanceof DispositionNotificationMultipartReportEntity,
"Signed entity wrong type");
DispositionNotificationMultipartReportEntity reportEntity
= (DispositionNotificationMultipartReportEntity) responseSignedEntity.getSignedDataEntity();
verifyMultiPartReportParts(reportEntity);
verifyMultiPartReportEntity(reportEntity);
ApplicationPkcs7SignatureEntity signatureEntity = responseSignedEntity.getSignatureEntity();
assertNotNull(signatureEntity, "Signature Entity");
verifyMdnSignature(reportEntity);
}
// Verify the signed MDN receipt returned asynchronously from the server when the endpoint uri includes the
// 'Receipt-Delivery-Option' path variable specifying the return-URL.
@Test
public void deliveryPathMultipartSignedEntityTest() throws Exception {
MultipartSignedEntity responseSignedEntity
= executeRequestWithSignedAsyncResponsePath("direct://SEND4", "mock:as2RcvRcptMsgs4");
MimeEntity responseSignedDataEntity = responseSignedEntity.getSignedDataEntity();
assertTrue(responseSignedDataEntity instanceof DispositionNotificationMultipartReportEntity,
"Signed entity wrong type");
DispositionNotificationMultipartReportEntity reportEntity
= (DispositionNotificationMultipartReportEntity) responseSignedEntity.getSignedDataEntity();
verifyMultiPartReportParts(reportEntity);
verifyMultiPartReportEntity(reportEntity);
ApplicationPkcs7SignatureEntity signatureEntity = responseSignedEntity.getSignatureEntity();
assertNotNull(signatureEntity, "Signature Entity");
verifyMdnSignature(reportEntity);
}
private void verifyMultiPartReportParts(DispositionNotificationMultipartReportEntity reportEntity) {
assertEquals(2, reportEntity.getPartCount(), "Unexpected number of body parts in report");
MimeEntity reportPartOne = reportEntity.getPart(0);
assertEquals(ContentType.create(AS2MimeType.TEXT_PLAIN, StandardCharsets.US_ASCII).toString(),
reportPartOne.getContentType(), "Unexpected content type in first body part of report");
MimeEntity reportPartTwo = reportEntity.getPart(1);
assertEquals(ContentType.create(
AS2MimeType.MESSAGE_DISPOSITION_NOTIFICATION).toString(), reportPartTwo.getContentType(),
"Unexpected content type in second body part of report");
assertTrue(reportPartTwo instanceof AS2MessageDispositionNotificationEntity);
}
private void verifyMultiPartReportEntity(DispositionNotificationMultipartReportEntity reportEntity) {
// second part of MDN report
AS2MessageDispositionNotificationEntity messageDispositionNotificationEntity
= (AS2MessageDispositionNotificationEntity) reportEntity.getPart(1);
assertEquals(ORIGIN_SERVER_NAME, messageDispositionNotificationEntity.getReportingUA(),
"Unexpected value for reporting UA");
assertEquals(AS2_NAME, messageDispositionNotificationEntity.getFinalRecipient(),
"Unexpected value for final recipient");
String uniqueMessageId = HttpMessageUtils.getHeaderValue(requestHandler.getRequest(), AS2Header.MESSAGE_ID);
assertEquals(uniqueMessageId, messageDispositionNotificationEntity.getOriginalMessageId(),
"Unexpected value for original message ID");
assertEquals(DispositionMode.AUTOMATIC_ACTION_MDN_SENT_AUTOMATICALLY,
messageDispositionNotificationEntity.getDispositionMode(), "Unexpected value for disposition mode");
assertEquals(AS2DispositionType.PROCESSED, messageDispositionNotificationEntity.getDispositionType(),
"Unexpected value for disposition type");
}
private void verifyMdnSignature(DispositionNotificationMultipartReportEntity reportEntity) throws HttpException {
AS2MessageDispositionNotificationEntity messageDispositionNotificationEntity
= (AS2MessageDispositionNotificationEntity) reportEntity.getPart(1);
MicUtils.ReceivedContentMic receivedContentMic = messageDispositionNotificationEntity.getReceivedContentMic();
MicUtils.ReceivedContentMic computedContentMic
= MicUtils.createReceivedContentMic(
(ClassicHttpRequest) requestHandler.getRequest(), new Certificate[] { clientCert },
null);
assertEquals(computedContentMic.getEncodedMessageDigest(), receivedContentMic.getEncodedMessageDigest(),
"Received content MIC does not match computed");
}
private DispositionNotificationMultipartReportEntity verifyAsyncResponse(String mock) throws Exception {
Exchange exchange = receiveFromMock(mock);
Message message = exchange.getIn();
assertNotNull(message);
assertTrue(message.getBody() instanceof DispositionNotificationMultipartReportEntity);
return (DispositionNotificationMultipartReportEntity) message.getBody();
}
private MultipartSignedEntity verifySignedAsyncResponse(String mock) throws Exception {
Exchange exchange = receiveFromMock(mock);
Message message = exchange.getIn();
assertNotNull(message);
assertTrue(message.getBody() instanceof MultipartSignedEntity);
return (MultipartSignedEntity) message.getBody();
}
// Request asynchronous receipt by including a 'receiptDeliveryOption' header specifying the return url.
private DispositionNotificationMultipartReportEntity executeRequestWithAsyncResponseHeader(
String endpointUri, int port, String mock)
throws Exception {
executeRequestAsyncHeader(endpointUri, getAS2HeadersForAsyncReceipt("http://localhost:" + port + "/handle-receipts"));
return verifyAsyncResponse(mock);
}
// Request asynchronous receipt by including a 'receiptDeliveryOption' path parameter in the endpoint uri
// specifying the return url.
private DispositionNotificationMultipartReportEntity executeRequestWithAsyncResponsePath(String endpointUri, String mock)
throws Exception {
executeRequestAsyncPath(endpointUri, getAS2Headers());
return verifyAsyncResponse(mock);
}
// Request signed asynchronous receipt by including a 'receiptDeliveryOption' header specifying the return url,
// and a 'signedReceiptMicAlgorithms' header specifying the signing algorithms.
private MultipartSignedEntity executeRequestWithSignedAsyncResponseHeader(String endpointUri, int port, String mock)
throws Exception {
Map<String, Object> headers = getAS2HeadersForAsyncReceipt("http://localhost:" + port + "/handle-receipts");
addSignedMessageHeaders(headers);
// In order to receive signed MDN receipts the client must include both the 'signed-receipt-protocol' and
// the 'signed-receipt-micalg' option parameters.
executeRequestAsyncHeader(endpointUri, headers);
return verifySignedAsyncResponse(mock);
}
// Request a signed asynchronous receipt by including a 'receiptDeliveryOption' path parameter in the endpoint uri
// specifying the return url, and a 'signedReceiptMicAlgorithms' header specifying the signing algorithms.
private MultipartSignedEntity executeRequestWithSignedAsyncResponsePath(String endpointUri, String mock) throws Exception {
Map<String, Object> headers = getAS2Headers();
addSignedMessageHeaders(headers);
// In order to receive signed MDN receipts the client must include both the 'signed-receipt-protocol' and
// the 'signed-receipt-micalg' option parameters.
executeRequestAsyncPath(endpointUri, headers);
return verifySignedAsyncResponse(mock);
}
private void addSignedMessageHeaders(Map<String, Object> headers) {
headers.put("CamelAs2.as2MessageStructure", AS2MessageStructure.SIGNED);
headers.put("CamelAs2.signedReceiptMicAlgorithms", SIGNED_RECEIPT_MIC_ALGORITHMS);
headers.put("CamelAs2.signingCertificateChain", new Certificate[] { clientCert });
headers.put("CamelAs2.signingPrivateKey", clientKeyPair.getPrivate());
headers.put("CamelAs2.signingAlgorithm", AS2SignatureAlgorithm.SHA512WITHRSA);
}
// Headers required for a client to call the AS2 'send' api.
private Map<String, Object> getAS2Headers() {
final Map<String, Object> headers = new HashMap<>();
headers.put("CamelAs2.requestUri", REQUEST_URI);
headers.put("CamelAs2.subject", SUBJECT);
headers.put("CamelAs2.from", FROM);
headers.put("CamelAs2.as2From", AS2_NAME);
headers.put("CamelAs2.as2To", AS2_NAME);
headers.put("CamelAs2.as2MessageStructure", AS2MessageStructure.PLAIN);
headers.put("CamelAs2.ediMessageContentType", AS2MediaType.APPLICATION_EDIFACT);
headers.put("CamelAs2.ediMessageTransferEncoding", EDI_MESSAGE_CONTENT_TRANSFER_ENCODING);
headers.put("CamelAs2.dispositionNotificationTo", "mrAS2@example.com");
return headers;
}
// Headers requesting that the AS2-MDN (receipt) be returned asynchronously
private Map<String, Object> getAS2HeadersForAsyncReceipt(String deliveryAddress) {
Map<String, Object> headers = getAS2Headers();
headers.put("CamelAs2.receiptDeliveryOption", deliveryAddress);
return headers;
}
private Exchange receiveFromMock(String mockUri) throws Exception {
MockEndpoint mockEndpoint = getMockEndpoint(mockUri);
mockEndpoint.expectedMinimumMessageCount(1);
mockEndpoint.setResultWaitTime(TimeUnit.MILLISECONDS.convert(30, TimeUnit.SECONDS));
mockEndpoint.assertIsSatisfied();
List<Exchange> exchanges = mockEndpoint.getExchanges();
assertNotNull(exchanges);
assertFalse(exchanges.isEmpty());
return exchanges.get(0);
}
private Triple<HttpEntity, HttpRequest, HttpResponse> executeRequestAsyncHeader(
String endpointUri, Map<String, Object> headers)
throws Exception {
HttpEntity responseEntity = requestBodyAndHeaders(endpointUri, EDI_MESSAGE, headers);
return new ImmutableTriple<>(responseEntity, requestHandler.getRequest(), requestHandler.getResponse());
}
private Triple<HttpEntity, HttpRequest, HttpResponse> executeRequestAsyncPath(
String endpointUri, Map<String, Object> headers)
throws Exception {
HttpEntity responseEntity = requestBodyAndHeaders(endpointUri, EDI_MESSAGE, headers);
return new ImmutableTriple<>(responseEntity, requestHandler.getRequest(), requestHandler.getResponse());
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
public void configure() {
// with option for asynchronous receipt specified as header
from("direct://SEND")
.to("as2://client/send?inBody=ediMessage&httpSocketTimeout=5m&httpConnectionTimeout=5m");
// with option for asynchronous receipt specified as path-param
from("direct://SEND3")
.toF("as2://client/send?inBody=ediMessage&httpSocketTimeout=5m&httpConnectionTimeout=5m"
+ "&receiptDeliveryOption=%s", "http://localhost:" + RECEIPT_SERVER_PORT3 + "/handle-receipts");
from("direct://SEND4")
.toF("as2://client/send?inBody=ediMessage&httpSocketTimeout=5m&httpConnectionTimeout=5m"
+ "&receiptDeliveryOption=%s", "http://localhost:" + RECEIPT_SERVER_PORT4 + "/handle-receipts");
// asynchronous AS2-MDN (receipt) server instance
fromF("as2://receipt/receive?requestUriPattern=/handle-receipts&asyncMdnPortNumber=%s",
RECEIPT_SERVER_PORT)
.to("mock:as2RcvRcptMsgs");
fromF("as2://receipt/receive?requestUriPattern=/handle-receipts&asyncMdnPortNumber=%s",
RECEIPT_SERVER_PORT2)
.to("mock:as2RcvRcptMsgs2");
fromF("as2://receipt/receive?requestUriPattern=/handle-receipts&asyncMdnPortNumber=%s",
RECEIPT_SERVER_PORT3)
.to("mock:as2RcvRcptMsgs3");
fromF("as2://receipt/receive?requestUriPattern=/handle-receipts&asyncMdnPortNumber=%s",
RECEIPT_SERVER_PORT4)
.to("mock:as2RcvRcptMsgs4");
}
};
}
public static | AS2AsyncMDNServerManagerIT |
java | google__auto | common/src/test/java/com/google/auto/common/MoreTypesIsTypeOfTest.java | {
"start": 1433,
"end": 1812
} | class ____ {
@Rule public CompilationRule compilationRule = new CompilationRule();
private Elements elementUtils;
private Types typeUtils;
@Before
public void setUp() {
this.elementUtils = compilationRule.getElements();
this.typeUtils = compilationRule.getTypes();
}
@Test
public void isTypeOf_primitiveAndBoxedPrimitiveTypes() {
| MoreTypesIsTypeOfTest |
java | redisson__redisson | redisson/src/main/java/org/redisson/api/listener/ScoredSortedSetAddListener.java | {
"start": 923,
"end": 1141
} | interface ____ extends ObjectListener {
/**
* Invoked when entry added to RScoredSortedSet object
*
* @param name - name of object
*/
void onAdd(String name);
}
| ScoredSortedSetAddListener |
java | spring-projects__spring-framework | framework-docs/src/main/java/org/springframework/docs/web/webmvc/mvcconfig/mvcconfigadvancedjava/WebConfiguration.java | {
"start": 894,
"end": 985
} | class ____ extends DelegatingWebMvcConfiguration {
// ...
}
// end::snippet[] | WebConfiguration |
java | google__guava | android/guava-tests/test/com/google/common/eventbus/EventBusTest.java | {
"start": 9583,
"end": 10097
} | class ____ be sure that we generate two methods (bridge and original).
@SuppressWarnings("AnonymousToLambda")
public void testRegistrationWithBridgeMethod() {
AtomicInteger calls = new AtomicInteger();
bus.register(
new Callback<String>() {
@Subscribe
@Override
public void call(String s) {
calls.incrementAndGet();
}
});
bus.post("hello");
assertEquals(1, calls.get());
}
public void testPrimitiveSubscribeFails() {
| to |
java | eclipse-vertx__vert.x | vertx-core/src/test/java/io/vertx/it/servicehelper/ServiceHelperTest.java | {
"start": 3725,
"end": 4523
} | class ____ a custom classloader.
ClassLoader custom = new URLClassLoader(new URL[]{
new File(TestUtils.MAVEN_TARGET_DIR, "classes").toURI().toURL(),
new File(TestUtils.MAVEN_TARGET_DIR, "test-classes").toURI().toURL(),
serviceHelperFile.toURI().toURL(),
}, null);
Class serviceHelperClass = custom.loadClass(ServiceHelper.class.getName());
Class someFactoryClass = custom.loadClass(SomeFactory.class.getName());
assertThat(serviceHelperClass.getClassLoader()).isEqualTo(custom);
assertThat(someFactoryClass.getClassLoader()).isEqualTo(custom);
Method method = serviceHelperClass.getMethod("loadFactories", Class.class);
Collection collection = (Collection) method.invoke(null, someFactoryClass);
assertThat(collection).hasSize(1);
}
}
| from |
java | google__error-prone | core/src/main/java/com/google/errorprone/bugpatterns/AbstractToString.java | {
"start": 11357,
"end": 11576
} | enum ____ {
/** String concatenation, or an enclosing print method. */
IMPLICIT,
/** {@code String.valueOf()} or {@code #toString()}. */
EXPLICIT,
FORMAT_METHOD,
FLOGGER,
NONE,
}
}
| ToStringKind |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/exceptions/InvalidResourceBlacklistRequestException.java | {
"start": 1456,
"end": 1892
} | class ____ extends YarnException {
private static final long serialVersionUID = 384957911L;
public InvalidResourceBlacklistRequestException(Throwable cause) {
super(cause);
}
public InvalidResourceBlacklistRequestException(String message) {
super(message);
}
public InvalidResourceBlacklistRequestException(String message, Throwable cause) {
super(message, cause);
}
}
| InvalidResourceBlacklistRequestException |
java | apache__logging-log4j2 | log4j-core/src/main/java/org/apache/logging/log4j/core/util/SystemMillisClock.java | {
"start": 891,
"end": 990
} | interface ____ returns the system time in millisecond granularity.
* @since 2.11
*/
public final | that |
java | spring-projects__spring-security | web/src/test/java/org/springframework/security/web/authentication/rememberme/TokenBasedRememberMeServicesTests.java | {
"start": 2247,
"end": 22701
} | class ____ {
private UserDetailsService uds;
private UserDetails user = new User("someone", "password", true, true, true, true,
AuthorityUtils.createAuthorityList("ROLE_ABC"));
private TokenBasedRememberMeServices services;
@BeforeEach
public void createTokenBasedRememberMeServices() {
this.uds = mock(UserDetailsService.class);
this.services = new TokenBasedRememberMeServices("key", this.uds);
}
void udsWillReturnUser() {
given(this.uds.loadUserByUsername(any(String.class))).willReturn(this.user);
}
void udsWillThrowNotFound() {
given(this.uds.loadUserByUsername(any(String.class))).willThrow(new UsernameNotFoundException(""));
}
void udsWillReturnNull() {
given(this.uds.loadUserByUsername(any(String.class))).willReturn(null);
}
private long determineExpiryTimeFromBased64EncodedToken(String validToken) {
String cookieAsPlainText = CodecTestUtils.decodeBase64(validToken);
String[] cookieTokens = getCookieTokens(cookieAsPlainText);
if (isValidCookieTokensLength(cookieTokens)) {
try {
return Long.parseLong(cookieTokens[1]);
}
catch (NumberFormatException ignored) {
}
}
return -1;
}
private String[] getCookieTokens(String cookieAsPlainText) {
return StringUtils.delimitedListToStringArray(cookieAsPlainText, ":");
}
private String determineAlgorithmNameFromBase64EncodedToken(String validToken) {
String cookieAsPlainText = CodecTestUtils.decodeBase64(validToken);
String[] cookieTokens = getCookieTokens(cookieAsPlainText);
if (isValidCookieTokensLength(cookieTokens)) {
return cookieTokens[2];
}
return null;
}
private boolean isValidCookieTokensLength(String[] cookieTokens) {
return cookieTokens.length == 3 || cookieTokens.length == 4;
}
private String generateCorrectCookieContentForTokenNoAlgorithmName(long expiryTime, String username,
String password, String key) {
return generateCorrectCookieContentForTokenWithAlgorithmName(expiryTime, username, password, key,
RememberMeTokenAlgorithm.MD5);
}
private String generateCorrectCookieContentForTokenNoAlgorithmName(long expiryTime, String username,
String password, String key, RememberMeTokenAlgorithm algorithm) {
// format is:
// username + ":" + expiryTime + ":" + Md5Hex(username + ":" + expiryTime + ":" +
// password + ":" + key)
String signatureValue = CodecTestUtils.algorithmHex(algorithm.getDigestAlgorithm(),
username + ":" + expiryTime + ":" + password + ":" + key);
String tokenValue = username + ":" + expiryTime + ":" + signatureValue;
return CodecTestUtils.encodeBase64(tokenValue);
}
private String generateCorrectCookieContentForTokenWithAlgorithmName(long expiryTime, String username,
String password, String key, RememberMeTokenAlgorithm algorithm) {
// format is:
// username + ":" + expiryTime + ":" + algorithmName + ":" + algorithmHex(username
// + ":" + expiryTime + ":" +
// password + ":" + key)
String signatureValue = CodecTestUtils.algorithmHex(algorithm.getDigestAlgorithm(),
username + ":" + expiryTime + ":" + password + ":" + key);
String tokenValue = username + ":" + expiryTime + ":" + algorithm.name() + ":" + signatureValue;
return CodecTestUtils.encodeBase64(tokenValue);
}
@Test
public void autoLoginReturnsNullIfNoCookiePresented() {
MockHttpServletResponse response = new MockHttpServletResponse();
Authentication result = this.services.autoLogin(new MockHttpServletRequest(), response);
assertThat(result).isNull();
// No cookie set
assertThat(response.getCookie(AbstractRememberMeServices.SPRING_SECURITY_REMEMBER_ME_COOKIE_KEY)).isNull();
}
@Test
public void autoLoginIgnoresUnrelatedCookie() {
Cookie cookie = new Cookie("unrelated_cookie", "foobar");
MockHttpServletRequest request = new MockHttpServletRequest();
request.setCookies(cookie);
MockHttpServletResponse response = new MockHttpServletResponse();
Authentication result = this.services.autoLogin(request, response);
assertThat(result).isNull();
assertThat(response.getCookie(AbstractRememberMeServices.SPRING_SECURITY_REMEMBER_ME_COOKIE_KEY)).isNull();
}
@Test
public void autoLoginReturnsNullForExpiredCookieAndClearsCookie() {
Cookie cookie = new Cookie(AbstractRememberMeServices.SPRING_SECURITY_REMEMBER_ME_COOKIE_KEY,
generateCorrectCookieContentForTokenNoAlgorithmName(System.currentTimeMillis() - 1000000, "someone",
"password", "key"));
MockHttpServletRequest request = new MockHttpServletRequest();
request.setCookies(cookie);
MockHttpServletResponse response = new MockHttpServletResponse();
assertThat(this.services.autoLogin(request, response)).isNull();
Cookie returnedCookie = response.getCookie(AbstractRememberMeServices.SPRING_SECURITY_REMEMBER_ME_COOKIE_KEY);
assertThat(returnedCookie).isNotNull();
assertThat(returnedCookie.getMaxAge()).isZero();
}
@Test
public void autoLoginReturnsNullAndClearsCookieIfMissingThreeTokensInCookieValue() {
Cookie cookie = new Cookie(AbstractRememberMeServices.SPRING_SECURITY_REMEMBER_ME_COOKIE_KEY,
CodecTestUtils.encodeBase64("x"));
MockHttpServletRequest request = new MockHttpServletRequest();
request.setCookies(cookie);
MockHttpServletResponse response = new MockHttpServletResponse();
assertThat(this.services.autoLogin(request, response)).isNull();
Cookie returnedCookie = response.getCookie(AbstractRememberMeServices.SPRING_SECURITY_REMEMBER_ME_COOKIE_KEY);
assertThat(returnedCookie).isNotNull();
assertThat(returnedCookie.getMaxAge()).isZero();
}
@Test
public void autoLoginClearsNonBase64EncodedCookie() {
Cookie cookie = new Cookie(AbstractRememberMeServices.SPRING_SECURITY_REMEMBER_ME_COOKIE_KEY,
"NOT_BASE_64_ENCODED");
MockHttpServletRequest request = new MockHttpServletRequest();
request.setCookies(cookie);
MockHttpServletResponse response = new MockHttpServletResponse();
assertThat(this.services.autoLogin(request, response)).isNull();
Cookie returnedCookie = response.getCookie(AbstractRememberMeServices.SPRING_SECURITY_REMEMBER_ME_COOKIE_KEY);
assertThat(returnedCookie).isNotNull();
assertThat(returnedCookie.getMaxAge()).isZero();
}
@Test
public void autoLoginClearsCookieIfSignatureBlocksDoesNotMatchExpectedValue() {
udsWillReturnUser();
Cookie cookie = new Cookie(AbstractRememberMeServices.SPRING_SECURITY_REMEMBER_ME_COOKIE_KEY,
generateCorrectCookieContentForTokenNoAlgorithmName(System.currentTimeMillis() + 1000000, "someone",
"password", "WRONG_KEY"));
MockHttpServletRequest request = new MockHttpServletRequest();
request.setCookies(cookie);
MockHttpServletResponse response = new MockHttpServletResponse();
assertThat(this.services.autoLogin(request, response)).isNull();
Cookie returnedCookie = response.getCookie(AbstractRememberMeServices.SPRING_SECURITY_REMEMBER_ME_COOKIE_KEY);
assertThat(returnedCookie).isNotNull();
assertThat(returnedCookie.getMaxAge()).isZero();
}
@Test
public void autoLoginClearsCookieIfTokenDoesNotContainANumberInCookieValue() {
Cookie cookie = new Cookie(AbstractRememberMeServices.SPRING_SECURITY_REMEMBER_ME_COOKIE_KEY,
CodecTestUtils.encodeBase64("username:NOT_A_NUMBER:signature"));
MockHttpServletRequest request = new MockHttpServletRequest();
request.setCookies(cookie);
MockHttpServletResponse response = new MockHttpServletResponse();
assertThat(this.services.autoLogin(request, response)).isNull();
Cookie returnedCookie = response.getCookie(AbstractRememberMeServices.SPRING_SECURITY_REMEMBER_ME_COOKIE_KEY);
assertThat(returnedCookie).isNotNull();
assertThat(returnedCookie.getMaxAge()).isZero();
}
@Test
public void autoLoginClearsCookieIfUserNotFound() {
udsWillThrowNotFound();
Cookie cookie = new Cookie(AbstractRememberMeServices.SPRING_SECURITY_REMEMBER_ME_COOKIE_KEY,
generateCorrectCookieContentForTokenNoAlgorithmName(System.currentTimeMillis() + 1000000, "someone",
"password", "key"));
MockHttpServletRequest request = new MockHttpServletRequest();
request.setCookies(cookie);
MockHttpServletResponse response = new MockHttpServletResponse();
assertThat(this.services.autoLogin(request, response)).isNull();
Cookie returnedCookie = response.getCookie(AbstractRememberMeServices.SPRING_SECURITY_REMEMBER_ME_COOKIE_KEY);
assertThat(returnedCookie).isNotNull();
assertThat(returnedCookie.getMaxAge()).isZero();
}
@Test
public void autoLoginClearsCookieIfUserServiceMisconfigured() {
udsWillReturnNull();
Cookie cookie = new Cookie(AbstractRememberMeServices.SPRING_SECURITY_REMEMBER_ME_COOKIE_KEY,
generateCorrectCookieContentForTokenNoAlgorithmName(System.currentTimeMillis() + 1000000, "someone",
"password", "key"));
MockHttpServletRequest request = new MockHttpServletRequest();
request.setCookies(cookie);
MockHttpServletResponse response = new MockHttpServletResponse();
assertThatIllegalArgumentException().isThrownBy(() -> this.services.autoLogin(request, response));
}
@Test
public void autoLoginWithValidTokenAndUserSucceeds() {
udsWillReturnUser();
Cookie cookie = new Cookie(AbstractRememberMeServices.SPRING_SECURITY_REMEMBER_ME_COOKIE_KEY,
generateCorrectCookieContentForTokenNoAlgorithmName(System.currentTimeMillis() + 1000000, "someone",
"password", "key"));
MockHttpServletRequest request = new MockHttpServletRequest();
request.setCookies(cookie);
MockHttpServletResponse response = new MockHttpServletResponse();
Authentication result = this.services.autoLogin(request, response);
assertThat(result).isNotNull();
assertThat(result.getPrincipal()).isEqualTo(this.user);
}
@Test
public void autoLoginWhenTokenNoAlgorithmAndDifferentMatchingAlgorithmThenReturnsNullAndClearCookie() {
udsWillReturnUser();
Cookie cookie = new Cookie(AbstractRememberMeServices.SPRING_SECURITY_REMEMBER_ME_COOKIE_KEY,
generateCorrectCookieContentForTokenNoAlgorithmName(System.currentTimeMillis() + 1000000, "someone",
"password", "key", RememberMeTokenAlgorithm.MD5));
MockHttpServletRequest request = new MockHttpServletRequest();
request.setCookies(cookie);
MockHttpServletResponse response = new MockHttpServletResponse();
this.services.setMatchingAlgorithm(RememberMeTokenAlgorithm.SHA256);
Authentication result = this.services.autoLogin(request, response);
Cookie returnedCookie = response.getCookie(AbstractRememberMeServices.SPRING_SECURITY_REMEMBER_ME_COOKIE_KEY);
assertThat(result).isNull();
assertThat(returnedCookie).isNotNull();
assertThat(returnedCookie.getMaxAge()).isZero();
}
@Test
public void autoLoginWhenTokenNoAlgorithmAndSameMatchingAlgorithmThenSucceeds() {
udsWillReturnUser();
Cookie cookie = new Cookie(AbstractRememberMeServices.SPRING_SECURITY_REMEMBER_ME_COOKIE_KEY,
generateCorrectCookieContentForTokenNoAlgorithmName(System.currentTimeMillis() + 1000000, "someone",
"password", "key", RememberMeTokenAlgorithm.SHA256));
MockHttpServletRequest request = new MockHttpServletRequest();
request.setCookies(cookie);
MockHttpServletResponse response = new MockHttpServletResponse();
this.services.setMatchingAlgorithm(RememberMeTokenAlgorithm.SHA256);
Authentication result = this.services.autoLogin(request, response);
assertThat(result).isNotNull();
assertThat(result.getPrincipal()).isEqualTo(this.user);
}
@Test
public void autoLoginWhenTokenHasAlgorithmAndSameMatchingAlgorithmThenUsesTokenAlgorithmAndSucceeds() {
udsWillReturnUser();
Cookie cookie = new Cookie(AbstractRememberMeServices.SPRING_SECURITY_REMEMBER_ME_COOKIE_KEY,
generateCorrectCookieContentForTokenWithAlgorithmName(System.currentTimeMillis() + 1000000, "someone",
"password", "key", RememberMeTokenAlgorithm.SHA256));
MockHttpServletRequest request = new MockHttpServletRequest();
request.setCookies(cookie);
MockHttpServletResponse response = new MockHttpServletResponse();
this.services.setMatchingAlgorithm(RememberMeTokenAlgorithm.SHA256);
Authentication result = this.services.autoLogin(request, response);
assertThat(result).isNotNull();
assertThat(result.getPrincipal()).isEqualTo(this.user);
}
@Test
public void autoLoginWhenTokenHasAlgorithmAndDifferentMatchingAlgorithmThenUsesTokenAlgorithmAndSucceeds() {
udsWillReturnUser();
Cookie cookie = new Cookie(AbstractRememberMeServices.SPRING_SECURITY_REMEMBER_ME_COOKIE_KEY,
generateCorrectCookieContentForTokenWithAlgorithmName(System.currentTimeMillis() + 1000000, "someone",
"password", "key", RememberMeTokenAlgorithm.SHA256));
MockHttpServletRequest request = new MockHttpServletRequest();
request.setCookies(cookie);
MockHttpServletResponse response = new MockHttpServletResponse();
this.services.setMatchingAlgorithm(RememberMeTokenAlgorithm.MD5);
Authentication result = this.services.autoLogin(request, response);
assertThat(result).isNotNull();
assertThat(result.getPrincipal()).isEqualTo(this.user);
}
@Test
public void testGettersSetters() {
assertThat(this.services.getUserDetailsService()).isEqualTo(this.uds);
assertThat(this.services.getKey()).isEqualTo("key");
assertThat(this.services.getParameter()).isEqualTo(AbstractRememberMeServices.DEFAULT_PARAMETER);
this.services.setParameter("some_param");
assertThat(this.services.getParameter()).isEqualTo("some_param");
this.services.setTokenValiditySeconds(12);
assertThat(this.services.getTokenValiditySeconds()).isEqualTo(12);
}
@Test
public void loginFailClearsCookie() {
MockHttpServletRequest request = new MockHttpServletRequest();
MockHttpServletResponse response = new MockHttpServletResponse();
this.services.loginFail(request, response);
Cookie cookie = response.getCookie(AbstractRememberMeServices.SPRING_SECURITY_REMEMBER_ME_COOKIE_KEY);
assertThat(cookie).isNotNull();
assertThat(cookie.getMaxAge()).isZero();
}
@Test
public void loginSuccessIgnoredIfParameterNotSetOrFalse() {
TokenBasedRememberMeServices services = new TokenBasedRememberMeServices("key",
new AbstractRememberMeServicesTests.MockUserDetailsService(null, false));
MockHttpServletRequest request = new MockHttpServletRequest();
request.addParameter(AbstractRememberMeServices.DEFAULT_PARAMETER, "false");
MockHttpServletResponse response = new MockHttpServletResponse();
services.loginSuccess(request, response, new TestingAuthenticationToken("someone", "password", "ROLE_ABC"));
Cookie cookie = response.getCookie(AbstractRememberMeServices.SPRING_SECURITY_REMEMBER_ME_COOKIE_KEY);
assertThat(cookie).isNull();
}
@Test
public void loginSuccessNormalWithNonUserDetailsBasedPrincipalSetsExpectedCookie() {
// SEC-822
this.services.setTokenValiditySeconds(500000000);
MockHttpServletRequest request = new MockHttpServletRequest();
request.addParameter(AbstractRememberMeServices.DEFAULT_PARAMETER, "true");
MockHttpServletResponse response = new MockHttpServletResponse();
this.services.loginSuccess(request, response,
new TestingAuthenticationToken("someone", "password", "ROLE_ABC"));
Cookie cookie = response.getCookie(AbstractRememberMeServices.SPRING_SECURITY_REMEMBER_ME_COOKIE_KEY);
String expiryTime = this.services.decodeCookie(cookie.getValue())[1];
long expectedExpiryTime = 1000L * 500000000;
expectedExpiryTime += System.currentTimeMillis();
assertThat(Long.parseLong(expiryTime) > expectedExpiryTime - 10000).isTrue();
assertThat(cookie).isNotNull();
assertThat(cookie.getMaxAge()).isEqualTo(this.services.getTokenValiditySeconds());
assertThat(CodecTestUtils.isBase64(cookie.getValue().getBytes())).isTrue();
assertThat(new Date().before(new Date(determineExpiryTimeFromBased64EncodedToken(cookie.getValue())))).isTrue();
}
@Test
public void loginSuccessNormalWithUserDetailsBasedPrincipalSetsExpectedCookie() {
MockHttpServletRequest request = new MockHttpServletRequest();
request.addParameter(AbstractRememberMeServices.DEFAULT_PARAMETER, "true");
MockHttpServletResponse response = new MockHttpServletResponse();
this.services.loginSuccess(request, response,
new TestingAuthenticationToken("someone", "password", "ROLE_ABC"));
Cookie cookie = response.getCookie(AbstractRememberMeServices.SPRING_SECURITY_REMEMBER_ME_COOKIE_KEY);
assertThat(cookie).isNotNull();
assertThat(cookie.getMaxAge()).isEqualTo(this.services.getTokenValiditySeconds());
assertThat(CodecTestUtils.isBase64(cookie.getValue().getBytes())).isTrue();
assertThat(new Date().before(new Date(determineExpiryTimeFromBased64EncodedToken(cookie.getValue())))).isTrue();
}
@Test
public void loginSuccessWhenDefaultEncodingAlgorithmThenContainsAlgorithmName() {
MockHttpServletRequest request = new MockHttpServletRequest();
request.addParameter(AbstractRememberMeServices.DEFAULT_PARAMETER, "true");
MockHttpServletResponse response = new MockHttpServletResponse();
this.services.loginSuccess(request, response,
new TestingAuthenticationToken("someone", "password", "ROLE_ABC"));
Cookie cookie = response.getCookie(AbstractRememberMeServices.SPRING_SECURITY_REMEMBER_ME_COOKIE_KEY);
assertThat(cookie).isNotNull();
assertThat(cookie.getMaxAge()).isEqualTo(this.services.getTokenValiditySeconds());
assertThat(CodecTestUtils.isBase64(cookie.getValue().getBytes())).isTrue();
assertThat(new Date().before(new Date(determineExpiryTimeFromBased64EncodedToken(cookie.getValue())))).isTrue();
assertThat("SHA256").isEqualTo(determineAlgorithmNameFromBase64EncodedToken(cookie.getValue()));
}
@Test
public void loginSuccessWhenCustomEncodingAlgorithmThenContainsAlgorithmName() {
MockHttpServletRequest request = new MockHttpServletRequest();
request.addParameter(AbstractRememberMeServices.DEFAULT_PARAMETER, "true");
MockHttpServletResponse response = new MockHttpServletResponse();
this.services = new TokenBasedRememberMeServices("key", this.uds, RememberMeTokenAlgorithm.SHA256);
this.services.loginSuccess(request, response,
new TestingAuthenticationToken("someone", "password", "ROLE_ABC"));
Cookie cookie = response.getCookie(AbstractRememberMeServices.SPRING_SECURITY_REMEMBER_ME_COOKIE_KEY);
assertThat(cookie).isNotNull();
assertThat(cookie.getMaxAge()).isEqualTo(this.services.getTokenValiditySeconds());
assertThat(CodecTestUtils.isBase64(cookie.getValue().getBytes())).isTrue();
assertThat(new Date().before(new Date(determineExpiryTimeFromBased64EncodedToken(cookie.getValue())))).isTrue();
assertThat("SHA256").isEqualTo(determineAlgorithmNameFromBase64EncodedToken(cookie.getValue()));
}
// SEC-933
@Test
public void obtainPasswordReturnsNullForTokenWithNullCredentials() {
TestingAuthenticationToken token = new TestingAuthenticationToken("username", null);
assertThat(this.services.retrievePassword(token)).isNull();
}
// SEC-949
@Test
public void negativeValidityPeriodIsSetOnCookieButExpiryTimeRemainsAtTwoWeeks() {
MockHttpServletRequest request = new MockHttpServletRequest();
request.addParameter(AbstractRememberMeServices.DEFAULT_PARAMETER, "true");
MockHttpServletResponse response = new MockHttpServletResponse();
this.services.setTokenValiditySeconds(-1);
this.services.loginSuccess(request, response,
new TestingAuthenticationToken("someone", "password", "ROLE_ABC"));
Cookie cookie = response.getCookie(AbstractRememberMeServices.SPRING_SECURITY_REMEMBER_ME_COOKIE_KEY);
assertThat(cookie).isNotNull();
// Check the expiry time is within 50ms of two weeks from current time
assertThat(determineExpiryTimeFromBased64EncodedToken(cookie.getValue())
- System.currentTimeMillis() > AbstractRememberMeServices.TWO_WEEKS_S - 50)
.isTrue();
assertThat(cookie.getMaxAge()).isEqualTo(-1);
assertThat(CodecTestUtils.isBase64(cookie.getValue().getBytes())).isTrue();
}
@Test
public void constructorWhenEncodingAlgorithmNullThenException() {
assertThatExceptionOfType(IllegalArgumentException.class)
.isThrownBy(() -> new TokenBasedRememberMeServices("key", this.uds, null))
.withMessage("encodingAlgorithm cannot be null");
}
@Test
public void constructorWhenNoEncodingAlgorithmSpecifiedThenSha256() {
TokenBasedRememberMeServices rememberMeServices = new TokenBasedRememberMeServices("key", this.uds);
RememberMeTokenAlgorithm encodingAlgorithm = (RememberMeTokenAlgorithm) ReflectionTestUtils
.getField(rememberMeServices, "encodingAlgorithm");
assertThat(encodingAlgorithm).isSameAs(RememberMeTokenAlgorithm.SHA256);
}
}
| TokenBasedRememberMeServicesTests |
java | apache__avro | lang/java/avro/src/main/java/org/apache/avro/SchemaCompatibility.java | {
"start": 5221,
"end": 6404
} | class ____ {
private final Schema mReader;
private final Schema mWriter;
/**
* Initializes a new reader/writer pair.
*
* @param reader Reader schema.
* @param writer Writer schema.
*/
public ReaderWriter(final Schema reader, final Schema writer) {
mReader = reader;
mWriter = writer;
}
/** {@inheritDoc} */
@Override
public int hashCode() {
return System.identityHashCode(mReader) ^ System.identityHashCode(mWriter);
}
/** {@inheritDoc} */
@Override
public boolean equals(Object obj) {
if (!(obj instanceof ReaderWriter)) {
return false;
}
final ReaderWriter that = (ReaderWriter) obj;
// Use pointer comparison here:
return (this.mReader == that.mReader) && (this.mWriter == that.mWriter);
}
/** {@inheritDoc} */
@Override
public String toString() {
return String.format("ReaderWriter{reader:%s, writer:%s}", mReader, mWriter);
}
}
/**
* Determines the compatibility of a reader/writer schema pair.
*
* <p>
* Provides memoization to handle recursive schemas.
* </p>
*/
private static final | ReaderWriter |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/mapping/converted/converter/ExplicitJavaTypeDescriptorTest.java | {
"start": 7115,
"end": 8004
} | class ____ implements Serializable {
private String state;
public PseudoMutableState(String state) {
this.state = state;
}
public String getState() {
return state;
}
// mutable
public void setState(String state) {
// just a safety net - the idea is that the user is promising to not mutate the internal state
throw new UnsupportedOperationException( "illegal attempt to mutate state" );
}
@Override
public boolean equals(Object o) {
if ( this == o ) {
return true;
}
if ( o == null || getClass() != o.getClass() ) {
return false;
}
PseudoMutableState that = (PseudoMutableState) o;
return getState() != null ? getState().equals( that.getState() ) : that.getState() == null;
}
@Override
public int hashCode() {
return getState() != null ? getState().hashCode() : 0;
}
}
@Converter
public static | PseudoMutableState |
java | hibernate__hibernate-orm | hibernate-spatial/src/test/java/org/hibernate/spatial/testing/dialects/NativeSQLTemplates.java | {
"start": 2058,
"end": 4810
} | class ____ {
protected final Map<CommonSpatialFunction, String> sqls = new HashMap<>();
// Note that we alias the function invocation so that
// we can map the return value to the required type
public NativeSQLTemplates() {
sqls.put( ST_ASTEXT, "select id, st_astext(geom) as result from %s" );
sqls.put( ST_GEOMETRYTYPE, "select id, st_geometrytype(geom) as result from %s" );
sqls.put( ST_DIMENSION, "select id, st_dimension(geom) as result from %s" );
sqls.put( ST_ENVELOPE, "select id, st_envelope(geom) as result from %s" );
sqls.put( ST_SRID, "select id, st_srid(geom) as result from %s" );
sqls.put( ST_ASBINARY, "select id, st_asbinary(geom) as result from %s" );
sqls.put( ST_ISEMPTY, "select id, st_isempty(geom) as result from %s" );
sqls.put( ST_ISSIMPLE, "select id, st_issimple(geom) as result from %s" );
sqls.put( ST_BOUNDARY, "select id, st_boundary(geom) as result from %s" );
sqls.put( ST_OVERLAPS, "select id, st_overlaps(geom, st_geomfromtext(:filter, 4326)) as result from %s" );
sqls.put( ST_INTERSECTS, "select id, st_intersects(geom, st_geomfromtext(:filter, 4326)) as result from %s" );
sqls.put( ST_CROSSES, "select id, st_crosses(geom, st_geomfromtext(:filter, 4326)) as result from %s" );
sqls.put( ST_CONTAINS, "select id, st_contains(geom, st_geomfromtext(:filter, 4326)) as result from %s" );
sqls.put( ST_DISJOINT, "select id, st_disjoint(geom, st_geomfromtext(:filter, 4326)) as result from %s" );
sqls.put( ST_RELATE, "select id, st_relate(geom, st_geomfromtext(:filter, 4326)) as result from %s" );
sqls.put( ST_TOUCHES, "select id, st_touches(geom, st_geomfromtext(:filter, 4326)) as result from %s" );
sqls.put( ST_WITHIN, "select id, st_within(geom, st_geomfromtext(:filter, 4326)) as result from %s" );
sqls.put( ST_EQUALS, "select id, st_equals(geom, st_geomfromtext(:filter, 4326)) as result from %s" );
sqls.put( ST_DISTANCE, "select id, st_distance(geom, st_geomfromtext(:filter, 4326)) as result from %s" );
sqls.put( ST_BUFFER, "select id, st_buffer(geom, 2) as result from %s" );
sqls.put( ST_CONVEXHULL, "select id, st_convexhull(geom) as result from %s" );
sqls.put( ST_DIFFERENCE, "select id, st_difference(geom, st_geomfromtext(:filter, 4326)) as result from %s" );
sqls.put(
ST_INTERSECTION,
"select id, st_intersection(geom, st_geomfromtext(:filter, 4326)) as result from %s"
);
sqls.put(
ST_SYMDIFFERENCE,
"select id, st_symdifference(geom, st_geomfromtext(:filter, 4326)) as result from %s"
);
sqls.put( ST_UNION, "select id, st_union(geom, st_geomfromtext(:filter, 4326)) as result from %s" );
}
public Map<CommonSpatialFunction, String> all() {
return Collections.unmodifiableMap( this.sqls );
}
}
| NativeSQLTemplates |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/visitor/INodeCountVisitor.java | {
"start": 1276,
"end": 1448
} | interface ____ {
int getCount(INode inode);
}
public static Counts countTree(INode root) {
return new INodeCountVisitor().count(root);
}
private static | Counts |
java | dropwizard__dropwizard | dropwizard-logging/src/main/java/io/dropwizard/logging/common/TlsSocketAppenderFactory.java | {
"start": 3797,
"end": 11608
} | class ____<E extends DeferredProcessingAware> extends TcpSocketAppenderFactory<E> {
@Nullable
private String keyStorePath;
@Nullable
private String keyStorePassword;
@NotEmpty
private String keyStoreType = "JKS";
@Nullable
private String keyStoreProvider;
@Nullable
private String trustStorePath;
@Nullable
private String trustStorePassword;
@NotEmpty
private String trustStoreType = "JKS";
@Nullable
private String trustStoreProvider;
@Nullable
private String jceProvider;
@Nullable
private List<String> supportedProtocols;
@Nullable
private List<String> excludedProtocols;
@Nullable
private List<String> supportedCipherSuites;
@Nullable
private List<String> excludedCipherSuites;
private boolean validateCerts;
private boolean validatePeers;
@JsonProperty
public boolean isValidatePeers() {
return validatePeers;
}
@JsonProperty
public void setValidatePeers(boolean validatePeers) {
this.validatePeers = validatePeers;
}
@JsonProperty
public boolean isValidateCerts() {
return validateCerts;
}
@JsonProperty
public void setValidateCerts(boolean validateCerts) {
this.validateCerts = validateCerts;
}
@JsonProperty
@Nullable
public List<String> getExcludedCipherSuites() {
return excludedCipherSuites;
}
@JsonProperty
public void setExcludedCipherSuites(List<String> excludedCipherSuites) {
this.excludedCipherSuites = excludedCipherSuites;
}
@JsonProperty
@Nullable
public List<String> getSupportedCipherSuites() {
return supportedCipherSuites;
}
@JsonProperty
public void setSupportedCipherSuites(List<String> supportedCipherSuites) {
this.supportedCipherSuites = supportedCipherSuites;
}
@JsonProperty
@Nullable
public List<String> getExcludedProtocols() {
return excludedProtocols;
}
@JsonProperty
public void setExcludedProtocols(List<String> excludedProtocols) {
this.excludedProtocols = excludedProtocols;
}
@JsonProperty
@Nullable
public List<String> getSupportedProtocols() {
return supportedProtocols;
}
@JsonProperty
public void setSupportedProtocols(List<String> supportedProtocols) {
this.supportedProtocols = supportedProtocols;
}
@JsonProperty
@Nullable
public String getTrustStoreProvider() {
return trustStoreProvider;
}
@JsonProperty
public void setTrustStoreProvider(String trustStoreProvider) {
this.trustStoreProvider = trustStoreProvider;
}
@JsonProperty
@Nullable
public String getTrustStoreType() {
return trustStoreType;
}
@JsonProperty
public void setTrustStoreType(String trustStoreType) {
this.trustStoreType = trustStoreType;
}
@JsonProperty
@Nullable
public String getTrustStorePassword() {
return trustStorePassword;
}
@JsonProperty
public void setTrustStorePassword(String trustStorePassword) {
this.trustStorePassword = trustStorePassword;
}
@JsonProperty
@Nullable
public String getTrustStorePath() {
return trustStorePath;
}
@JsonProperty
public void setTrustStorePath(String trustStorePath) {
this.trustStorePath = trustStorePath;
}
@JsonProperty
@Nullable
public String getKeyStoreProvider() {
return keyStoreProvider;
}
@JsonProperty
public void setKeyStoreProvider(String keyStoreProvider) {
this.keyStoreProvider = keyStoreProvider;
}
@JsonProperty
@Nullable
public String getKeyStoreType() {
return keyStoreType;
}
@JsonProperty
public void setKeyStoreType(String keyStoreType) {
this.keyStoreType = keyStoreType;
}
@JsonProperty
@Nullable
public String getKeyStorePassword() {
return keyStorePassword;
}
@JsonProperty
public void setKeyStorePassword(String keyStorePassword) {
this.keyStorePassword = keyStorePassword;
}
@JsonProperty
@Nullable
public String getKeyStorePath() {
return keyStorePath;
}
@JsonProperty
public void setKeyStorePath(String keyStorePath) {
this.keyStorePath = keyStorePath;
}
@JsonProperty
@Nullable
public String getJceProvider() {
return jceProvider;
}
@JsonProperty
public void setJceProvider(String jceProvider) {
this.jceProvider = jceProvider;
}
private SslContextFactory createSslContextFactory() {
SslContextFactory factory = new SslContextFactory.Server();
if (keyStorePath != null) {
factory.setKeyStorePath(keyStorePath);
}
factory.setKeyStoreType(keyStoreType);
if (keyStorePassword != null) {
factory.setKeyStorePassword(keyStorePassword);
}
if (keyStoreProvider != null) {
factory.setKeyStoreProvider(keyStoreProvider);
}
if (trustStorePath != null) {
factory.setTrustStorePath(trustStorePath);
}
if (trustStorePassword != null) {
factory.setTrustStorePassword(trustStorePassword);
}
factory.setTrustStoreType(trustStoreType);
if (trustStoreProvider != null) {
factory.setTrustStoreProvider(trustStoreProvider);
}
factory.setValidateCerts(validateCerts);
factory.setValidatePeerCerts(validatePeers);
if (supportedProtocols != null) {
factory.setIncludeProtocols(supportedProtocols.toArray(new String[0]));
}
if (excludedProtocols != null) {
factory.setExcludeProtocols(excludedProtocols.toArray(new String[0]));
}
if (supportedCipherSuites != null) {
factory.setIncludeCipherSuites(supportedCipherSuites.toArray(new String[0]));
}
if (excludedCipherSuites != null) {
factory.setExcludeCipherSuites(excludedCipherSuites.toArray(new String[0]));
}
if (jceProvider != null) {
factory.setProvider(jceProvider);
}
return factory;
}
@Override
protected SocketFactory socketFactory() {
final SslContextFactory sslContextFactory = createSslContextFactory();
try {
sslContextFactory.start();
} catch (Exception e) {
throw new IllegalStateException("Unable to configure SSLContext", e);
}
// We use an adapter over the `newSslSocket` call of Jetty's `SslContextFactory`, because it provides more
// advanced socket configuration than Java's `SSLSocketFactory`.
return new SocketFactory() {
@Override
public Socket createSocket() throws IOException {
return sslContextFactory.newSslSocket();
}
@Override
public Socket createSocket(String host, int port) {
return unsupported();
}
@Override
public Socket createSocket(String host, int port, InetAddress localHost, int localPort) {
return unsupported();
}
@Override
public Socket createSocket(InetAddress host, int port) {
return unsupported();
}
@Override
public Socket createSocket(InetAddress address, int port, InetAddress localAddress, int localPort) {
return unsupported();
}
private Socket unsupported() {
throw new UnsupportedOperationException("Only createSocket is supported");
}
};
}
}
| TlsSocketAppenderFactory |
java | quarkusio__quarkus | extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/HotDeploymentConfigBuildStep.java | {
"start": 592,
"end": 2134
} | class ____ {
private static final DotName STARTUP_EVENT_NAME = DotName.createSimple(StartupEvent.class.getName());
@BuildStep
HotDeploymentWatchedFileBuildItem configFile() {
return new HotDeploymentWatchedFileBuildItem("META-INF/beans.xml");
}
@BuildStep
DisableInstrumentationForIndexPredicateBuildItem startup() {
return new DisableInstrumentationForIndexPredicateBuildItem(new Predicate<Index>() {
@Override
public boolean test(Index index) {
if (!index.getAnnotations(StartupBuildSteps.STARTUP_NAME).isEmpty()) {
return true;
}
List<AnnotationInstance> observesInstances = index.getAnnotations(DotNames.OBSERVES);
if (!observesInstances.isEmpty()) {
for (AnnotationInstance observesInstance : observesInstances) {
if (observesInstance.target().kind() == AnnotationTarget.Kind.METHOD_PARAMETER) {
MethodParameterInfo methodParameterInfo = observesInstance.target().asMethodParameter();
short paramPos = methodParameterInfo.position();
if (STARTUP_EVENT_NAME.equals(methodParameterInfo.method().parameterType(paramPos).name())) {
return true;
}
}
}
}
return false;
}
});
}
}
| HotDeploymentConfigBuildStep |
java | elastic__elasticsearch | x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/inference/action/GetInferenceDiagnosticsActionResponseTests.java | {
"start": 934,
"end": 4601
} | class ____ extends AbstractBWCWireSerializationTestCase<
GetInferenceDiagnosticsAction.Response> {
public static GetInferenceDiagnosticsAction.Response createRandom() {
List<GetInferenceDiagnosticsAction.NodeResponse> responses = randomList(
2,
10,
GetInferenceDiagnosticsActionNodeResponseTests::createRandom
);
return new GetInferenceDiagnosticsAction.Response(ClusterName.DEFAULT, responses, List.of());
}
public void testToXContent() throws IOException {
var node = DiscoveryNodeUtils.create("id");
var externalPoolStats = new PoolStats(1, 2, 3, 4);
var eisPoolStats = new PoolStats(5, 6, 7, 8);
var entity = new GetInferenceDiagnosticsAction.Response(
ClusterName.DEFAULT,
List.of(
new GetInferenceDiagnosticsAction.NodeResponse(
node,
externalPoolStats,
eisPoolStats,
new GetInferenceDiagnosticsAction.NodeResponse.Stats(5, 6, 7, 8)
)
),
List.of()
);
XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON);
entity.toXContent(builder, null);
String xContentResult = org.elasticsearch.common.Strings.toString(builder);
assertThat(xContentResult, is(XContentHelper.stripWhitespace("""
{
"id":{
"external": {
"connection_pool_stats":{
"leased_connections":1,
"pending_connections":2,
"available_connections":3,
"max_connections":4
}
},
"eis_mtls": {
"connection_pool_stats":{
"leased_connections":5,
"pending_connections":6,
"available_connections":7,
"max_connections":8
}
},
"inference_endpoint_registry":{
"cache_count": 5,
"cache_hits": 6,
"cache_misses": 7,
"cache_evictions": 8
}
}
}""")));
}
@Override
protected Writeable.Reader<GetInferenceDiagnosticsAction.Response> instanceReader() {
return GetInferenceDiagnosticsAction.Response::new;
}
@Override
protected GetInferenceDiagnosticsAction.Response createTestInstance() {
return createRandom();
}
@Override
protected GetInferenceDiagnosticsAction.Response mutateInstance(GetInferenceDiagnosticsAction.Response instance) {
return new GetInferenceDiagnosticsAction.Response(
ClusterName.DEFAULT,
instance.getNodes().subList(1, instance.getNodes().size()),
List.of()
);
}
@Override
protected GetInferenceDiagnosticsAction.Response mutateInstanceForVersion(
GetInferenceDiagnosticsAction.Response instance,
TransportVersion version
) {
return new GetInferenceDiagnosticsAction.Response(
instance.getClusterName(),
instance.getNodes()
.stream()
.map(nodeResponse -> GetInferenceDiagnosticsActionNodeResponseTests.mutateNodeResponseForVersion(nodeResponse, version))
.toList(),
instance.failures()
);
}
}
| GetInferenceDiagnosticsActionResponseTests |
java | elastic__elasticsearch | x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/support/WatcherIndexTemplateRegistryTests.java | {
"start": 3555,
"end": 19650
} | class ____ extends ESTestCase {
private WatcherIndexTemplateRegistry registry;
private NamedXContentRegistry xContentRegistry;
private ClusterService clusterService;
private ThreadPool threadPool;
private Client client;
private ProjectClient projectClient;
@SuppressWarnings("unchecked")
@Before
public void createRegistryAndClient() {
threadPool = mock(ThreadPool.class);
when(threadPool.getThreadContext()).thenReturn(new ThreadContext(Settings.EMPTY));
when(threadPool.generic()).thenReturn(EsExecutors.DIRECT_EXECUTOR_SERVICE);
projectClient = mock(ProjectClient.class);
client = mock(Client.class);
when(client.threadPool()).thenReturn(threadPool);
when(client.projectClient(any())).thenReturn(projectClient);
clusterService = mock(ClusterService.class);
when(clusterService.getSettings()).thenReturn(Settings.EMPTY);
List<NamedXContentRegistry.Entry> entries = new ArrayList<>(ClusterModule.getNamedXWriteables());
entries.addAll(
Arrays.asList(
new NamedXContentRegistry.Entry(
LifecycleType.class,
new ParseField(TimeseriesLifecycleType.TYPE),
(p) -> TimeseriesLifecycleType.INSTANCE
),
new NamedXContentRegistry.Entry(LifecycleAction.class, new ParseField(DeleteAction.NAME), DeleteAction::parse)
)
);
xContentRegistry = new NamedXContentRegistry(entries);
registry = new WatcherIndexTemplateRegistry(Settings.EMPTY, clusterService, threadPool, client, xContentRegistry);
}
public void testThatNonExistingTemplatesAreAddedImmediately() {
DiscoveryNode node = DiscoveryNodeUtils.create("node");
DiscoveryNodes nodes = DiscoveryNodes.builder().localNodeId("node").masterNodeId("node").add(node).build();
ClusterChangedEvent event = createClusterChangedEvent(Collections.emptyMap(), nodes);
registry.clusterChanged(event);
ArgumentCaptor<TransportPutComposableIndexTemplateAction.Request> argumentCaptor = ArgumentCaptor.forClass(
TransportPutComposableIndexTemplateAction.Request.class
);
verify(projectClient, times(1)).execute(same(TransportPutComposableIndexTemplateAction.TYPE), argumentCaptor.capture(), any());
ClusterChangedEvent newEvent = addTemplateToState(event);
registry.clusterChanged(newEvent);
argumentCaptor = ArgumentCaptor.forClass(TransportPutComposableIndexTemplateAction.Request.class);
verify(projectClient, times(1)).execute(same(TransportPutComposableIndexTemplateAction.TYPE), argumentCaptor.capture(), any());
TransportPutComposableIndexTemplateAction.Request req = argumentCaptor.getAllValues()
.stream()
.filter(r -> r.name().equals(WatcherIndexTemplateRegistryField.HISTORY_TEMPLATE_NAME))
.findFirst()
.orElseThrow(() -> new AssertionError("expected the watch history template to be put"));
assertThat(req.indexTemplate().template().settings().get("index.lifecycle.name"), equalTo("watch-history-ilm-policy-16"));
}
public void testThatNonExistingTemplatesAreAddedEvenWithILMUsageDisabled() {
    // Local node is the elected master, so the registry is responsible for installing resources.
    DiscoveryNode node = DiscoveryNodeUtils.create("node");
    DiscoveryNodes nodes = DiscoveryNodes.builder().localNodeId("node").masterNodeId("node").add(node).build();
    // Recreate the registry with ILM-based index management switched off.
    registry = new WatcherIndexTemplateRegistry(
        Settings.builder().put(Watcher.USE_ILM_INDEX_MANAGEMENT.getKey(), false).build(),
        clusterService,
        threadPool,
        client,
        xContentRegistry
    );
    ClusterChangedEvent event = createClusterChangedEvent(Collections.emptyMap(), Collections.emptyMap(), nodes);
    registry.clusterChanged(event);
    ArgumentCaptor<TransportPutComposableIndexTemplateAction.Request> argumentCaptor = ArgumentCaptor.forClass(
        TransportPutComposableIndexTemplateAction.Request.class
    );
    verify(projectClient, times(1)).execute(same(TransportPutComposableIndexTemplateAction.TYPE), argumentCaptor.capture(), any());

    // Once the template exists in the cluster state, no further put is expected.
    ClusterChangedEvent newEvent = addTemplateToState(event);
    registry.clusterChanged(newEvent);
    verify(projectClient, times(1)).execute(same(TransportPutComposableIndexTemplateAction.TYPE), argumentCaptor.capture(), any());
    // BUG FIX: the original code iterated a second ArgumentCaptor<PutIndexTemplateRequest> that was
    // never passed to any verify(...), so getAllValues() was empty and the "no ILM settings"
    // assertion never ran. Assert against the requests that were actually captured instead.
    argumentCaptor.getAllValues()
        .stream()
        .filter(r -> r.name().equals(WatcherIndexTemplateRegistryField.HISTORY_TEMPLATE_NAME))
        .forEach(r -> assertNull(r.indexTemplate().template().settings().get("index.lifecycle.name")));
    // With ILM usage disabled the registry must not attempt to install any lifecycle policy.
    verify(projectClient, times(0)).execute(eq(ILMActions.PUT), any(), any());
}
public void testThatNonExistingPoliciesAreAddedImmediately() {
    // An empty cluster state on an elected master must trigger installation of the ILM policy.
    DiscoveryNode localNode = DiscoveryNodeUtils.create("node");
    DiscoveryNodes discoveryNodes = DiscoveryNodes.builder().localNodeId("node").masterNodeId("node").add(localNode).build();
    registry.clusterChanged(createClusterChangedEvent(Collections.emptyMap(), discoveryNodes));
    verify(projectClient, times(1)).execute(eq(ILMActions.PUT), any(), any());
}
public void testPolicyAlreadyExists() {
    DiscoveryNode localNode = DiscoveryNodeUtils.create("node");
    DiscoveryNodes discoveryNodes = DiscoveryNodes.builder().localNodeId("node").masterNodeId("node").add(localNode).build();
    // The registry ships exactly one lifecycle policy; install that very policy into the state.
    List<LifecyclePolicy> policies = registry.getLifecyclePolicies();
    assertThat(policies, hasSize(1));
    LifecyclePolicy bundledPolicy = policies.get(0);
    Map<String, LifecyclePolicy> installedPolicies = Collections.singletonMap(bundledPolicy.getName(), bundledPolicy);
    registry.clusterChanged(createClusterChangedEvent(Collections.emptyMap(), installedPolicies, discoveryNodes));
    // An identical policy already exists, so no PUT must be issued.
    verify(projectClient, times(0)).execute(eq(ILMActions.PUT), any(), any());
}
public void testNoPolicyButILMDisabled() {
    DiscoveryNode localNode = DiscoveryNodeUtils.create("node");
    DiscoveryNodes discoveryNodes = DiscoveryNodes.builder().localNodeId("node").masterNodeId("node").add(localNode).build();
    // Rebuild the registry with ILM index management turned off.
    Settings noIlmSettings = Settings.builder().put(Watcher.USE_ILM_INDEX_MANAGEMENT.getKey(), false).build();
    registry = new WatcherIndexTemplateRegistry(noIlmSettings, clusterService, threadPool, client, xContentRegistry);
    registry.clusterChanged(createClusterChangedEvent(Collections.emptyMap(), Collections.emptyMap(), discoveryNodes));
    // Even though no policy exists, none may be installed while ILM usage is disabled.
    verify(projectClient, times(0)).execute(eq(ILMActions.PUT), any(), any());
}
/**
 * When an ILM policy with the registry's name already exists but its content differs,
 * the registry must not overwrite it.
 */
public void testPolicyAlreadyExistsButDiffers() throws IOException {
DiscoveryNode node = DiscoveryNodeUtils.create("node");
DiscoveryNodes nodes = DiscoveryNodes.builder().localNodeId("node").masterNodeId("node").add(node).build();
Map<String, LifecyclePolicy> policyMap = new HashMap<>();
// A minimal policy body that is intentionally different from the bundled one.
String policyStr = "{\"phases\":{\"delete\":{\"min_age\":\"1m\",\"actions\":{\"delete\":{}}}}}";
List<LifecyclePolicy> policies = registry.getLifecyclePolicies();
assertThat(policies, hasSize(1));
LifecyclePolicy policy = policies.get(0);
try (
XContentParser parser = XContentType.JSON.xContent()
.createParser(XContentParserConfiguration.EMPTY.withRegistry(xContentRegistry), policyStr)
) {
// Parse the differing body under the SAME policy name and install it into the state.
LifecyclePolicy different = LifecyclePolicy.parse(parser, policy.getName());
policyMap.put(policy.getName(), different);
ClusterChangedEvent event = createClusterChangedEvent(Collections.emptyMap(), policyMap, nodes);
registry.clusterChanged(event);
// Existing-but-different policy: the registry must not issue a PUT.
verify(projectClient, times(0)).execute(eq(ILMActions.PUT), any(), any());
}
}
/**
 * Exercises {@link WatcherIndexTemplateRegistry#validate} against various combinations of
 * installed templates. Validation requires the current (versioned) history template name plus
 * the triggered-watches and watches templates; unrelated templates (even unversioned ones) are
 * ignored. The six duplicated blocks of the original are collapsed into a private helper.
 */
public void testThatTemplatesExist() {
    // Legacy, unversioned history template name alone: invalid.
    assertTemplateValidation(false, Map.of(".watch-history", INDEX_TEMPLATE_VERSION));
    // Legacy history name even with the other two templates: still invalid.
    assertTemplateValidation(
        false,
        Map.of(".watch-history", INDEX_TEMPLATE_VERSION, ".triggered_watches", INDEX_TEMPLATE_VERSION, ".watches", INDEX_TEMPLATE_VERSION)
    );
    // Current history template name plus the other two: valid.
    assertTemplateValidation(
        true,
        Map.of(
            WatcherIndexTemplateRegistryField.HISTORY_TEMPLATE_NAME,
            INDEX_TEMPLATE_VERSION,
            ".triggered_watches",
            INDEX_TEMPLATE_VERSION,
            ".watches",
            INDEX_TEMPLATE_VERSION
        )
    );
    // An outdated versioned history template: invalid.
    assertTemplateValidation(false, Map.of(".watch-history-11", 11, ".triggered_watches", 11, ".watches", 11));
    // A sufficiently recent versioned history template: valid.
    assertTemplateValidation(true, Map.of(".watch-history-15", 15, ".triggered_watches", 15, ".watches", 15));
    // Extra templates without a version must not break validation. Map.of rejects null values,
    // so this case is built with a HashMap.
    Map<String, Integer> withUnversionedExtras = new HashMap<>();
    withUnversionedExtras.put(WatcherIndexTemplateRegistryField.HISTORY_TEMPLATE_NAME, INDEX_TEMPLATE_VERSION);
    withUnversionedExtras.put(".triggered_watches", INDEX_TEMPLATE_VERSION);
    withUnversionedExtras.put(".watches", INDEX_TEMPLATE_VERSION);
    withUnversionedExtras.put("whatever", null);
    withUnversionedExtras.put("else", null);
    assertTemplateValidation(true, withUnversionedExtras);
}

/** Asserts that validating a state holding exactly {@code existingTemplates} yields {@code expectedValid}. */
private void assertTemplateValidation(boolean expectedValid, Map<String, Integer> existingTemplates) {
    assertThat(WatcherIndexTemplateRegistry.validate(createClusterState(existingTemplates)), is(expectedValid));
}
public void testThatTemplatesAreNotAppliedOnSameVersionNodes() {
    // The elected master is a different node ("master"), so this node must not touch templates.
    DiscoveryNode localNode = DiscoveryNodeUtils.create("node");
    DiscoveryNode masterNode = DiscoveryNodeUtils.create("master");
    DiscoveryNodes discoveryNodes = DiscoveryNodes.builder()
        .localNodeId("node")
        .masterNodeId("master")
        .add(localNode)
        .add(masterNode)
        .build();
    Map<String, Integer> installedTemplates = Collections.singletonMap(
        WatcherIndexTemplateRegistryField.HISTORY_TEMPLATE_NAME,
        INDEX_TEMPLATE_VERSION
    );
    registry.clusterChanged(createClusterChangedEvent(installedTemplates, discoveryNodes));
    verifyNoMoreInteractions(client);
}
public void testThatMissingMasterNodeDoesNothing() {
    // No elected master in the cluster state: the registry must stay completely idle.
    DiscoveryNode localNode = DiscoveryNodeUtils.create("node");
    DiscoveryNodes discoveryNodes = DiscoveryNodes.builder().localNodeId("node").add(localNode).build();
    Map<String, Integer> installedTemplates = Collections.singletonMap(
        WatcherIndexTemplateRegistryField.HISTORY_TEMPLATE_NAME,
        INDEX_TEMPLATE_VERSION
    );
    registry.clusterChanged(createClusterChangedEvent(installedTemplates, discoveryNodes));
    verifyNoMoreInteractions(client);
}
/** Convenience overload for cluster states that contain templates but no ILM policies. */
private ClusterChangedEvent createClusterChangedEvent(Map<String, Integer> existingTemplates, DiscoveryNodes nodes) {
    return createClusterChangedEvent(existingTemplates, Collections.emptyMap(), nodes);
}
/**
 * Builds a single-project cluster state for these tests.
 *
 * @param existingTemplates composable index template names mapped to their version; each template
 *        is a mock with only {@code version()} stubbed
 * @param existingPolicies  ILM policies to expose via {@link IndexLifecycleMetadata}
 *        (headers empty, version and modified-date fixed to 1)
 * @param nodes             the discovery nodes to place in the state
 * @return a state with exactly one project carrying the templates and ILM metadata
 */
private ClusterState createClusterState(
Map<String, Integer> existingTemplates,
Map<String, LifecyclePolicy> existingPolicies,
DiscoveryNodes nodes
) {
Map<String, ComposableIndexTemplate> indexTemplates = new HashMap<>();
for (Map.Entry<String, Integer> template : existingTemplates.entrySet()) {
final ComposableIndexTemplate mockTemplate = mock(ComposableIndexTemplate.class);
// NOTE(review): the (long) unboxing assumes no null versions here, unlike the
// single-map createClusterState overload which explicitly handles nulls.
when(mockTemplate.version()).thenReturn((long) template.getValue());
indexTemplates.put(template.getKey(), mockTemplate);
}
// Wrap each policy in metadata so it can be stored as the IndexLifecycleMetadata custom.
Map<String, LifecyclePolicyMetadata> existingILMMeta = existingPolicies.entrySet()
.stream()
.collect(Collectors.toMap(Map.Entry::getKey, e -> new LifecyclePolicyMetadata(e.getValue(), Collections.emptyMap(), 1, 1)));
IndexLifecycleMetadata ilmMeta = new IndexLifecycleMetadata(existingILMMeta, OperationMode.RUNNING);
final var project = ProjectMetadata.builder(randomProjectIdOrDefault())
.indexTemplates(indexTemplates)
.putCustom(IndexLifecycleMetadata.TYPE, ilmMeta)
.build();
return ClusterState.builder(new ClusterName("test"))
// We need to ensure only one project is present in the cluster state to simplify the assertions in these tests.
.metadata(Metadata.builder().projectMetadata(Map.of(project.id(), project)).build())
.blocks(new ClusterBlocks.Builder().build())
.nodes(nodes)
.build();
}
/**
 * Wraps a freshly built cluster state in a {@link ClusterChangedEvent} whose previous state is
 * empty, then spies on it so that {@code localNodeMaster()} reflects the supplied nodes.
 */
private ClusterChangedEvent createClusterChangedEvent(
Map<String, Integer> existingTemplates,
Map<String, LifecyclePolicy> existingPolicies,
DiscoveryNodes nodes
) {
ClusterState cs = createClusterState(existingTemplates, existingPolicies, nodes);
ClusterChangedEvent realEvent = new ClusterChangedEvent(
"created-from-test",
cs,
ClusterState.builder(new ClusterName("test")).build()
);
// Spy on the real event so localNodeMaster() can be stubbed from the nodes argument.
ClusterChangedEvent event = spy(realEvent);
when(event.localNodeMaster()).thenReturn(nodes.isLocalNodeElectedMaster());
return event;
}
/**
 * Returns a new event whose state equals the previous event's state plus the watch history
 * template (mocked at the current INDEX_TEMPLATE_VERSION), simulating a cluster where the
 * template has already been installed.
 */
private ClusterChangedEvent addTemplateToState(ClusterChangedEvent previousEvent) {
final ComposableIndexTemplate mockTemplate = mock(ComposableIndexTemplate.class);
when(mockTemplate.version()).thenReturn((long) INDEX_TEMPLATE_VERSION);
// Copy the (single) project from the previous state and add the history template to it.
ProjectMetadata newProject = ProjectMetadata.builder(previousEvent.state().metadata().projects().values().iterator().next())
.put(WatcherIndexTemplateRegistryField.HISTORY_TEMPLATE_NAME, mockTemplate)
.build();
ClusterState newState = ClusterState.builder(previousEvent.state()).putProjectMetadata(newProject).build();
return new ClusterChangedEvent("created-from-test", newState, previousEvent.state());
}
/**
 * Builds a cluster state (default project) containing the given composable index templates.
 * A {@code null} version is allowed and yields a template whose {@code version()} is null.
 */
private ClusterState createClusterState(Map<String, Integer> existingTemplates) {
Metadata.Builder metadataBuilder = Metadata.builder();
HashMap<String, ComposableIndexTemplate> templates = new HashMap<>();
for (Map.Entry<String, Integer> template : existingTemplates.entrySet()) {
ComposableIndexTemplate indexTemplate = mock(ComposableIndexTemplate.class);
// Stub version (null-safe) and random index patterns; callers only inspect these two.
when(indexTemplate.version()).thenReturn(template.getValue() == null ? null : (long) template.getValue());
when(indexTemplate.indexPatterns()).thenReturn(Arrays.asList(generateRandomStringArray(10, 100, false, false)));
templates.put(template.getKey(), indexTemplate);
}
@NotMultiProjectCapable(description = "Watcher is not available in serverless")
final var projectId = ProjectId.DEFAULT;
metadataBuilder.put(ProjectMetadata.builder(projectId).indexTemplates(templates));
return ClusterState.builder(new ClusterName("foo")).metadata(metadataBuilder.build()).build();
}
private static | WatcherIndexTemplateRegistryTests |
java | google__gson | gson/src/test/java/com/google/gson/functional/JsonAdapterAnnotationOnClassesTest.java | {
"start": 15664,
"end": 15797
} | class ____ {
String f;
WithDelayedDelegatingFactory(String f) {
this.f = f;
}
static | WithDelayedDelegatingFactory |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/metamodel/mapping/internal/ToOneAttributeMapping.java | {
"start": 41818,
"end": 41950
} | class ____{
@OneToOne(mappedBy = "identicallyNamedAssociation", fetch = FetchType.EAGER)
private EntityB b;
}
| EntityA |
java | eclipse-vertx__vert.x | vertx-core/src/test/java/io/vertx/it/metrics/ServiceLoaderTest.java | {
"start": 798,
"end": 2021
} | class ____ {
@Test
public void testMetricsFromServiceLoader() {
testMetricsFromServiceLoader(true);
}
@Test
public void testMetricsFromServiceLoaderDisabled() {
testMetricsFromServiceLoader(false);
}
private void testMetricsFromServiceLoader(boolean enabled) {
MetricsOptions metricsOptions = new MetricsOptions().setEnabled(enabled);
VertxOptions options = new VertxOptions().setMetricsOptions(metricsOptions);
Vertx vertx = Vertx.vertx(options);
VertxMetrics metrics = ((VertxInternal) vertx).metrics();
if (enabled) {
assertNotNull(metrics);
assertTrue(metrics instanceof FakeVertxMetrics);
assertEquals(metricsOptions.isEnabled(), ((FakeVertxMetrics)metrics).options().isEnabled());
} else {
assertNull(metrics);
}
}
@Test
public void testSetMetricsInstanceTakesPrecedenceOverServiceLoader() {
VertxMetrics metrics = new VertxMetrics() {
};
VertxBuilder builder = Vertx.builder()
.with(new VertxOptions().setMetricsOptions(new MetricsOptions().setEnabled(true)))
.withMetrics(options -> metrics);
Vertx vertx = builder.build();
assertSame(metrics, ((VertxInternal) vertx).metrics());
}
}
| ServiceLoaderTest |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/NoAllocationCheckerTest.java | {
"start": 875,
"end": 1440
} | class ____ {
private final CompilationTestHelper compilationHelper =
CompilationTestHelper.newInstance(NoAllocationChecker.class, getClass());
@Test
public void positiveCase() {
compilationHelper
.addSourceLines(
"NoAllocationCheckerPositiveCases.java",
"""
package com.google.errorprone.bugpatterns.testdata;
import com.google.errorprone.annotations.NoAllocation;
/**
* @author agoode@google.com (Adam Goode)
*/
public | NoAllocationCheckerTest |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.