language stringclasses 1 value | repo stringclasses 60 values | path stringlengths 22 294 | class_span dict | source stringlengths 13 1.16M | target stringlengths 1 113 |
|---|---|---|---|---|---|
java | redisson__redisson | redisson/src/main/java/org/redisson/api/redisnode/RedisClusterNodeAsync.java | {
"start": 884,
"end": 3716
} | interface ____ extends RedisNodeAsync {
/**
* Returns cluster information reported by this Redis node
*
* @return cluster information
*/
RFuture<Map<String, String>> clusterInfoAsync();
/**
* Returns id of this Redis node
*
* @return Redis node Id
*/
RFuture<String> clusterIdAsync();
/**
* Adds slots to this Redis node
*
* @param slots slots to add
* @return void
*/
RFuture<Void> clusterAddSlotsAsync(int... slots);
/**
* Reconfigures this Redis node as replica of Redis node by defined id.
*
* @param nodeId Redis node Id
* @return void
*/
RFuture<Void> clusterReplicateAsync(String nodeId);
/**
* Removes Redis node by defined id from Cluster
*
* @param nodeId
* @return void
*/
RFuture<Void> clusterForgetAsync(String nodeId);
/**
* Removes slots from this Redis node
*
* @param slots slots to remove
* @return void
*/
RFuture<Void> clusterDeleteSlotsAsync(int... slots);
/**
* Counts keys in defined slot
*
* @param slot slot
* @return keys amount
*/
RFuture<Long> clusterCountKeysInSlotAsync(int slot);
/**
* Returns keys in defines slot limited by count
*
* @param slot slot
* @param count limits keys amount
* @return keys
*/
RFuture<List<String>> clusterGetKeysInSlotAsync(int slot, int count);
/**
* Sets slot to this Redis node according to defined command
*
* @param slot slot
* @param command slot command
* @return void
*/
RFuture<Void> clusterSetSlotAsync(int slot, SetSlotCommand command);
/**
* Sets slot to this Redis node according to defined command
*
* @param slot slot
* @param command slot command
* @param nodeId Redis node id
* @return void
*/
RFuture<Void> clusterSetSlotAsync(int slot, SetSlotCommand command, String nodeId);
/**
* Joins Redis node by the defined address to Cluster
* <p>
* Address example: <code>redis://127.0.0.1:9233</code>
*
* @param address Redis node address
* @return void
*/
RFuture<Void> clusterMeetAsync(String address);
/**
* Returns number of failure reports for Redis node by defined id
*
* @param nodeId Redis node id
* @return amount of failure reports
*/
RFuture<Long> clusterCountFailureReportsAsync(String nodeId);
/**
* Removes all slots from this Redis node
* @return void
*/
RFuture<Void> clusterFlushSlotsAsync();
/**
* Return Redis Cluster slots mapped to Redis nodes
*
* @return slots mapping
*/
RFuture<Map<ClusterSlotRange, Set<String>>> clusterSlotsAsync();
}
| RedisClusterNodeAsync |
java | alibaba__druid | core/src/test/java/com/alibaba/druid/bvt/sql/mysql/select/MySqlSelectTest_2.java | {
"start": 1020,
"end": 2374
} | class ____ extends MysqlTest {
public void test_0() throws Exception {
String sql = "SELECT college, region, seed FROM tournament ORDER BY 2, 3;";
MySqlStatementParser parser = new MySqlStatementParser(sql);
List<SQLStatement> statementList = parser.parseStatementList();
SQLStatement statemen = statementList.get(0);
// print(statementList);
assertEquals(1, statementList.size());
MySqlSchemaStatVisitor visitor = new MySqlSchemaStatVisitor();
statemen.accept(visitor);
// System.out.println("Tables : " + visitor.getTables());
// System.out.println("fields : " + visitor.getColumns());
// System.out.println("coditions : " + visitor.getConditions());
// System.out.println("orderBy : " + visitor.getOrderByColumns());
assertEquals(1, visitor.getTables().size());
assertEquals(3, visitor.getColumns().size());
assertEquals(0, visitor.getConditions().size());
assertTrue(visitor.getTables().containsKey(new TableStat.Name("tournament")));
assertTrue(visitor.getColumns().contains(new Column("tournament", "college")));
assertTrue(visitor.getColumns().contains(new Column("tournament", "region")));
assertTrue(visitor.getColumns().contains(new Column("tournament", "seed")));
}
}
| MySqlSelectTest_2 |
java | reactor__reactor-core | reactor-core/src/jcstress/java/reactor/core/publisher/FluxSwitchOnFirstConditionalStressTest.java | {
"start": 2416,
"end": 3712
} | class ____
extends FluxSwitchOnFirstConditionalStressTest {
@Override
Flux<String> switchOnFirst(Signal<? extends String> signal,
Flux<String> inbound) {
return new Flux<String>() {
@Override
public void subscribe(CoreSubscriber<? super String> actual) {
inbound.subscribe(inboundSubscriber);
inboundSubscriber.request(1);
outboundSubscription.subscribe(actual);
}
};
}
@Actor
public void next() {
main.tryOnNext("test");
}
@Actor
public void request() {
outboundSubscriber.request(1);
}
@Arbiter
public void arbiter(LLLLL_Result result) {
result.r1 = outboundSubscription.requestsCount;
result.r2 = outboundSubscription.requested;
result.r3 = inboundSubscription.requestsCount;
result.r4 = inboundSubscription.requested;
result.r5 = inboundSubscriber.onNextCalls;
}
}
// Ignore, flaky test (https://github.com/reactor/reactor-core/issues/3627)
//@JCStressTest
@Outcome(id = {
"1, 2, 0, 1"}, expect = ACCEPTABLE, desc = "inbound next with error happens first")
@Outcome(id = {
"1, 0, 0, 1"}, expect = ACCEPTABLE, desc = "cancellation happened first")
@Outcome(id = {"1, 3, 0, 1"}, expect = ACCEPTABLE, desc = "cancellation in between")
@State
public static | OutboundOnSubscribeAndRequestStressTest |
java | elastic__elasticsearch | libs/ssl-config/src/main/java/org/elasticsearch/common/ssl/DerParser.java | {
"start": 1139,
"end": 1281
} | class ____ {
// Constructed Flag
private static final int CONSTRUCTED = 0x20;
// Tag and data types
public static final | DerParser |
java | eclipse-vertx__vert.x | vertx-core/src/main/java/io/vertx/core/file/OpenOptions.java | {
"start": 771,
"end": 8726
} | class ____ {
public static final String DEFAULT_PERMS = null;
public static final boolean DEFAULT_READ = true;
public static final boolean DEFAULT_WRITE = true;
public static final boolean DEFAULT_CREATE = true;
public static final boolean DEFAULT_CREATENEW = false;
public static final boolean DEFAULT_DSYNC = false;
public static final boolean DEFAULT_SYNC = false;
public static final boolean DEFAULT_DELETEONCLOSE = false;
public static final boolean DEFAULT_TRUNCATEEXISTING = false;
public static final boolean DEFAULT_SPARSE = false;
/**
* Whether the file should be opened in append mode by default = false.
*/
public static final boolean DEFAULT_APPEND = false;
private String perms = DEFAULT_PERMS;
private boolean read = DEFAULT_READ;
private boolean write = DEFAULT_WRITE;
private boolean create = DEFAULT_CREATE;
private boolean createNew = DEFAULT_CREATENEW;
private boolean dsync = DEFAULT_DSYNC;
private boolean sync = DEFAULT_SYNC;
private boolean deleteOnClose = DEFAULT_DELETEONCLOSE;
private boolean truncateExisting = DEFAULT_TRUNCATEEXISTING;
private boolean sparse = DEFAULT_SPARSE;
private boolean append = DEFAULT_APPEND;
/**
* Default constructor
*/
public OpenOptions() {
super();
}
/**
* Copy constructor
*
* @param other the options to copy
*/
public OpenOptions(OpenOptions other) {
this.perms = other.perms;
this.read = other.read;
this.write = other.write;
this.create = other.create;
this.createNew = other.createNew;
this.dsync = other.dsync;
this.sync = other.sync;
this.deleteOnClose = other.deleteOnClose;
this.truncateExisting = other.truncateExisting;
this.sparse = other.sparse;
this.append = other.append;
}
/**
* Constructor to create options from JSON
*
* @param json the JSON
*/
public OpenOptions(JsonObject json) {
this();
OpenOptionsConverter.fromJson(json, this);
}
/**
* Get the permissions string to be used if creating a file
*
* @return the permissions string
*/
public String getPerms() {
return perms;
}
/**
* Set the permissions string
*
* @param perms the permissions string
* @return a reference to this, so the API can be used fluently
*/
public OpenOptions setPerms(String perms) {
this.perms = perms;
return this;
}
/**
* Is the file to opened for reading?
*
* @return true if to be opened for reading
*/
public boolean isRead() {
return read;
}
/**
* Set whether the file is to be opened for reading
*
* @param read true if the file is to be opened for reading
* @return a reference to this, so the API can be used fluently
*/
public OpenOptions setRead(boolean read) {
this.read = read;
return this;
}
/**
* Is the file to opened for writing?
*
* @return true if to be opened for writing
*/
public boolean isWrite() {
return write;
}
/**
* Set whether the file is to be opened for writing
*
* @param write true if the file is to be opened for writing
* @return a reference to this, so the API can be used fluently
*/
public OpenOptions setWrite(boolean write) {
this.write = write;
return this;
}
/**
* Should the file be created if it does not already exist?
*
* @return true if the file should be created if it does not already exist
*/
public boolean isCreate() {
return create;
}
/**
* Set whether the file should be created if it does not already exist.
*
* @param create true if the file should be created if it does not already exist
* @return a reference to this, so the API can be used fluently
*/
public OpenOptions setCreate(boolean create) {
this.create = create;
return this;
}
/**
* Should the file be created if and the open fail if it already exists?
*
* @return true if the file should be created if and the open fail if it already exists.
*/
public boolean isCreateNew() {
return createNew;
}
/**
* Set whether the file should be created and fail if it does exist already.
*
* @param createNew true if the file should be created or fail if it exists already
* @return a reference to this, so the API can be used fluently
*/
public OpenOptions setCreateNew(boolean createNew) {
this.createNew = createNew;
return this;
}
/**
* Should the file be deleted when it's closed, or the JVM is shutdown.
*
* @return true if the file should be deleted when it's closed or the JVM shutdown
*/
public boolean isDeleteOnClose() {
return deleteOnClose;
}
/**
* Set whether the file should be deleted when it's closed, or the JVM is shutdown.
*
* @param deleteOnClose whether the file should be deleted when it's closed, or the JVM is shutdown.
* @return a reference to this, so the API can be used fluently
*/
public OpenOptions setDeleteOnClose(boolean deleteOnClose) {
this.deleteOnClose = deleteOnClose;
return this;
}
/**
* If the file exists and is opened for writing should the file be truncated to zero length on open?
*
* @return true if the file exists and is opened for writing and the file be truncated to zero length on open
*/
public boolean isTruncateExisting() {
return truncateExisting;
}
/**
* Set whether the file should be truncated to zero length on opening if it exists and is opened for write
*
* @param truncateExisting true if the file should be truncated to zero length on opening if it exists and is opened for write
* @return a reference to this, so the API can be used fluently
*/
public OpenOptions setTruncateExisting(boolean truncateExisting) {
this.truncateExisting = truncateExisting;
return this;
}
/**
* Set whether a hint should be provided that the file to created is sparse
*
* @return true if a hint should be provided that the file to created is sparse
*/
public boolean isSparse() {
return sparse;
}
/**
* Set whether a hint should be provided that the file to created is sparse
* @param sparse true if a hint should be provided that the file to created is sparse
* @return a reference to this, so the API can be used fluently
*/
public OpenOptions setSparse(boolean sparse) {
this.sparse = sparse;
return this;
}
/**
* If true then every write to the file's content and metadata will be written synchronously to the underlying hardware.
*
* @return true if sync
*/
public boolean isSync() {
return sync;
}
/**
* Set whether every write to the file's content and meta-data will be written synchronously to the underlying hardware.
* @param sync true if sync
* @return a reference to this, so the API can be used fluently
*/
public OpenOptions setSync(boolean sync) {
this.sync = sync;
return this;
}
/**
* If true then every write to the file's content will be written synchronously to the underlying hardware.
*
* @return true if sync
*/
public boolean isDsync() {
return dsync;
}
/**
* Set whether every write to the file's content will be written synchronously to the underlying hardware.
* @param dsync true if sync
* @return a reference to this, so the API can be used fluently
*/
public OpenOptions setDsync(boolean dsync) {
this.dsync = dsync;
return this;
}
/**
* @return true if the file should be opened in append mode, false otherwise
*/
public boolean isAppend() {
return append;
}
/**
* Whether the file should be opened in append mode. Defaults to {@code false}.
*
* @param append true to open file in append mode, false otherwise
* @return a reference to this, so the API can be used fluently
*/
public OpenOptions setAppend(boolean append) {
this.append = append;
return this;
}
}
| OpenOptions |
java | elastic__elasticsearch | client/rest/src/main/java/org/elasticsearch/client/RestClient.java | {
"start": 30559,
"end": 31806
} | class ____ {
private final ResponseListener responseListener;
private volatile Exception exception;
FailureTrackingResponseListener(ResponseListener responseListener) {
this.responseListener = responseListener;
}
/**
* Notifies the caller of a response through the wrapped listener
*/
void onSuccess(Response response) {
responseListener.onSuccess(response);
}
/**
* Tracks one last definitive failure and returns to the caller by notifying the wrapped listener
*/
void onDefinitiveFailure(Exception e) {
trackFailure(e);
responseListener.onFailure(this.exception);
}
/**
* Tracks an exception, which caused a retry hence we should not return yet to the caller
*/
void trackFailure(Exception e) {
addSuppressedException(this.exception, e);
this.exception = e;
}
}
/**
* Listener that allows to be notified whenever a failure happens. Useful when sniffing is enabled, so that we can sniff on failure.
* The default implementation is a no-op.
*/
public static | FailureTrackingResponseListener |
java | elastic__elasticsearch | x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/oidc/TransportOpenIdConnectPrepareAuthenticationAction.java | {
"start": 1337,
"end": 4443
} | class ____ extends HandledTransportAction<
OpenIdConnectPrepareAuthenticationRequest,
OpenIdConnectPrepareAuthenticationResponse> {
private final Realms realms;
@Inject
public TransportOpenIdConnectPrepareAuthenticationAction(
TransportService transportService,
ActionFilters actionFilters,
Realms realms
) {
super(
OpenIdConnectPrepareAuthenticationAction.NAME,
transportService,
actionFilters,
OpenIdConnectPrepareAuthenticationRequest::new,
EsExecutors.DIRECT_EXECUTOR_SERVICE
);
this.realms = realms;
}
@Override
protected void doExecute(
Task task,
OpenIdConnectPrepareAuthenticationRequest request,
ActionListener<OpenIdConnectPrepareAuthenticationResponse> listener
) {
Realm realm = null;
if (Strings.hasText(request.getIssuer())) {
List<OpenIdConnectRealm> matchingRealms = this.realms.stream()
.filter(r -> r instanceof OpenIdConnectRealm && ((OpenIdConnectRealm) r).isIssuerValid(request.getIssuer()))
.map(r -> (OpenIdConnectRealm) r)
.toList();
if (matchingRealms.isEmpty()) {
listener.onFailure(
new ElasticsearchSecurityException("Cannot find OpenID Connect realm with issuer [{}]", request.getIssuer())
);
return;
} else if (matchingRealms.size() > 1) {
listener.onFailure(
new ElasticsearchSecurityException("Found multiple OpenID Connect realm with issuer [{}]", request.getIssuer())
);
return;
} else {
realm = matchingRealms.get(0);
}
} else if (Strings.hasText(request.getRealmName())) {
realm = this.realms.realm(request.getRealmName());
}
if (realm instanceof OpenIdConnectRealm) {
prepareAuthenticationResponse(
(OpenIdConnectRealm) realm,
request.getState(),
request.getNonce(),
request.getLoginHint(),
listener
);
} else {
listener.onFailure(
new ElasticsearchSecurityException("Cannot find OpenID Connect realm with name [{}]", request.getRealmName())
);
}
}
private static void prepareAuthenticationResponse(
OpenIdConnectRealm realm,
String state,
String nonce,
String loginHint,
ActionListener<OpenIdConnectPrepareAuthenticationResponse> listener
) {
try {
final OpenIdConnectPrepareAuthenticationResponse authenticationResponse = realm.buildAuthenticationRequestUri(
state,
nonce,
loginHint
);
listener.onResponse(authenticationResponse);
} catch (ElasticsearchException e) {
listener.onFailure(e);
}
}
}
| TransportOpenIdConnectPrepareAuthenticationAction |
java | FasterXML__jackson-databind | src/test/java/tools/jackson/databind/ext/jdk8/OptionalMergeTest.java | {
"start": 366,
"end": 459
} | class ____
extends DatabindTestUtil
{
// [modules-java8#214]
static | OptionalMergeTest |
java | spring-projects__spring-framework | spring-test/src/main/java/org/springframework/test/web/servlet/htmlunit/MockMvcWebConnectionBuilderSupport.java | {
"start": 1676,
"end": 6176
} | class ____<T extends MockMvcWebConnectionBuilderSupport<T>> {
private final MockMvc mockMvc;
private final List<WebRequestMatcher> requestMatchers = new ArrayList<>();
private String contextPath = "";
private boolean alwaysUseMockMvc;
/**
* Create a new instance using the supplied {@link MockMvc} instance.
* @param mockMvc the {@code MockMvc} instance to use; never {@code null}
*/
protected MockMvcWebConnectionBuilderSupport(MockMvc mockMvc) {
Assert.notNull(mockMvc, "MockMvc must not be null");
this.mockMvc = mockMvc;
this.requestMatchers.add(new HostRequestMatcher("localhost"));
}
/**
* Create a new instance using the supplied {@link WebApplicationContext}.
* @param context the {@code WebApplicationContext} to create a {@code MockMvc}
* instance from; never {@code null}
*/
protected MockMvcWebConnectionBuilderSupport(WebApplicationContext context) {
this(MockMvcBuilders.webAppContextSetup(context).build());
}
/**
* Create a new instance using the supplied {@link WebApplicationContext}
* and {@link MockMvcConfigurer}.
* @param context the {@code WebApplicationContext} to create a {@code MockMvc}
* instance from; never {@code null}
* @param configurer the MockMvcConfigurer to apply; never {@code null}
*/
protected MockMvcWebConnectionBuilderSupport(WebApplicationContext context, MockMvcConfigurer configurer) {
this(MockMvcBuilders.webAppContextSetup(context).apply(configurer).build());
}
/**
* Set the context path to use.
* <p>If the supplied value is {@code null} or empty, the first path
* segment of the request URL is assumed to be the context path.
* <p>Default is {@code ""}.
* @param contextPath the context path to use
* @return this builder for further customization
*/
@SuppressWarnings("unchecked")
public T contextPath(String contextPath) {
this.contextPath = contextPath;
return (T) this;
}
/**
* Specify that {@link MockMvc} should always be used regardless of
* what the request looks like.
* @return this builder for further customization
*/
@SuppressWarnings("unchecked")
public T alwaysUseMockMvc() {
this.alwaysUseMockMvc = true;
return (T) this;
}
/**
* Add additional {@link WebRequestMatcher} instances that will ensure
* that {@link MockMvc} is used to process the request, if such a matcher
* matches against the web request.
* @param matchers additional {@code WebRequestMatcher} instances
* @return this builder for further customization
*/
@SuppressWarnings("unchecked")
public T useMockMvc(WebRequestMatcher... matchers) {
Collections.addAll(this.requestMatchers, matchers);
return (T) this;
}
/**
* Add additional {@link WebRequestMatcher} instances that return {@code true}
* if a supplied host matches — for example, {@code "example.com"} or
* {@code "example.com:8080"}.
* @param hosts additional hosts that ensure {@code MockMvc} gets invoked
* @return this builder for further customization
*/
@SuppressWarnings("unchecked")
public T useMockMvcForHosts(String... hosts) {
this.requestMatchers.add(new HostRequestMatcher(hosts));
return (T) this;
}
/**
* Create a new {@link WebConnection} that will use a {@link MockMvc}
* instance if one of the specified {@link WebRequestMatcher} instances
* matches.
* @param webClient the WebClient to use if none of the specified
* {@code WebRequestMatcher} instances matches (never {@code null})
* @return a new {@code WebConnection} that will use a {@code MockMvc}
* instance if one of the specified {@code WebRequestMatcher} matches
* @since 4.3
* @see #alwaysUseMockMvc()
* @see #useMockMvc(WebRequestMatcher...)
* @see #useMockMvcForHosts(String...)
*/
protected final WebConnection createConnection(WebClient webClient) {
Assert.notNull(webClient, "WebClient must not be null");
return createConnection(webClient, webClient.getWebConnection());
}
private WebConnection createConnection(WebClient webClient, WebConnection defaultConnection) {
WebConnection connection = new MockMvcWebConnection(this.mockMvc, webClient, this.contextPath);
if (this.alwaysUseMockMvc) {
return connection;
}
List<DelegateWebConnection> delegates = new ArrayList<>(this.requestMatchers.size());
for (WebRequestMatcher matcher : this.requestMatchers) {
delegates.add(new DelegateWebConnection(matcher, connection));
}
return new DelegatingWebConnection(defaultConnection, delegates);
}
}
| MockMvcWebConnectionBuilderSupport |
java | apache__maven | its/core-it-support/core-it-plugins/maven-it-plugin-site/src/main/java/org/apache/maven/plugin/coreit/GenerateMojo.java | {
"start": 1670,
"end": 4124
} | class ____ extends AbstractMojo {
/**
* The path to the output directory of the site.
*/
@Parameter(defaultValue = "${project.reporting.outputDirectory}")
private File outputDirectory;
/**
* The language for the reports.
*/
@Parameter(defaultValue = "en")
private String language = "en";
/**
* A flag whether to ignore errors from reports and continue the generation.
*/
@Parameter(defaultValue = "false")
private boolean ignoreErrors;
/**
* The reports configured for the current build.
*/
@Parameter(defaultValue = "${reports}", required = true, readonly = true)
private List reports;
/**
* Runs this mojo.
*
* @throws MojoExecutionException If the output file could not be created.
*/
public void execute() throws MojoExecutionException, MojoFailureException {
getLog().info("[MAVEN-CORE-IT-LOG] Using output directory " + outputDirectory);
Locale locale = new Locale(language);
getLog().info("[MAVEN-CORE-IT-LOG] Using locale " + locale);
InvocationHandler handler = new InvocationHandler() {
@Override
public Object invoke(Object proxy, Method method, Object[] args) throws Throwable {
return null;
}
};
Sink sink = (Sink) Proxy.newProxyInstance(getClass().getClassLoader(), new Class[] {Sink.class}, handler);
for (Object report1 : reports) {
MavenReport report = (MavenReport) report1;
try {
if (report.canGenerateReport()) {
getLog().info("[MAVEN-CORE-IT-LOG] Generating report " + report);
try {
report.setReportOutputDirectory(outputDirectory);
report.generate(sink, locale);
} catch (Throwable e) {
getLog().warn("[MAVEN-CORE-IT-LOG] " + e, e);
if (!ignoreErrors) {
throw new MojoExecutionException("Failed to generate report " + report, e);
}
}
} else {
getLog().info("[MAVEN-CORE-IT-LOG] Skipping report " + report);
}
} catch (MavenReportException e) {
getLog().info("[MAVEN-CORE-IT-LOG] Failing report " + report);
}
}
}
}
| GenerateMojo |
java | apache__kafka | clients/src/main/java/org/apache/kafka/common/utils/Checksums.java | {
"start": 965,
"end": 1038
} | class ____ intended for INTERNAL usage only within Kafka.
*/
public final | is |
java | FasterXML__jackson-databind | src/test/java/tools/jackson/databind/ext/jdk8/OptionalMapsTest.java | {
"start": 445,
"end": 1727
} | class ____ {
public Map<String, Optional<?>> values;
public OptMapBean(String key, Optional<?> v) {
values = new LinkedHashMap<>();
values.put(key, v);
}
}
/*
/**********************************************************************
/* Test methods
/**********************************************************************
*/
@Test
public void testMapElementInclusion() throws Exception
{
ObjectMapper mapper = jsonMapperBuilder().changeDefaultPropertyInclusion(
incl -> incl.withValueInclusion(JsonInclude.Include.NON_NULL)
.withContentInclusion(JsonInclude.Include.NON_ABSENT))
.build();
// first: Absent entry/-ies should NOT be included
assertEquals("{\"values\":{}}",
mapper.writeValueAsString(new OptMapBean("key", Optional.empty())));
// but non-empty should
assertEquals("{\"values\":{\"key\":\"value\"}}",
mapper.writeValueAsString(new OptMapBean("key", Optional.of("value"))));
// and actually even empty
assertEquals("{\"values\":{\"key\":\"\"}}",
mapper.writeValueAsString(new OptMapBean("key", Optional.of(""))));
}
}
| OptMapBean |
java | elastic__elasticsearch | server/src/test/java/org/elasticsearch/cluster/routing/ShardMovementWriteLoadSimulatorTests.java | {
"start": 1532,
"end": 15193
} | class ____ extends ESTestCase {
private static final RoutingChangesObserver NOOP = new RoutingChangesObserver() {
};
private static final String[] INDICES = { "indexOne", "indexTwo", "indexThree" };
/**
* We should not adjust the values if there's no movement
*/
public void testNoShardMovement() {
final var originalNode0ThreadPoolStats = randomThreadPoolUsageStats();
final var originalNode1ThreadPoolStats = randomThreadPoolUsageStats();
final var allocation = createRoutingAllocationWithRandomisedWriteLoads(
Set.of(),
originalNode0ThreadPoolStats,
originalNode1ThreadPoolStats
);
final var shardMovementWriteLoadSimulator = new ShardMovementWriteLoadSimulator(allocation);
final var calculatedNodeUsageStates = shardMovementWriteLoadSimulator.simulatedNodeUsageStatsForThreadPools();
assertThat(calculatedNodeUsageStates, Matchers.aMapWithSize(2));
assertThat(
calculatedNodeUsageStates.get("node_0").threadPoolUsageStatsMap().get("write"),
sameInstance(originalNode0ThreadPoolStats)
);
assertThat(
calculatedNodeUsageStates.get("node_1").threadPoolUsageStatsMap().get("write"),
sameInstance(originalNode1ThreadPoolStats)
);
}
public void testMovementOfAShardWillMoveThreadPoolStats() {
final var originalNode0ThreadPoolStats = randomThreadPoolUsageStats();
final var originalNode1ThreadPoolStats = randomThreadPoolUsageStats();
final var originalNode2ThreadPoolStats = randomThreadPoolUsageStats();
final var allocation = createRoutingAllocationWithRandomisedWriteLoads(
Set.of(),
originalNode0ThreadPoolStats,
originalNode1ThreadPoolStats,
originalNode2ThreadPoolStats
);
final var shardMovementWriteLoadSimulator = new ShardMovementWriteLoadSimulator(allocation);
// Relocate a random shard from node_0 to node_1
final var randomShard = randomFrom(StreamSupport.stream(allocation.routingNodes().node("node_0").spliterator(), false).toList());
final var expectedShardSize = randomNonNegativeLong();
final var moveShardTuple = allocation.routingNodes().relocateShard(randomShard, "node_1", expectedShardSize, "testing", NOOP);
shardMovementWriteLoadSimulator.simulateShardStarted(moveShardTuple.v2());
final ShardRouting movedAndStartedShard = allocation.routingNodes().startShard(moveShardTuple.v2(), NOOP, expectedShardSize);
final var calculatedNodeUsageStats = shardMovementWriteLoadSimulator.simulatedNodeUsageStatsForThreadPools();
assertThat(calculatedNodeUsageStats, Matchers.aMapWithSize(3));
final var shardWriteLoad = allocation.clusterInfo().getShardWriteLoads().get(randomShard.shardId());
final var expectedUtilisationReductionAtSource = shardWriteLoad / originalNode0ThreadPoolStats.totalThreadPoolThreads();
final var expectedUtilisationIncreaseAtDestination = shardWriteLoad / originalNode1ThreadPoolStats.totalThreadPoolThreads();
// Some node_0 utilization should have been moved to node_1
if (expectedUtilisationReductionAtSource > originalNode0ThreadPoolStats.averageThreadPoolUtilization()) {
// We don't return utilization less than zero because that makes no sense
assertThat(getAverageWritePoolUtilization(shardMovementWriteLoadSimulator, "node_0"), equalTo(0.0f));
} else {
assertThat(
(double) originalNode0ThreadPoolStats.averageThreadPoolUtilization() - getAverageWritePoolUtilization(
shardMovementWriteLoadSimulator,
"node_0"
),
closeTo(expectedUtilisationReductionAtSource, 0.001f)
);
}
assertThat(
(double) getAverageWritePoolUtilization(shardMovementWriteLoadSimulator, "node_1") - originalNode1ThreadPoolStats
.averageThreadPoolUtilization(),
closeTo(expectedUtilisationIncreaseAtDestination, 0.001f)
);
// Queue latency reduced for node_0 since it has a shard moved out
assertThat(getMaxThreadPoolQueueLatency(shardMovementWriteLoadSimulator, "node_0"), equalTo(0L));
// Queue latency stays unchanged for node_1 since it only has a shard moved in
assertThat(
getMaxThreadPoolQueueLatency(shardMovementWriteLoadSimulator, "node_1"),
equalTo(originalNode1ThreadPoolStats.maxThreadPoolQueueLatencyMillis())
);
// Queue latency stays unchanged for node_2 since it has no shard movement
assertThat(
getMaxThreadPoolQueueLatency(shardMovementWriteLoadSimulator, "node_2"),
equalTo(originalNode2ThreadPoolStats.maxThreadPoolQueueLatencyMillis())
);
// Then move it back
final var moveBackTuple = allocation.routingNodes()
.relocateShard(movedAndStartedShard, "node_0", expectedShardSize, "testing", NOOP);
shardMovementWriteLoadSimulator.simulateShardStarted(moveBackTuple.v2());
// The utilization numbers should return to their original values
assertThat(
getAverageWritePoolUtilization(shardMovementWriteLoadSimulator, "node_0"),
equalTo(originalNode0ThreadPoolStats.averageThreadPoolUtilization())
);
assertThat(
getAverageWritePoolUtilization(shardMovementWriteLoadSimulator, "node_1"),
equalTo(originalNode1ThreadPoolStats.averageThreadPoolUtilization())
);
// We intentionally keep things simple so that if a shard has moved away from a node, its queue latency is reduced to zero
// regardless of whether other shards have subsequently moved onto or out of the same node.
assertThat(getMaxThreadPoolQueueLatency(shardMovementWriteLoadSimulator, "node_0"), equalTo(0L));
assertThat(getMaxThreadPoolQueueLatency(shardMovementWriteLoadSimulator, "node_1"), equalTo(0L));
// Queue latency stays unchanged for node_2 since it has no shard movement
assertThat(
getMaxThreadPoolQueueLatency(shardMovementWriteLoadSimulator, "node_2"),
equalTo(originalNode2ThreadPoolStats.maxThreadPoolQueueLatencyMillis())
);
}
public void testMovementBetweenNodesWithNoThreadPoolAndWriteLoadStats() {
final var originalNode0ThreadPoolStats = randomBoolean() ? randomThreadPoolUsageStats() : null;
final var originalNode1ThreadPoolStats = randomBoolean() ? randomThreadPoolUsageStats() : null;
final var allocation = createRoutingAllocationWithRandomisedWriteLoads(
new HashSet<>(randomSubsetOf(Arrays.asList(INDICES))),
originalNode0ThreadPoolStats,
originalNode1ThreadPoolStats
);
final var shardMovementWriteLoadSimulator = new ShardMovementWriteLoadSimulator(allocation);
// Relocate a random shard from node_0 to node_1
final var expectedShardSize = randomNonNegativeLong();
final var randomShard = randomFrom(StreamSupport.stream(allocation.routingNodes().node("node_0").spliterator(), false).toList());
final var moveShardTuple = allocation.routingNodes().relocateShard(randomShard, "node_1", expectedShardSize, "testing", NOOP);
shardMovementWriteLoadSimulator.simulateShardStarted(moveShardTuple.v2());
allocation.routingNodes().startShard(moveShardTuple.v2(), NOOP, expectedShardSize);
final var simulated = shardMovementWriteLoadSimulator.simulatedNodeUsageStatsForThreadPools();
assertThat(simulated.containsKey("node_0"), equalTo(originalNode0ThreadPoolStats != null));
assertThat(simulated.containsKey("node_1"), equalTo(originalNode1ThreadPoolStats != null));
}
public void testShardWithNoWriteLoadStillResetsQueueLatency() {
final ClusterState clusterState = createClusterState();
final var allocation = createRoutingAllocationWithShardWriteLoads(
clusterState,
Set.of(INDICES),
Map.of(),
randomThreadPoolUsageStats(),
randomThreadPoolUsageStats()
);
final var shardMovementWriteLoadSimulator = new ShardMovementWriteLoadSimulator(allocation);
// Relocate a random shard from node_0 to node_1
final var randomShard = randomFrom(StreamSupport.stream(allocation.routingNodes().node("node_0").spliterator(), false).toList());
final var moveShardTuple = allocation.routingNodes().relocateShard(randomShard, "node_1", 0, "testing", NOOP);
shardMovementWriteLoadSimulator.simulateShardStarted(moveShardTuple.v2());
final var simulated = shardMovementWriteLoadSimulator.simulatedNodeUsageStatsForThreadPools();
final var threadPoolUsageStats = simulated.get("node_0").threadPoolUsageStatsMap().get("write");
assertThat(threadPoolUsageStats.maxThreadPoolQueueLatencyMillis(), equalTo(0L)); // queue latency is reset
// No change to write load since shard has no write load
assertThat(
threadPoolUsageStats.averageThreadPoolUtilization(),
equalTo(
allocation.clusterInfo()
.getNodeUsageStatsForThreadPools()
.get("node_0")
.threadPoolUsageStatsMap()
.get("write")
.averageThreadPoolUtilization()
)
);
}
public void testUpdateThreadPoolQueueLatencyWithShardMovements() {
final long originalLatency = randomNonNegativeLong();
assertThat(
ShardMovementWriteLoadSimulator.adjustThreadPoolQueueLatencyWithShardMovements(originalLatency, false),
equalTo(originalLatency)
);
assertThat(ShardMovementWriteLoadSimulator.adjustThreadPoolQueueLatencyWithShardMovements(originalLatency, true), equalTo(0L));
}
private float getAverageWritePoolUtilization(ShardMovementWriteLoadSimulator shardMovementWriteLoadSimulator, String nodeId) {
final var generatedNodeUsageStates = shardMovementWriteLoadSimulator.simulatedNodeUsageStatsForThreadPools();
final var node0WritePoolStats = generatedNodeUsageStates.get(nodeId).threadPoolUsageStatsMap().get("write");
return node0WritePoolStats.averageThreadPoolUtilization();
}
private long getMaxThreadPoolQueueLatency(ShardMovementWriteLoadSimulator shardMovementWriteLoadSimulator, String nodeId) {
final var generatedNodeUsageStates = shardMovementWriteLoadSimulator.simulatedNodeUsageStatsForThreadPools();
final var writePoolStats = generatedNodeUsageStates.get(nodeId).threadPoolUsageStatsMap().get("write");
return writePoolStats.maxThreadPoolQueueLatencyMillis();
}
private NodeUsageStatsForThreadPools.ThreadPoolUsageStats randomThreadPoolUsageStats() {
return new NodeUsageStatsForThreadPools.ThreadPoolUsageStats(
randomIntBetween(4, 16),
randomBoolean() ? 0.0f : randomFloatBetween(0.1f, 1.0f, true),
randomLongBetween(0, 60_000)
);
}
private RoutingAllocation createRoutingAllocationWithRandomisedWriteLoads(
Set<String> indicesWithNoWriteLoad,
NodeUsageStatsForThreadPools.ThreadPoolUsageStats... arrayOfNodeThreadPoolStats
) {
final ClusterState clusterState = createClusterState();
final Map<ShardId, Double> shardWriteLoads = clusterState.metadata()
.getProject(ProjectId.DEFAULT)
.stream()
.filter(index -> indicesWithNoWriteLoad.contains(index.getIndex().getName()) == false)
.flatMap(index -> IntStream.range(0, 3).mapToObj(shardNum -> new ShardId(index.getIndex(), shardNum)))
.collect(
Collectors.toUnmodifiableMap(shardId -> shardId, shardId -> randomBoolean() ? 0.0f : randomDoubleBetween(0.1, 5.0, true))
);
return createRoutingAllocationWithShardWriteLoads(
clusterState,
indicesWithNoWriteLoad,
shardWriteLoads,
arrayOfNodeThreadPoolStats
);
}
private RoutingAllocation createRoutingAllocationWithShardWriteLoads(
ClusterState clusterState,
Set<String> indicesWithNoWriteLoad,
Map<ShardId, Double> shardWriteLoads,
NodeUsageStatsForThreadPools.ThreadPoolUsageStats... arrayOfNodeThreadPoolStats
) {
final Map<String, NodeUsageStatsForThreadPools> nodeUsageStats = new HashMap<>();
for (int i = 0; i < arrayOfNodeThreadPoolStats.length; i++) {
final var nodeThreadPoolStats = arrayOfNodeThreadPoolStats[i];
if (nodeThreadPoolStats != null) {
final var nodeId = "node_" + i;
nodeUsageStats.put(nodeId, new NodeUsageStatsForThreadPools(nodeId, Map.of("write", nodeThreadPoolStats)));
}
}
final ClusterInfo clusterInfo = ClusterInfo.builder()
.nodeUsageStatsForThreadPools(nodeUsageStats)
.shardWriteLoads(shardWriteLoads)
.build();
return new RoutingAllocation(
new AllocationDeciders(List.of()),
clusterState,
clusterInfo,
SnapshotShardSizeInfo.EMPTY,
System.nanoTime()
).mutableCloneForSimulation();
}
private ClusterState createClusterState() {
return ClusterStateCreationUtils.stateWithAssignedPrimariesAndReplicas(INDICES, 3, 0);
}
}
| ShardMovementWriteLoadSimulatorTests |
java | eclipse-vertx__vert.x | vertx-core/src/test/java/io/vertx/tests/json/JsonObjectTest.java | {
"start": 62019,
"end": 66032
} | class ____ exception
// and the compiler would here just warn that the condition is alwasy true
assertNotNull(numbers.getNumber("BigDecimal"));
assertNotNull(numbers.getNumber("BigInteger"));
assertNotNull(numbers.getNumber("byte"));
assertNotNull(numbers.getNumber("double"));
assertNotNull(numbers.getNumber("float"));
assertNotNull(numbers.getNumber("int"));
assertNotNull(numbers.getNumber("long"));
assertNotNull(numbers.getNumber("short"));
// ensure that types are preserved
assertTrue(numbers.getNumber("BigDecimal") instanceof BigDecimal);
assertTrue(numbers.getNumber("BigInteger") instanceof BigInteger);
assertTrue(numbers.getNumber("byte") instanceof Byte);
assertTrue(numbers.getNumber("double") instanceof Double);
assertTrue(numbers.getNumber("float") instanceof Float);
assertTrue(numbers.getNumber("int") instanceof Integer);
assertTrue(numbers.getNumber("long") instanceof Long);
assertTrue(numbers.getNumber("short") instanceof Short);
// test overflow
JsonObject object = new JsonObject().put("v", 42000);
Number n = object.getNumber("v");
// 42000 is bigger than Short.MAX_VALUE so it shall overflow (silently)
assertEquals(Short.MIN_VALUE + (42000 - Short.MAX_VALUE - 1), n.shortValue());
// but not overflow if int
assertEquals(42000, n.intValue());
}
@Test
public void testNumberDefaults() {
JsonObject numbers = new JsonObject();
// getting any kind of number should be allowed
for (Number n : new Number[] {
new BigDecimal("124567890.0987654321"),
new BigInteger("1234567890123456789012345678901234567890"),
(byte) 0x0a,
Math.PI,
(float) Math.PI,
42,
1234567890123456789L,
Short.MAX_VALUE
}) {
assertNumberEqualsAndHashCode(n, numbers.getNumber("missingKey", n));
}
}
@Test
public void testStreamRawVSJSON() {
JsonObject obj = new JsonObject()
.put("days", TimeUnit.DAYS)
.put("minutes", TimeUnit.MINUTES);
// assert that stream values are converted to String as per JSON rules
List <Map.Entry> jsonData = obj
.stream()
.peek(t -> {
assertTrue(t instanceof Map.Entry);
assertTrue(t.getValue() instanceof String);
})
.collect(Collectors.toList());
for (Map.Entry o : jsonData) {
assertTrue(o.getValue() instanceof String);
}
// test raw
// assert that stream values are converted to String as per JSON rules
List<Map.Entry<String, ?>> rawData = obj
.getMap()
.entrySet()
.stream()
.peek(t -> {
assertTrue(t instanceof Map.Entry);
assertTrue(t.getValue() instanceof TimeUnit);
})
.collect(Collectors.toList());
for (Map.Entry<String, ?> o : rawData) {
assertTrue(o.getValue() instanceof TimeUnit);
}
}
@Test
public void testJsonObjectOfArgs() {
// test Ingeger
Integer i = 1;
// test string
String s = "a string";
// test boolean
boolean b = true;
// test JsonObject
String jk = "json key";
String jv = "json value";
JsonObject j = new JsonObject().put(jk, jv);
JsonObject j2 = JsonObject.of(jk, jv);
// test JsonArray
String v1 = "json array value 1";
String v2 = "json array value 2";
JsonArray a = new JsonArray().add(v1).add(v2);
// test null
JsonObject n = null;
JsonObject jobj = JsonObject.of("i", i, "s", s, "b", b, "j", j, "j2", j2, "a", a, "n", n);
assertEquals(i, jobj.getInteger("i"));
assertEquals(s, jobj.getString("s"));
assertEquals(b, jobj.getBoolean("b"));
assertEquals(j, jobj.getJsonObject("j"));
// j2 is also equals to j, different object with same value
assertEquals(j, jobj.getJsonObject("j2"));
assertEquals(a, jobj.getJsonArray("a"));
assertEquals(n, jobj.getJsonObject("n"));
}
@Test
public void testJsonObjectOfEmpty() {
assertEquals(new JsonObject(), JsonObject.of());
}
}
| cast |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/bytecode/enhancement/lazy/proxy/SetIdentifierOnAEnhancedProxyTest.java | {
"start": 8254,
"end": 8525
} | class ____ {
@Id
@GeneratedValue(strategy = GenerationType.AUTO)
Long id;
String name;
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
}
@Entity(name = "Child")
@Table(name = "CHILD")
static | Person |
java | netty__netty | transport-classes-kqueue/src/main/java/io/netty/channel/kqueue/BsdSocket.java | {
"start": 1539,
"end": 2079
} | class ____ extends Socket {
// These limits are just based on observations. I couldn't find anything in header files which formally
// define these limits.
private static final int APPLE_SND_LOW_AT_MAX = 1 << 17;
private static final int FREEBSD_SND_LOW_AT_MAX = 1 << 15;
static final int BSD_SND_LOW_AT_MAX = Math.min(APPLE_SND_LOW_AT_MAX, FREEBSD_SND_LOW_AT_MAX);
/**
* The `endpoints` structure passed to `connectx(2)` has an optional "source interface" field,
* which is the index of the network | BsdSocket |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/MethodCanBeStaticTest.java | {
"start": 9667,
"end": 9947
} | enum ____ {
VALUE {
private void foo() {}
}
}
""")
.doTest();
}
@Test
public void negativeAnonymous() {
testHelper
.addSourceLines(
"Test.java",
"""
| Test |
java | apache__kafka | clients/src/main/java/org/apache/kafka/common/internals/IdempotentCloser.java | {
"start": 2248,
"end": 6271
} | class ____ implements AutoCloseable {
private final AtomicBoolean isClosed;
/**
* Creates an {@code IdempotentCloser} that is not yet closed.
*/
public IdempotentCloser() {
this(false);
}
/**
* Creates an {@code IdempotentCloser} with the given initial state.
*
* @param isClosed Initial value for underlying state
*/
public IdempotentCloser(boolean isClosed) {
this.isClosed = new AtomicBoolean(isClosed);
}
/**
* This method serves as an assert that the {@link IdempotentCloser} is still open. If it is open, this method
* simply returns. If it is closed, a new {@link IllegalStateException} will be thrown using the supplied message.
*
* @param message {@link Supplier} that supplies the message for the exception
*/
public void assertOpen(Supplier<String> message) {
if (isClosed.get())
throw new IllegalStateException(message.get());
}
/**
* This method serves as an assert that the {@link IdempotentCloser} is still open. If it is open, this method
* simply returns. If it is closed, a new {@link IllegalStateException} will be thrown using the given message.
*
* @param message Message to use for the exception
*/
public void assertOpen(String message) {
if (isClosed.get())
throw new IllegalStateException(message);
}
public boolean isClosed() {
return isClosed.get();
}
/**
* Closes the resource in a thread-safe manner.
*
* <p/>
*
* After the execution has completed, calls to {@link #isClosed()} will return {@code false} and calls to
* {@link #assertOpen(String)} and {@link #assertOpen(Supplier)}
* will throw an {@link IllegalStateException}.
*/
@Override
public void close() {
close(null, null);
}
/**
* Closes the resource in a thread-safe manner.
*
* <p/>
*
* After the execution has completed, calls to {@link #isClosed()} will return {@code false} and calls to
* {@link #assertOpen(String)} and {@link #assertOpen(Supplier)}
* will throw an {@link IllegalStateException}.
*
* @param onInitialClose Optional {@link Runnable} to execute when the resource is closed. Note that the
* object will still be considered closed even if an exception is thrown during the course
* of its execution; can be {@code null}
*/
public void close(final Runnable onInitialClose) {
close(onInitialClose, null);
}
/**
* Closes the resource in a thread-safe manner.
*
* <p/>
*
* After the execution has completed, calls to {@link #isClosed()} will return {@code false} and calls to
* {@link #assertOpen(String)} and {@link #assertOpen(Supplier)}
* will throw an {@link IllegalStateException}.
*
* @param onInitialClose Optional {@link Runnable} to execute when the resource is closed. Note that the
* object will still be considered closed even if an exception is thrown during the course
* of its execution; can be {@code null}
* @param onSubsequentClose Optional {@link Runnable} to execute if this resource was previously closed. Note that
* no state will be affected if an exception is thrown during its execution; can be
* {@code null}
*/
public void close(final Runnable onInitialClose, final Runnable onSubsequentClose) {
if (isClosed.compareAndSet(false, true)) {
if (onInitialClose != null)
onInitialClose.run();
} else {
if (onSubsequentClose != null)
onSubsequentClose.run();
}
}
@Override
public String toString() {
return "IdempotentCloser{" +
"isClosed=" + isClosed +
'}';
}
} | IdempotentCloser |
java | google__dagger | javatests/dagger/internal/codegen/AssistedFactoryErrorsTest.java | {
"start": 19901,
"end": 21263
} | interface ____ {",
" @BindsInstance Builder foo(Foo foo);",
" @BindsInstance Builder fooFactory(Foo.Factory fooFactory);",
" FooComponent build();",
" }",
"}");
CompilerTests.daggerCompiler(foo, component)
.withProcessingOptions(compilerMode.processorOptions())
.compile(
subject -> {
subject.hasErrorCount(2);
subject
.hasErrorContaining(
"[test.Foo] Dagger does not support providing @AssistedInject types without a"
+ " qualifier.")
.onSource(component)
.onLine(10);
subject
.hasErrorContaining(
"[test.Foo.Factory] Dagger does not support providing @AssistedFactory "
+ "types.")
.onSource(component)
.onLine(11);
});
}
@Test
public void testProvidesAssistedBindingsAsOptional() {
Source foo =
CompilerTests.javaSource(
"test.Foo",
"package test;",
"",
"import dagger.assisted.Assisted;",
"import dagger.assisted.AssistedInject;",
"import dagger.assisted.AssistedFactory;",
"",
" | Builder |
java | elastic__elasticsearch | server/src/test/java/org/elasticsearch/common/time/DateFormattersTests.java | {
"start": 1781,
"end": 82147
} | class ____ extends ESTestCase {
private void assertParseException(String input, String format) {
DateFormatter javaTimeFormatter = DateFormatter.forPattern(format);
IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> javaTimeFormatter.parse(input));
assertThat(e.getMessage(), containsString(input));
assertThat(e.getMessage(), containsString(format));
assertThat(e.getCause(), instanceOf(DateTimeException.class));
}
private void assertParseException(String input, String format, int errorIndex) {
assertParseException(input, DateFormatter.forPattern(format), equalTo(errorIndex));
}
private void assertParseException(String input, DateFormatter formatter, int errorIndex) {
assertParseException(input, formatter, equalTo(errorIndex));
}
private void assertParseException(String input, DateFormatter formatter, Matcher<Integer> indexMatcher) {
IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> formatter.parse(input));
assertThat(e.getMessage(), containsString(input));
assertThat(e.getMessage(), containsString(formatter.pattern()));
assertThat(e.getCause(), instanceOf(DateTimeParseException.class));
assertThat(((DateTimeParseException) e.getCause()).getErrorIndex(), indexMatcher);
}
private void assertParses(String input, String format) {
DateFormatter javaFormatter = DateFormatter.forPattern(format);
assertParses(input, javaFormatter);
}
private void assertParses(String input, DateFormatter formatter) {
TemporalAccessor javaTimeAccessor = formatter.parse(input);
ZonedDateTime zonedDateTime = DateFormatters.from(javaTimeAccessor);
assertThat(zonedDateTime, notNullValue());
}
private void assertDateMathEquals(String text, String expected, String pattern) {
Locale locale = randomLocale(random());
assertDateMathEquals(text, expected, pattern, locale);
}
private void assertDateMathEquals(String text, String expected, String pattern, Locale locale) {
Instant gotInstant = dateMathToInstant(text, DateFormatter.forPattern(pattern), locale).truncatedTo(ChronoUnit.MILLIS);
Instant expectedInstant = DateFormatters.from(
DateFormatter.forPattern("strict_date_optional_time").withLocale(locale).parse(expected)
).toInstant().truncatedTo(ChronoUnit.MILLIS);
assertThat(gotInstant, equalTo(expectedInstant));
}
public void testWeekBasedDates() {
// the years and weeks this outputs depends on where the first day of the first week is for each year
DateFormatter dateFormatter = DateFormatters.forPattern("YYYY-ww");
assertThat(
DateFormatters.from(dateFormatter.parse("2016-02")),
equalTo(ZonedDateTime.of(2016, 01, 03, 0, 0, 0, 0, ZoneOffset.UTC))
);
assertThat(
DateFormatters.from(dateFormatter.parse("2015-02")),
equalTo(ZonedDateTime.of(2015, 01, 04, 0, 0, 0, 0, ZoneOffset.UTC))
);
dateFormatter = DateFormatters.forPattern("YYYY");
assertThat(DateFormatters.from(dateFormatter.parse("2016")), equalTo(ZonedDateTime.of(2015, 12, 27, 0, 0, 0, 0, ZoneOffset.UTC)));
assertThat(DateFormatters.from(dateFormatter.parse("2015")), equalTo(ZonedDateTime.of(2014, 12, 28, 0, 0, 0, 0, ZoneOffset.UTC)));
// the built-in formats use different week definitions (ISO instead of locale)
dateFormatter = DateFormatters.forPattern("weekyear_week");
assertThat(
DateFormatters.from(dateFormatter.parse("2016-W01")),
equalTo(ZonedDateTime.of(2016, 01, 04, 0, 0, 0, 0, ZoneOffset.UTC))
);
assertThat(
DateFormatters.from(dateFormatter.parse("2015-W01")),
equalTo(ZonedDateTime.of(2014, 12, 29, 0, 0, 0, 0, ZoneOffset.UTC))
);
dateFormatter = DateFormatters.forPattern("weekyear");
assertThat(DateFormatters.from(dateFormatter.parse("2016")), equalTo(ZonedDateTime.of(2016, 01, 04, 0, 0, 0, 0, ZoneOffset.UTC)));
assertThat(DateFormatters.from(dateFormatter.parse("2015")), equalTo(ZonedDateTime.of(2014, 12, 29, 0, 0, 0, 0, ZoneOffset.UTC)));
}
public void testEpochMillisParser() {
DateFormatter formatter = DateFormatters.forPattern("epoch_millis");
{
Instant instant = Instant.from(formatter.parse("12345"));
assertThat(instant.getEpochSecond(), is(12L));
assertThat(instant.getNano(), is(345_000_000));
assertThat(formatter.format(instant), is("12345"));
assertThat(Instant.from(formatter.parse(formatter.format(instant))), is(instant));
}
{
Instant instant = Instant.from(formatter.parse("0"));
assertThat(instant.getEpochSecond(), is(0L));
assertThat(instant.getNano(), is(0));
assertThat(formatter.format(instant), is("0"));
assertThat(Instant.from(formatter.parse(formatter.format(instant))), is(instant));
}
{
Instant instant = Instant.from(formatter.parse("0.1"));
assertThat(instant.getEpochSecond(), is(0L));
assertThat(instant.getNano(), is(100_000));
assertThat(formatter.format(instant), is("0.1"));
assertThat(Instant.from(formatter.parse(formatter.format(instant))), is(instant));
}
{
Instant instant = Instant.from(formatter.parse("123.123456"));
assertThat(instant.getEpochSecond(), is(0L));
assertThat(instant.getNano(), is(123123456));
assertThat(formatter.format(instant), is("123.123456"));
assertThat(Instant.from(formatter.parse(formatter.format(instant))), is(instant));
}
{
Instant instant = Instant.from(formatter.parse("-123.123456"));
assertThat(instant.getEpochSecond(), is(-1L));
assertThat(instant.getNano(), is(876876544));
assertThat(formatter.format(instant), is("-123.123456"));
assertThat(Instant.from(formatter.parse(formatter.format(instant))), is(instant));
}
{
Instant instant = Instant.from(formatter.parse("-6789123.123456"));
assertThat(instant.getEpochSecond(), is(-6790L));
assertThat(instant.getNano(), is(876876544));
assertThat(formatter.format(instant), is("-6789123.123456"));
assertThat(Instant.from(formatter.parse(formatter.format(instant))), is(instant));
}
{
Instant instant = Instant.from(formatter.parse("6789123.123456"));
assertThat(instant.getEpochSecond(), is(6789L));
assertThat(instant.getNano(), is(123123456));
assertThat(formatter.format(instant), is("6789123.123456"));
assertThat(Instant.from(formatter.parse(formatter.format(instant))), is(instant));
}
{
Instant instant = Instant.from(formatter.parse("-6250000430768.25"));
assertThat(instant.getEpochSecond(), is(-6250000431L));
assertThat(instant.getNano(), is(231750000));
assertThat(formatter.format(instant), is("-6250000430768.25"));
assertThat(Instant.from(formatter.parse(formatter.format(instant))), is(instant));
}
{
Instant instant = Instant.from(formatter.parse("-6250000430768.75"));
assertThat(instant.getEpochSecond(), is(-6250000431L));
assertThat(instant.getNano(), is(231250000));
assertThat(formatter.format(instant), is("-6250000430768.75"));
assertThat(Instant.from(formatter.parse(formatter.format(instant))), is(instant));
}
{
Instant instant = Instant.from(formatter.parse("-6250000430768.00"));
assertThat(instant.getEpochSecond(), is(-6250000431L));
assertThat(instant.getNano(), is(232000000));
assertThat(formatter.format(instant), is("-6250000430768")); // remove .00 precision
assertThat(Instant.from(formatter.parse(formatter.format(instant))), is(instant));
}
{
Instant instant = Instant.from(formatter.parse("-6250000431000.250000"));
assertThat(instant.getEpochSecond(), is(-6250000432L));
assertThat(instant.getNano(), is(999750000));
assertThat(formatter.format(instant), is("-6250000431000.25"));
assertThat(Instant.from(formatter.parse(formatter.format(instant))), is(instant));
}
{
Instant instant = Instant.from(formatter.parse("-6250000431000.000001"));
assertThat(instant.getEpochSecond(), is(-6250000432L));
assertThat(instant.getNano(), is(999999999));
assertThat(formatter.format(instant), is("-6250000431000.000001"));
assertThat(Instant.from(formatter.parse(formatter.format(instant))), is(instant));
}
{
Instant instant = Instant.from(formatter.parse("-6250000431000.75"));
assertThat(instant.getEpochSecond(), is(-6250000432L));
assertThat(instant.getNano(), is(999250000));
assertThat(formatter.format(instant), is("-6250000431000.75"));
assertThat(Instant.from(formatter.parse(formatter.format(instant))), is(instant));
}
{
Instant instant = Instant.from(formatter.parse("-6250000431000.00"));
assertThat(instant.getEpochSecond(), is(-6250000431L));
assertThat(instant.getNano(), is(0));
assertThat(formatter.format(instant), is("-6250000431000"));
assertThat(Instant.from(formatter.parse(formatter.format(instant))), is(instant));
}
{
Instant instant = Instant.from(formatter.parse("-6250000431000"));
assertThat(instant.getEpochSecond(), is(-6250000431L));
assertThat(instant.getNano(), is(0));
assertThat(formatter.format(instant), is("-6250000431000"));
assertThat(Instant.from(formatter.parse(formatter.format(instant))), is(instant));
}
{
Instant instant = Instant.from(formatter.parse("-6250000430768"));
assertThat(instant.getEpochSecond(), is(-6250000431L));
assertThat(instant.getNano(), is(232000000));
assertThat(formatter.format(instant), is("-6250000430768"));
assertThat(Instant.from(formatter.parse(formatter.format(instant))), is(instant));
}
{
Instant instant = Instant.from(formatter.parse("1680000430768"));
assertThat(instant.getEpochSecond(), is(1680000430L));
assertThat(instant.getNano(), is(768000000));
assertThat(formatter.format(instant), is("1680000430768"));
assertThat(Instant.from(formatter.parse(formatter.format(instant))), is(instant));
}
{
Instant instant = Instant.from(formatter.parse("-0.12345"));
assertThat(instant.getEpochSecond(), is(-1L));
assertThat(instant.getNano(), is(999876550));
assertThat(formatter.format(instant), is("-0.12345"));
assertThat(Instant.from(formatter.parse(formatter.format(instant))), is(instant));
}
{
Instant instant = Instant.from(formatter.parse("12345."));
assertThat(instant.getEpochSecond(), is(12L));
assertThat(instant.getNano(), is(345_000_000));
assertThat(formatter.format(instant), is("12345"));
assertThat(Instant.from(formatter.parse(formatter.format(instant))), is(instant));
}
{
IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> formatter.parse("12345.0."));
assertThat(e.getMessage(), is("failed to parse date field [12345.0.] with format [epoch_millis]"));
}
{
Instant instant = Instant.from(formatter.parse("-86400000"));
assertThat(instant.getEpochSecond(), is(-86400L));
assertThat(instant.getNano(), is(0));
assertThat(formatter.format(instant), is("-86400000"));
assertThat(Instant.from(formatter.parse(formatter.format(instant))), is(instant));
}
{
Instant instant = Instant.from(formatter.parse("-86400000.999999"));
assertThat(instant.getEpochSecond(), is(-86401L));
assertThat(instant.getNano(), is(999000001));
assertThat(formatter.format(instant), is("-86400000.999999"));
assertThat(Instant.from(formatter.parse(formatter.format(instant))), is(instant));
}
}
/**
* test that formatting a date with Long.MAX_VALUE or Long.MIN_VALUE doesn throw errors since we use these
* e.g. for sorting documents with `null` values first or last
*/
public void testPrintersLongMinMaxValue() {
for (FormatNames format : FormatNames.values()) {
DateFormatter formatter = DateFormatters.forPattern(format.getName());
formatter.format(DateFieldMapper.Resolution.MILLISECONDS.toInstant(DateUtils.MAX_MILLIS_BEFORE_9999));
formatter.format(DateFieldMapper.Resolution.MILLISECONDS.toInstant(DateUtils.MAX_MILLIS_BEFORE_MINUS_9999));
}
}
public void testInvalidEpochMilliParser() {
DateFormatter formatter = DateFormatters.forPattern("epoch_millis");
IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> formatter.parse("invalid"));
assertThat(e.getMessage(), containsString("failed to parse date field [invalid] with format [epoch_millis]"));
e = expectThrows(IllegalArgumentException.class, () -> formatter.parse("123.1234567"));
assertThat(e.getMessage(), containsString("failed to parse date field [123.1234567] with format [epoch_millis]"));
}
public void testEpochSecondParserWithFraction() {
DateFormatter formatter = DateFormatters.forPattern("epoch_second");
TemporalAccessor accessor = formatter.parse("1234.1");
Instant instant = DateFormatters.from(accessor).toInstant();
assertThat(instant.getEpochSecond(), is(1234L));
assertThat(instant.getNano(), is(100_000_000));
assertThat(Instant.from(formatter.parse(formatter.format(instant))), is(instant));
accessor = formatter.parse("1234");
instant = DateFormatters.from(accessor).toInstant();
assertThat(instant.getEpochSecond(), is(1234L));
assertThat(instant.getNano(), is(0));
assertThat(Instant.from(formatter.parse(formatter.format(instant))), is(instant));
accessor = formatter.parse("1234.890");
instant = DateFormatters.from(accessor).toInstant();
assertThat(instant.getEpochSecond(), is(1234L));
assertThat(instant.getNano(), is(890_000_000));
assertThat(Instant.from(formatter.parse(formatter.format(instant))), is(instant));
accessor = formatter.parse("0.1");
instant = DateFormatters.from(accessor).toInstant();
assertThat(instant.getEpochSecond(), is(0L));
assertThat(instant.getNano(), is(100_000_000));
assertThat(Instant.from(formatter.parse(formatter.format(instant))), is(instant));
accessor = formatter.parse("0.890");
instant = DateFormatters.from(accessor).toInstant();
assertThat(instant.getEpochSecond(), is(0L));
assertThat(instant.getNano(), is(890_000_000));
assertThat(Instant.from(formatter.parse(formatter.format(instant))), is(instant));
accessor = formatter.parse("0");
instant = DateFormatters.from(accessor).toInstant();
assertThat(instant.getEpochSecond(), is(0L));
assertThat(instant.getNano(), is(0));
assertThat(Instant.from(formatter.parse(formatter.format(instant))), is(instant));
accessor = formatter.parse("-1234.1");
instant = DateFormatters.from(accessor).toInstant();
assertThat(instant.getEpochSecond(), is(-1235L));
assertThat(instant.getNano(), is(900_000_000));
assertThat(Instant.from(formatter.parse(formatter.format(instant))), is(instant));
accessor = formatter.parse("-1234");
instant = DateFormatters.from(accessor).toInstant();
assertThat(instant.getEpochSecond(), is(-1234L));
assertThat(instant.getNano(), is(0));
assertThat(Instant.from(formatter.parse(formatter.format(instant))), is(instant));
accessor = formatter.parse("-1234.890");
instant = DateFormatters.from(accessor).toInstant();
assertThat(instant.getEpochSecond(), is(-1235L));
assertThat(instant.getNano(), is(110_000_000));
assertThat(Instant.from(formatter.parse(formatter.format(instant))), is(instant));
accessor = formatter.parse("-0.1");
instant = DateFormatters.from(accessor).toInstant();
assertThat(instant.getEpochSecond(), is(-1L));
assertThat(instant.getNano(), is(900_000_000));
assertThat(Instant.from(formatter.parse(formatter.format(instant))), is(instant));
accessor = formatter.parse("-0.890");
instant = DateFormatters.from(accessor).toInstant();
assertThat(instant.getEpochSecond(), is(-1L));
assertThat(instant.getNano(), is(110_000_000));
assertThat(Instant.from(formatter.parse(formatter.format(instant))), is(instant));
IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> formatter.parse("abc"));
assertThat(e.getMessage(), is("failed to parse date field [abc] with format [epoch_second]"));
e = expectThrows(IllegalArgumentException.class, () -> formatter.parse("1234.abc"));
assertThat(e.getMessage(), is("failed to parse date field [1234.abc] with format [epoch_second]"));
e = expectThrows(IllegalArgumentException.class, () -> formatter.parse("1234.1234567890"));
assertThat(e.getMessage(), is("failed to parse date field [1234.1234567890] with format [epoch_second]"));
}
public void testEpochMilliParsersWithDifferentFormatters() {
DateFormatter formatter = DateFormatter.forPattern("strict_date_optional_time||epoch_millis");
TemporalAccessor accessor = formatter.parse("123");
assertThat(DateFormatters.from(accessor).toInstant().toEpochMilli(), is(123L));
assertThat(formatter.pattern(), is("strict_date_optional_time||epoch_millis"));
}
public void testParsersWithMultipleInternalFormats() throws Exception {
ZonedDateTime first = DateFormatters.from(
DateFormatters.forPattern("strict_date_optional_time_nanos").parse("2018-05-15T17:14:56+0100")
);
ZonedDateTime second = DateFormatters.from(
DateFormatters.forPattern("strict_date_optional_time_nanos").parse("2018-05-15T17:14:56+01:00")
);
assertThat(first, is(second));
}
public void testNanoOfSecondWidth() throws Exception {
ZonedDateTime first = DateFormatters.from(
DateFormatters.forPattern("strict_date_optional_time_nanos").parse("1970-01-01T00:00:00.1")
);
assertThat(first.getNano(), is(100000000));
ZonedDateTime second = DateFormatters.from(
DateFormatters.forPattern("strict_date_optional_time_nanos").parse("1970-01-01T00:00:00.000000001")
);
assertThat(second.getNano(), is(1));
}
public void testLocales() {
assertThat(DateFormatters.forPattern("strict_date_optional_time").locale(), is(Locale.ROOT));
Locale locale = randomLocale(random());
assertThat(DateFormatters.forPattern("strict_date_optional_time").withLocale(locale).locale(), is(locale));
}
public void testTimeZones() {
// zone is null by default due to different behaviours between java8 and above
assertThat(DateFormatters.forPattern("strict_date_optional_time").zone(), is(nullValue()));
ZoneId zoneId = randomZone();
assertThat(DateFormatters.forPattern("strict_date_optional_time").withZone(zoneId).zone(), is(zoneId));
}
    /**
     * Verifies the equals/hashCode behaviour of formatters: cached named formatters return
     * the very same instance, equal patterns compare equal, and a differing zone, locale or
     * pattern breaks equality.
     */
    public void testEqualsAndHashcode() {
        // named formatters are cached, so the identical instance is handed out again
        assertThat(
            DateFormatters.forPattern("strict_date_optional_time"),
            sameInstance(DateFormatters.forPattern("strict_date_optional_time"))
        );
        assertThat(DateFormatters.forPattern("YYYY"), equalTo(DateFormatters.forPattern("YYYY")));
        assertThat(DateFormatters.forPattern("YYYY").hashCode(), is(DateFormatters.forPattern("YYYY").hashCode()));
        // different timezone, thus not equals
        assertThat(DateFormatters.forPattern("YYYY").withZone(ZoneId.of("CET")), not(equalTo(DateFormatters.forPattern("YYYY"))));
        // different locale, thus not equals
        DateFormatter f1 = DateFormatters.forPattern("YYYY").withLocale(Locale.CANADA);
        DateFormatter f2 = f1.withLocale(Locale.FRENCH);
        assertThat(f1, not(equalTo(f2)));
        // different pattern, thus not equals
        assertThat(DateFormatters.forPattern("YYYY"), not(equalTo(DateFormatters.forPattern("YY"))));
        // named epoch formatters are cached too, and equals/hashCode agree with identity
        DateFormatter epochSecondFormatter = DateFormatters.forPattern("epoch_second");
        assertThat(epochSecondFormatter, sameInstance(DateFormatters.forPattern("epoch_second")));
        assertThat(epochSecondFormatter, equalTo(DateFormatters.forPattern("epoch_second")));
        assertThat(epochSecondFormatter.hashCode(), is(DateFormatters.forPattern("epoch_second").hashCode()));
        DateFormatter epochMillisFormatter = DateFormatters.forPattern("epoch_millis");
        assertThat(epochMillisFormatter.hashCode(), is(DateFormatters.forPattern("epoch_millis").hashCode()));
        assertThat(epochMillisFormatter, sameInstance(DateFormatters.forPattern("epoch_millis")));
        assertThat(epochMillisFormatter, equalTo(DateFormatters.forPattern("epoch_millis")));
    }
public void testSupportBackwardsJava8Format() {
assertThat(DateFormatter.forPattern("8yyyy-MM-dd"), instanceOf(JavaDateFormatter.class));
// named formats too
assertThat(DateFormatter.forPattern("8date_optional_time"), instanceOf(JavaDateFormatter.class));
// named formats too
DateFormatter formatter = DateFormatter.forPattern("8date_optional_time||ww-MM-dd");
assertThat(formatter, instanceOf(JavaDateFormatter.class));
}
public void testEpochFormattingPositiveEpoch() {
long seconds = randomLongBetween(0, 130L * 365 * 86400); // from 1970 epoch till around 2100
long nanos = randomLongBetween(0, 999_999_999L);
Instant instant = Instant.ofEpochSecond(seconds, nanos);
DateFormatter millisFormatter = DateFormatter.forPattern("epoch_millis");
String millis = millisFormatter.format(instant);
Instant millisInstant = Instant.from(millisFormatter.parse(millis));
assertThat(millisInstant.toEpochMilli(), is(instant.toEpochMilli()));
assertThat(millisFormatter.format(Instant.ofEpochSecond(42, 0)), is("42000"));
assertThat(millisFormatter.format(Instant.ofEpochSecond(42, 123456789L)), is("42123.456789"));
DateFormatter secondsFormatter = DateFormatter.forPattern("epoch_second");
String formattedSeconds = secondsFormatter.format(instant);
Instant secondsInstant = Instant.from(secondsFormatter.parse(formattedSeconds));
assertThat(secondsInstant.getEpochSecond(), is(instant.getEpochSecond()));
assertThat(secondsFormatter.format(Instant.ofEpochSecond(42, 0)), is("42"));
}
public void testEpochFormattingNegativeEpoch() {
long seconds = randomLongBetween(-130L * 365 * 86400, 0); // around 1840 till 1970 epoch
long nanos = randomLongBetween(0, 999_999_999L);
Instant instant = Instant.ofEpochSecond(seconds, nanos);
DateFormatter millisFormatter = DateFormatter.forPattern("epoch_millis");
String millis = millisFormatter.format(instant);
Instant millisInstant = Instant.from(millisFormatter.parse(millis));
assertThat(millisInstant.toEpochMilli(), is(instant.toEpochMilli()));
assertThat(millisFormatter.format(Instant.ofEpochSecond(-42, 0)), is("-42000"));
assertThat(millisFormatter.format(Instant.ofEpochSecond(-42, 123456789L)), is("-41876.543211"));
DateFormatter secondsFormatter = DateFormatter.forPattern("epoch_second");
String formattedSeconds = secondsFormatter.format(instant);
Instant secondsInstant = Instant.from(secondsFormatter.parse(formattedSeconds));
assertThat(secondsInstant.getEpochSecond(), is(instant.getEpochSecond()));
assertThat(secondsFormatter.format(Instant.ofEpochSecond(42, 0)), is("42"));
}
public void testEpochAndIso8601RoundTripNegative() {
long seconds = randomLongBetween(-130L * 365 * 86400, 0); // around 1840 till 1970 epoch
long nanos = randomLongBetween(0, 999_999_999L);
Instant instant = Instant.ofEpochSecond(seconds, nanos);
DateFormatter isoFormatter = DateFormatters.forPattern("strict_date_optional_time_nanos");
DateFormatter millisFormatter = DateFormatter.forPattern("epoch_millis");
String millis = millisFormatter.format(instant);
String iso8601 = isoFormatter.format(instant);
Instant millisInstant = Instant.from(millisFormatter.parse(millis));
Instant isoInstant = Instant.from(isoFormatter.parse(iso8601));
assertThat(millisInstant.toEpochMilli(), is(isoInstant.toEpochMilli()));
assertThat(millisInstant.getEpochSecond(), is(isoInstant.getEpochSecond()));
assertThat(millisInstant.getNano(), is(isoInstant.getNano()));
}
public void testEpochAndIso8601RoundTripPositive() {
long seconds = randomLongBetween(0, 130L * 365 * 86400); // from 1970 epoch till around 2100
long nanos = randomLongBetween(0, 999_999_999L);
Instant instant = Instant.ofEpochSecond(seconds, nanos);
DateFormatter isoFormatter = DateFormatters.forPattern("strict_date_optional_time_nanos");
DateFormatter millisFormatter = DateFormatter.forPattern("epoch_millis");
String millis = millisFormatter.format(instant);
String iso8601 = isoFormatter.format(instant);
Instant millisInstant = Instant.from(millisFormatter.parse(millis));
Instant isoInstant = Instant.from(isoFormatter.parse(iso8601));
assertThat(millisInstant.toEpochMilli(), is(isoInstant.toEpochMilli()));
assertThat(millisInstant.getEpochSecond(), is(isoInstant.getEpochSecond()));
assertThat(millisInstant.getNano(), is(isoInstant.getNano()));
}
    /**
     * strict_date_optional_time_nanos must round-trip (parse then format) timestamps with
     * and without a fractional part, and with Z, +HHmm and +HH:mm style offsets.
     */
    public void testParsingStrictNanoDates() {
        DateFormatter formatter = DateFormatters.forPattern("strict_date_optional_time_nanos");
        formatter.format(formatter.parse("2016-01-01T00:00:00.000"));
        formatter.format(formatter.parse("2018-05-15T17:14:56"));
        formatter.format(formatter.parse("2018-05-15T17:14:56Z"));
        formatter.format(formatter.parse("2018-05-15T17:14:56+0100"));
        formatter.format(formatter.parse("2018-05-15T17:14:56+01:00"));
        formatter.format(formatter.parse("2018-05-15T17:14:56.123456789Z"));
        formatter.format(formatter.parse("2018-05-15T17:14:56.123456789+0100"));
        formatter.format(formatter.parse("2018-05-15T17:14:56.123456789+01:00"));
    }
    /**
     * The "iso8601" formatter must round-trip partial dates, times at every precision
     * (hour/minute/second/millis/micros/nanos), and Z / +HHmm / +HH:mm offsets.
     */
    public void testIso8601Parsing() {
        DateFormatter formatter = DateFormatters.forPattern("iso8601");
        // a bare date may not carry a timezone, so none is appended here
        formatter.format(formatter.parse("2018-05-15"));
        formatter.format(formatter.parse("2018-05-15T17"));
        formatter.format(formatter.parse("2018-05-15T17Z"));
        formatter.format(formatter.parse("2018-05-15T17+0100"));
        formatter.format(formatter.parse("2018-05-15T17+01:00"));
        formatter.format(formatter.parse("2018-05-15T17:14"));
        formatter.format(formatter.parse("2018-05-15T17:14Z"));
        formatter.format(formatter.parse("2018-05-15T17:14-0100"));
        formatter.format(formatter.parse("2018-05-15T17:14-01:00"));
        formatter.format(formatter.parse("2018-05-15T17:14:56"));
        formatter.format(formatter.parse("2018-05-15T17:14:56Z"));
        formatter.format(formatter.parse("2018-05-15T17:14:56+0100"));
        formatter.format(formatter.parse("2018-05-15T17:14:56+01:00"));
        // milliseconds can be separated using comma or decimal point
        formatter.format(formatter.parse("2018-05-15T17:14:56.123"));
        formatter.format(formatter.parse("2018-05-15T17:14:56.123Z"));
        formatter.format(formatter.parse("2018-05-15T17:14:56.123-0100"));
        formatter.format(formatter.parse("2018-05-15T17:14:56.123-01:00"));
        formatter.format(formatter.parse("2018-05-15T17:14:56,123"));
        formatter.format(formatter.parse("2018-05-15T17:14:56,123Z"));
        formatter.format(formatter.parse("2018-05-15T17:14:56,123+0100"));
        formatter.format(formatter.parse("2018-05-15T17:14:56,123+01:00"));
        // microseconds can be separated using comma or decimal point
        formatter.format(formatter.parse("2018-05-15T17:14:56.123456"));
        formatter.format(formatter.parse("2018-05-15T17:14:56.123456Z"));
        formatter.format(formatter.parse("2018-05-15T17:14:56.123456+0100"));
        formatter.format(formatter.parse("2018-05-15T17:14:56.123456+01:00"));
        formatter.format(formatter.parse("2018-05-15T17:14:56,123456"));
        formatter.format(formatter.parse("2018-05-15T17:14:56,123456Z"));
        formatter.format(formatter.parse("2018-05-15T17:14:56,123456-0100"));
        formatter.format(formatter.parse("2018-05-15T17:14:56,123456-01:00"));
        // nanoseconds can be separated using comma or decimal point
        formatter.format(formatter.parse("2018-05-15T17:14:56.123456789"));
        formatter.format(formatter.parse("2018-05-15T17:14:56.123456789Z"));
        formatter.format(formatter.parse("2018-05-15T17:14:56.123456789-0100"));
        formatter.format(formatter.parse("2018-05-15T17:14:56.123456789-01:00"));
        formatter.format(formatter.parse("2018-05-15T17:14:56,123456789"));
        formatter.format(formatter.parse("2018-05-15T17:14:56,123456789Z"));
        formatter.format(formatter.parse("2018-05-15T17:14:56,123456789+0100"));
        formatter.format(formatter.parse("2018-05-15T17:14:56,123456789+01:00"));
    }
    /**
     * Round-up parsing (used for the upper end of range queries) must fill unspecified
     * fields with their maximum: epoch_millis rounds nanos up to .999_999, epoch_second
     * rounds to the last milli/nano of that second, and a bare date rounds to end of day.
     */
    public void testRoundupFormatterWithEpochDates() {
        assertRoundupFormatter("epoch_millis", "1234567890", 1234567890L);
        // also check nanos of the epoch_millis formatter if it is rounded up to the nano second
        var formatter = (JavaDateFormatter) DateFormatter.forPattern("8epoch_millis");
        Instant epochMilliInstant = DateFormatters.from(formatter.roundupParse("1234567890")).toInstant();
        assertThat(epochMilliInstant.getLong(ChronoField.NANO_OF_SECOND), is(890_999_999L));
        assertRoundupFormatter("strict_date_optional_time||epoch_millis", "2018-10-10T12:13:14.123Z", 1539173594123L);
        assertRoundupFormatter("strict_date_optional_time||epoch_millis", "1234567890", 1234567890L);
        // a bare date rounds up to the last millisecond of that day
        assertRoundupFormatter("strict_date_optional_time||epoch_millis", "2018-10-10", 1539215999999L);
        assertRoundupFormatter("strict_date_optional_time||epoch_millis", "2019-01-25T15:37:17.346928Z", 1548430637346L);
        assertRoundupFormatter("uuuu-MM-dd'T'HH:mm:ss.SSS||epoch_millis", "2018-10-10T12:13:14.123", 1539173594123L);
        assertRoundupFormatter("uuuu-MM-dd'T'HH:mm:ss.SSS||epoch_millis", "1234567890", 1234567890L);
        // epoch_second rounds up to the last millisecond of that second
        assertRoundupFormatter("epoch_second", "1234567890", 1234567890999L);
        // also check nanos of the epoch_second formatter if it is rounded up to the nano second
        formatter = (JavaDateFormatter) DateFormatter.forPattern("8epoch_second");
        Instant epochSecondInstant = DateFormatters.from(formatter.roundupParse("1234567890")).toInstant();
        assertThat(epochSecondInstant.getLong(ChronoField.NANO_OF_SECOND), is(999_999_999L));
        assertRoundupFormatter("strict_date_optional_time||epoch_second", "2018-10-10T12:13:14.123Z", 1539173594123L);
        assertRoundupFormatter("strict_date_optional_time||epoch_second", "1234567890", 1234567890999L);
        assertRoundupFormatter("strict_date_optional_time||epoch_second", "2018-10-10", 1539215999999L);
        assertRoundupFormatter("uuuu-MM-dd'T'HH:mm:ss.SSS||epoch_second", "2018-10-10T12:13:14.123", 1539173594123L);
        assertRoundupFormatter("uuuu-MM-dd'T'HH:mm:ss.SSS||epoch_second", "1234567890", 1234567890999L);
    }
    /**
     * A year-only pattern rounds a bare year up to the end of January 1st of that year.
     * Week-based years ("YYYY") may shift the result depending on the locale's week rules.
     */
    public void testYearWithoutMonthRoundUp() {
        assertDateMathEquals("1500", "1500-01-01T23:59:59.999", "uuuu");
        assertDateMathEquals("2022", "2022-01-01T23:59:59.999", "uuuu");
        assertDateMathEquals("2022", "2022-01-01T23:59:59.999", "yyyy");
        // week-based year "YYYY" resolves to the first day of week 1, which under ROOT's
        // week rules falls into the previous calendar year
        assertDateMathEquals("2022", "2021-12-26T23:59:59.999", "YYYY", Locale.ROOT);
    }
private void assertRoundupFormatter(String format, String input, long expectedMilliSeconds) {
JavaDateFormatter dateFormatter = (JavaDateFormatter) DateFormatter.forPattern(format);
dateFormatter.parse(input);
long millis = DateFormatters.from(dateFormatter.roundupParse(input)).toInstant().toEpochMilli();
assertThat(millis, is(expectedMilliSeconds));
}
public void testRoundupFormatterZone() {
ZoneId zoneId = randomZone();
String format = randomFrom(
"epoch_second",
"epoch_millis",
"strict_date_optional_time",
"uuuu-MM-dd'T'HH:mm:ss.SSS",
"strict_date_optional_time||date_optional_time"
);
JavaDateFormatter formatter = (JavaDateFormatter) DateFormatter.forPattern(format).withZone(zoneId);
assertThat(formatter.zone(), is(zoneId));
assertThat(List.of(formatter.roundupParsers), transformedItemsMatch(DateTimeParser::getZone, everyItem(is(zoneId))));
}
public void testRoundupFormatterLocale() {
Locale locale = randomLocale(random());
String format = randomFrom(
"epoch_second",
"epoch_millis",
"strict_date_optional_time",
"uuuu-MM-dd'T'HH:mm:ss.SSS",
"strict_date_optional_time||date_optional_time"
);
JavaDateFormatter formatter = (JavaDateFormatter) DateFormatter.forPattern(format).withLocale(locale);
assertThat(formatter.locale(), is(locale));
assertThat(List.of(formatter.roundupParsers), transformedItemsMatch(DateTimeParser::getLocale, everyItem(is(locale))));
}
public void test0MillisAreFormatted() {
DateFormatter formatter = DateFormatter.forPattern("strict_date_time");
Clock clock = Clock.fixed(ZonedDateTime.of(2019, 02, 8, 11, 43, 00, 0, ZoneOffset.UTC).toInstant(), ZoneOffset.UTC);
String formatted = formatter.formatMillis(clock.millis());
assertThat(formatted, is("2019-02-08T11:43:00.000Z"));
}
public void testFractionalSeconds() {
DateFormatter formatter = DateFormatters.forPattern("strict_date_optional_time");
{
Instant instant = Instant.from(formatter.parse("2019-05-06T14:52:37.1Z"));
assertThat(instant.getNano(), is(100_000_000));
}
{
Instant instant = Instant.from(formatter.parse("2019-05-06T14:52:37.12Z"));
assertThat(instant.getNano(), is(120_000_000));
}
{
Instant instant = Instant.from(formatter.parse("2019-05-06T14:52:37.123Z"));
assertThat(instant.getNano(), is(123_000_000));
}
{
Instant instant = Instant.from(formatter.parse("2019-05-06T14:52:37.1234Z"));
assertThat(instant.getNano(), is(123_400_000));
}
{
Instant instant = Instant.from(formatter.parse("2019-05-06T14:52:37.12345Z"));
assertThat(instant.getNano(), is(123_450_000));
}
{
Instant instant = Instant.from(formatter.parse("2019-05-06T14:52:37.123456Z"));
assertThat(instant.getNano(), is(123_456_000));
}
{
Instant instant = Instant.from(formatter.parse("2019-05-06T14:52:37.1234567Z"));
assertThat(instant.getNano(), is(123_456_700));
}
{
Instant instant = Instant.from(formatter.parse("2019-05-06T14:52:37.12345678Z"));
assertThat(instant.getNano(), is(123_456_780));
}
{
Instant instant = Instant.from(formatter.parse("2019-05-06T14:52:37.123456789Z"));
assertThat(instant.getNano(), is(123_456_789));
}
}
    /** Using '-' instead of ':' as a time separator must be rejected, with and without fallback. */
    public void testIncorrectFormat() {
        assertParseException("2021-01-01T23-35-00Z", "strict_date_optional_time||epoch_millis");
        assertParseException("2021-01-01T23-35-00Z", "strict_date_optional_time");
    }
    /** Formatting Long.MIN_VALUE millis must not overflow and yields the far-past ISO date. */
    public void testMinMillis() {
        String javaFormatted = DateFormatter.forPattern("strict_date_optional_time").formatMillis(Long.MIN_VALUE);
        assertThat(javaFormatted, equalTo("-292275055-05-16T16:47:04.192Z"));
    }
    /** Formatting Long.MIN_VALUE nanoseconds must not overflow (resolves to 1677-09-21). */
    public void testMinNanos() {
        String javaFormatted = DateFormatter.forPattern("strict_date_optional_time").formatNanos(Long.MIN_VALUE);
        assertThat(javaFormatted, equalTo("1677-09-21T00:12:43.145Z"));
        // Note - since this is a negative value, the nanoseconds are being subtracted, which is why we get this value.
        javaFormatted = DateFormatter.forPattern("strict_date_optional_time_nanos").formatNanos(Long.MIN_VALUE);
        assertThat(javaFormatted, equalTo("1677-09-21T00:12:43.145224192Z"));
    }
    /** Formatting Long.MAX_VALUE nanoseconds must not overflow (resolves to 2262-04-11). */
    public void testMaxNanos() {
        String javaFormatted = DateFormatter.forPattern("strict_date_optional_time").formatNanos(Long.MAX_VALUE);
        assertThat(javaFormatted, equalTo("2262-04-11T23:47:16.854Z"));
        javaFormatted = DateFormatter.forPattern("strict_date_optional_time_nanos").formatNanos(Long.MAX_VALUE);
        assertThat(javaFormatted, equalTo("2262-04-11T23:47:16.854775807Z"));
    }
    /**
     * In a composite with epoch_millis, a 4-digit number parses as a year while a
     * 5-digit number falls through to the epoch-millis parser.
     */
    public void testYearParsing() {
        // this one is considered a year
        assertParses("1234", "strict_date_optional_time||epoch_millis");
        // this one is considered 12345 milliseconds since the epoch
        assertParses("12345", "strict_date_optional_time||epoch_millis");
    }
    /** An hour-only timestamp accepts +HH, +HHmm and +HH:mm style offsets. */
    public void testTimezoneParsing() {
        assertParses("2016-11-30T00+01", "strict_date_optional_time");
        assertParses("2016-11-30T00+0100", "strict_date_optional_time");
        assertParses("2016-11-30T00+01:00", "strict_date_optional_time");
    }
    /**
     * The time part after 'T' may be truncated at any component, with or without an
     * offset — but an offset without any time at all is rejected.
     */
    public void testPartialTimeParsing() {
        /*
        This does not work in Joda as it reports 2016-11-30T01:00:00Z
        because StrictDateOptionalTime confuses +01 with an hour (which is a signed fixed length digit)
        assertSameDateAs("2016-11-30T+01", "strict_date_optional_time", "strict_date_optional_time");
        ES java.time implementation does not suffer from this,
        but we intentionally do not allow parsing a timezone without a time part as it is not allowed in iso8601
        */
        assertParseException("2016-11-30T+01", "strict_date_optional_time", 11);
        // partial times with an offset
        assertParses("2016-11-30T12+01", "strict_date_optional_time");
        assertParses("2016-11-30T12:00+01", "strict_date_optional_time");
        assertParses("2016-11-30T12:00:00+01", "strict_date_optional_time");
        assertParses("2016-11-30T12:00:00.000+01", "strict_date_optional_time");
        // without timezone
        assertParses("2016-11-30T", "strict_date_optional_time");
        assertParses("2016-11-30T12", "strict_date_optional_time");
        assertParses("2016-11-30T12:00", "strict_date_optional_time");
        assertParses("2016-11-30T12:00:00", "strict_date_optional_time");
        assertParses("2016-11-30T12:00:00.000", "strict_date_optional_time");
    }
    // The "date_optional" part of parser names like "strict_date_optional_time" or
    // "date_optional_time" means that the date part itself can be partially parsed.
    public void testPartialDateParsing() {
        assertParses("2001", "strict_date_optional_time_nanos");
        assertParses("2001-01", "strict_date_optional_time_nanos");
        assertParses("2001-01-01", "strict_date_optional_time_nanos");
        assertParses("2001", "strict_date_optional_time");
        assertParses("2001-01", "strict_date_optional_time");
        assertParses("2001-01-01", "strict_date_optional_time");
        assertParses("2001", "date_optional_time");
        assertParses("2001-01", "date_optional_time");
        assertParses("2001-01-01", "date_optional_time");
        assertParses("2001", "iso8601");
        assertParses("2001-01", "iso8601");
        assertParses("2001-01-01", "iso8601");
        // a year-like value still parses via the first (date) parser in a composite
        assertParses("9999", "date_optional_time||epoch_second");
    }
    /**
     * In a composite pattern, date-math parsing falls through to the next sub-pattern
     * when an earlier one cannot parse the whole input.
     */
    public void testCompositeDateMathParsing() {
        // in all these examples the second pattern will be used
        assertDateMathEquals("2014-06-06T12:01:02.123", "2014-06-06T12:01:02.123", "yyyy-MM-dd'T'HH:mm:ss||yyyy-MM-dd'T'HH:mm:ss.SSS");
        assertDateMathEquals("2014-06-06T12:01:02.123", "2014-06-06T12:01:02.123", "strict_date_time_no_millis||yyyy-MM-dd'T'HH:mm:ss.SSS");
        assertDateMathEquals(
            "2014-06-06T12:01:02.123",
            "2014-06-06T12:01:02.123",
            "yyyy-MM-dd'T'HH:mm:ss+HH:MM||yyyy-MM-dd'T'HH:mm:ss.SSS"
        );
    }
public void testExceptionWhenCompositeParsingFailsDateMath() {
// both parsing failures should contain pattern and input text in exception
// both patterns fail parsing the input text due to only 2 digits of millis. Hence full text was not parsed.
String pattern = "yyyy-MM-dd'T'HH:mm:ss||yyyy-MM-dd'T'HH:mm:ss.SS";
String text = "2014-06-06T12:01:02.123";
ElasticsearchParseException e1 = expectThrows(
ElasticsearchParseException.class,
() -> dateMathToInstant(text, DateFormatter.forPattern(pattern), randomLocale(random()))
);
assertThat(e1.getMessage(), containsString(pattern));
assertThat(e1.getMessage(), containsString(text));
}
private Instant dateMathToInstant(String text, DateFormatter dateFormatter, Locale locale) {
DateFormatter javaFormatter = dateFormatter.withLocale(locale);
DateMathParser javaDateMath = javaFormatter.toDateMathParser();
return javaDateMath.parse(text, () -> 0, true, null);
}
    /** The legacy "8e" day-of-week pattern formats a Sunday as "1" under ROOT locale rules. */
    public void testDayOfWeek() {
        ZonedDateTime now = LocalDateTime.of(2009, 11, 15, 1, 32, 8, 328402).atZone(ZoneOffset.UTC); // Sunday
        DateFormatter javaFormatter = DateFormatter.forPattern("8e").withZone(ZoneOffset.UTC);
        assertThat(javaFormatter.format(now), equalTo("1"));
    }
    /** The legacy "8YYYY-ww" week-of-year pattern places 2019-05-26 in week 22. */
    public void testStartOfWeek() {
        ZonedDateTime now = LocalDateTime.of(2019, 5, 26, 1, 32, 8, 328402).atZone(ZoneOffset.UTC);
        DateFormatter javaFormatter = DateFormatter.forPattern("8YYYY-ww").withZone(ZoneOffset.UTC);
        assertThat(javaFormatter.format(now), equalTo("2019-22"));
    }
    // these parsers should allow both ',' and '.' as a decimal point
    public void testDecimalPointParsing() {
        assertParses("2001-01-01T00:00:00.123Z", "strict_date_optional_time");
        assertParses("2001-01-01T00:00:00,123Z", "strict_date_optional_time");
        assertParses("2001-01-01T00:00:00.123Z", "date_optional_time");
        assertParses("2001-01-01T00:00:00,123Z", "date_optional_time");
        // only java.time has nanos parsing, but the results for 3 digits should be the same
        DateFormatter javaFormatter = DateFormatter.forPattern("strict_date_optional_time_nanos");
        assertParses("2001-01-01T00:00:00.123Z", javaFormatter);
        assertParses("2001-01-01T00:00:00,123Z", javaFormatter);
        // only one decimal separator is allowed; a second one fails at its offset (23)
        assertParseException("2001-01-01T00:00:00.123,456Z", "strict_date_optional_time", 23);
        assertParseException("2001-01-01T00:00:00.123,456Z", "date_optional_time", 23);
        // This should fail, but java is ok with this because the field has the same value
        // assertJavaTimeParseException("2001-01-01T00:00:00.123,123Z", "strict_date_optional_time_nanos");
        // for historical reasons,
        // despite the use of a locale with , separator these formatters still expect only . decimals
        DateFormatter formatter = DateFormatter.forPattern("strict_date_time").withLocale(Locale.FRANCE);
        assertParses("2020-01-01T12:00:00.0Z", formatter);
        assertParseException("2020-01-01T12:00:00,0Z", formatter, 19);
        formatter = DateFormatter.forPattern("strict_date_hour_minute_second_fraction").withLocale(Locale.GERMANY);
        assertParses("2020-01-01T12:00:00.0", formatter);
        assertParseException("2020-01-01T12:00:00,0", formatter, 19);
        formatter = DateFormatter.forPattern("strict_date_hour_minute_second_millis").withLocale(Locale.ITALY);
        assertParses("2020-01-01T12:00:00.0", formatter);
        assertParseException("2020-01-01T12:00:00,0", formatter, 19);
    }
    /**
     * Offsets in Z, +HHmm, +HH and +HH:mm style must all parse; also demonstrates how
     * the width of the 'X' pattern letter selects the accepted offset style in java.time.
     */
    public void testTimeZoneFormatting() {
        assertParses("2001-01-01T00:00:00Z", "date_time_no_millis");
        // the following fail under java 8 but work under java 10, needs investigation
        assertParses("2001-01-01T00:00:00-0800", "date_time_no_millis");
        assertParses("2001-01-01T00:00:00+1030", "date_time_no_millis");
        assertParses("2001-01-01T00:00:00-08", "date_time_no_millis");
        assertParses("2001-01-01T00:00:00+10:30", "date_time_no_millis");
        // different timezone parsing styles require a different number of letters
        // XXX accepts Z and +HH:mm …
        DateTimeFormatter formatter = DateTimeFormatter.ofPattern("yyyyMMdd'T'HHmmss.SSSXXX", Locale.ROOT);
        formatter.parse("20181126T121212.123Z");
        formatter.parse("20181126T121212.123-08:30");
        // … while XXXX accepts +HHmm without the colon
        DateTimeFormatter formatter2 = DateTimeFormatter.ofPattern("yyyyMMdd'T'HHmmss.SSSXXXX", Locale.ROOT);
        formatter2.parse("20181126T121212.123+1030");
        formatter2.parse("20181126T121212.123-0830");
        // ... and can be combined, note that this is not an XOR, so one could append both timezones with this example
        DateTimeFormatter formatter3 = DateTimeFormatter.ofPattern("yyyyMMdd'T'HHmmss.SSS[XXXX][XXX]", Locale.ROOT);
        formatter3.parse("20181126T121212.123Z");
        formatter3.parse("20181126T121212.123-08:30");
        formatter3.parse("20181126T121212.123+1030");
        formatter3.parse("20181126T121212.123-0830");
    }
    /** Arbitrary user-supplied patterns (non-ISO field order, short forms) must parse. */
    public void testCustomTimeFormats() {
        // note the swapped day/month order in the pattern: "12" is the day, "06" the month
        assertParses("2010 12 06 11:05:15", "yyyy dd MM HH:mm:ss");
        assertParses("12/06", "dd/MM");
        assertParses("Nov 24 01:29:01 -0800", "MMM dd HH:mm:ss Z");
    }
    /** French weekday/month abbreviations must parse when a French locale is applied. */
    public void testCustomLocales() {
        // also ensure that locale based dates are the same
        DateFormatter formatter = DateFormatter.forPattern("E, d MMM yyyy HH:mm:ss Z").withLocale(LocaleUtils.parse("fr"));
        assertParses("mar., 5 déc. 2000 02:55:00 -0800", formatter);
        assertParses("mer., 6 déc. 2000 02:55:00 -0800", formatter);
        assertParses("jeu., 7 déc. 2000 00:00:00 -0800", formatter);
        assertParses("ven., 8 déc. 2000 00:00:00 -0800", formatter);
    }
public void testFormatsValidParsing() {
assertParses("1522332219", "epoch_second");
assertParses("0", "epoch_second");
assertParses("1", "epoch_second");
assertParses("1522332219321", "epoch_millis");
assertParses("0", "epoch_millis");
assertParses("1", "epoch_millis");
assertParses("20181126", "basic_date");
assertParses("20181126T121212.123Z", "basic_date_time");
assertParses("20181126T121212.123+10:00", "basic_date_time");
assertParses("20181126T121212.123-0800", "basic_date_time");
assertParses("20181126T121212Z", "basic_date_time_no_millis");
assertParses("20181126T121212+01:00", "basic_date_time_no_millis");
assertParses("20181126T121212+0100", "basic_date_time_no_millis");
assertParses("2018363", "basic_ordinal_date");
assertParses("2018363T121212.1Z", "basic_ordinal_date_time");
assertParses("2018363T121212.123Z", "basic_ordinal_date_time");
assertParses("2018363T121212.123456789Z", "basic_ordinal_date_time");
assertParses("2018363T121212.123+0100", "basic_ordinal_date_time");
assertParses("2018363T121212.123+01:00", "basic_ordinal_date_time");
assertParses("2018363T121212Z", "basic_ordinal_date_time_no_millis");
assertParses("2018363T121212+0100", "basic_ordinal_date_time_no_millis");
assertParses("2018363T121212+01:00", "basic_ordinal_date_time_no_millis");
assertParses("121212.1Z", "basic_time");
assertParses("121212.123Z", "basic_time");
assertParses("121212.123456789Z", "basic_time");
assertParses("121212.1+0100", "basic_time");
assertParses("121212.123+0100", "basic_time");
assertParses("121212.123+01:00", "basic_time");
assertParses("121212Z", "basic_time_no_millis");
assertParses("121212+0100", "basic_time_no_millis");
assertParses("121212+01:00", "basic_time_no_millis");
assertParses("T121212.1Z", "basic_t_time");
assertParses("T121212.123Z", "basic_t_time");
assertParses("T121212.123456789Z", "basic_t_time");
assertParses("T121212.1+0100", "basic_t_time");
assertParses("T121212.123+0100", "basic_t_time");
assertParses("T121212.123+01:00", "basic_t_time");
assertParses("T121212Z", "basic_t_time_no_millis");
assertParses("T121212+0100", "basic_t_time_no_millis");
assertParses("T121212+01:00", "basic_t_time_no_millis");
assertParses("2018W313", "basic_week_date");
assertParses("1W313", "basic_week_date");
assertParses("18W313", "basic_week_date");
assertParses("2018W313T121212.1Z", "basic_week_date_time");
assertParses("2018W313T121212.123Z", "basic_week_date_time");
assertParses("2018W313T121212.123456789Z", "basic_week_date_time");
assertParses("2018W313T121212.123+0100", "basic_week_date_time");
assertParses("2018W313T121212.123+01:00", "basic_week_date_time");
assertParses("2018W313T121212Z", "basic_week_date_time_no_millis");
assertParses("2018W313T121212+0100", "basic_week_date_time_no_millis");
assertParses("2018W313T121212+01:00", "basic_week_date_time_no_millis");
assertParses("2018-12-31", "date");
assertParses("18-5-6", "date");
assertParses("10000-5-6", "date");
assertParses("2018-12-31T12", "date_hour");
assertParses("2018-12-31T8", "date_hour");
assertParses("2018-12-31T12:12", "date_hour_minute");
assertParses("2018-12-31T8:3", "date_hour_minute");
assertParses("2018-12-31T12:12:12", "date_hour_minute_second");
assertParses("2018-12-31T12:12:1", "date_hour_minute_second");
assertParses("2018-12-31T12:12:12.1", "date_hour_minute_second_fraction");
assertParses("2018-12-31T12:12:12.123", "date_hour_minute_second_fraction");
assertParses("2018-12-31T12:12:12.123456789", "date_hour_minute_second_fraction");
assertParses("2018-12-31T12:12:12.1", "date_hour_minute_second_millis");
assertParses("2018-12-31T12:12:12.123", "date_hour_minute_second_millis");
assertParseException("2018-12-31T12:12:12.123456789", "date_hour_minute_second_millis", 23);
assertParses("2018-12-31T12:12:12.1", "date_hour_minute_second_millis");
assertParses("2018-12-31T12:12:12.1", "date_hour_minute_second_fraction");
assertParses("2018-05", "date_optional_time");
assertParses("2018-05-30", "date_optional_time");
assertParses("2018-05-30T20", "date_optional_time");
assertParses("2018-05-30T20:21", "date_optional_time");
assertParses("2018-05-30T20:21:23", "date_optional_time");
assertParses("2018-05-30T20:21:23.1", "date_optional_time");
assertParses("2018-05-30T20:21:23.123", "date_optional_time");
assertParses("2018-05-30T20:21:23.123456789", "date_optional_time");
assertParses("2018-05-30T20:21:23.123Z", "date_optional_time");
assertParses("2018-05-30T20:21:23.123456789Z", "date_optional_time");
assertParses("2018-05-30T20:21:23.1+0100", "date_optional_time");
assertParses("2018-05-30T20:21:23.123+0100", "date_optional_time");
assertParses("2018-05-30T20:21:23.1+01:00", "date_optional_time");
assertParses("2018-05-30T20:21:23.123+01:00", "date_optional_time");
assertParses("2018-12-1", "date_optional_time");
assertParses("2018-12-31T10:15:30", "date_optional_time");
assertParses("2018-12-31T10:15:3", "date_optional_time");
assertParses("2018-12-31T10:5:30", "date_optional_time");
assertParses("2018-12-31T1:15:30", "date_optional_time");
assertParses("2018-12-31T10:15:30.1Z", "date_time");
assertParses("2018-12-31T10:15:30.123Z", "date_time");
assertParses("2018-12-31T10:15:30.123456789Z", "date_time");
assertParses("2018-12-31T10:15:30.1+0100", "date_time");
assertParses("2018-12-31T10:15:30.123+0100", "date_time");
assertParses("2018-12-31T10:15:30.123+01:00", "date_time");
assertParses("2018-12-31T10:15:30.1+01:00", "date_time");
assertParses("2018-12-31T10:15:30.11Z", "date_time");
assertParses("2018-12-31T10:15:30.11+0100", "date_time");
assertParses("2018-12-31T10:15:30.11+01:00", "date_time");
assertParses("2018-12-31T10:15:3.1Z", "date_time");
assertParses("2018-12-31T10:15:3.123Z", "date_time");
assertParses("2018-12-31T10:15:3.123456789Z", "date_time");
assertParses("2018-12-31T10:15:3.1+0100", "date_time");
assertParses("2018-12-31T10:15:3.123+0100", "date_time");
assertParses("2018-12-31T10:15:3.123+01:00", "date_time");
assertParses("2018-12-31T10:15:3.1+01:00", "date_time");
assertParses("2018-12-31T10:15:30Z", "date_time_no_millis");
assertParses("2018-12-31T10:15:30+0100", "date_time_no_millis");
assertParses("2018-12-31T10:15:30+01:00", "date_time_no_millis");
assertParses("2018-12-31T10:5:30Z", "date_time_no_millis");
assertParses("2018-12-31T10:5:30+0100", "date_time_no_millis");
assertParses("2018-12-31T10:5:30+01:00", "date_time_no_millis");
assertParses("2018-12-31T10:15:3Z", "date_time_no_millis");
assertParses("2018-12-31T10:15:3+0100", "date_time_no_millis");
assertParses("2018-12-31T10:15:3+01:00", "date_time_no_millis");
assertParses("2018-12-31T1:15:30Z", "date_time_no_millis");
assertParses("2018-12-31T1:15:30+0100", "date_time_no_millis");
assertParses("2018-12-31T1:15:30+01:00", "date_time_no_millis");
assertParses("12", "hour");
assertParses("01", "hour");
assertParses("1", "hour");
assertParses("12:12", "hour_minute");
assertParses("12:01", "hour_minute");
assertParses("12:1", "hour_minute");
assertParses("12:12:12", "hour_minute_second");
assertParses("12:12:01", "hour_minute_second");
assertParses("12:12:1", "hour_minute_second");
assertParses("12:12:12.123", "hour_minute_second_fraction");
assertParses("12:12:12.123456789", "hour_minute_second_fraction");
assertParses("12:12:12.1", "hour_minute_second_fraction");
assertParseException("12:12:12", "hour_minute_second_fraction", 8);
assertParses("12:12:12.123", "hour_minute_second_millis");
assertParseException("12:12:12.123456789", "hour_minute_second_millis", 12);
assertParses("12:12:12.1", "hour_minute_second_millis");
assertParseException("12:12:12", "hour_minute_second_millis", 8);
assertParses("2018-128", "ordinal_date");
assertParses("2018-1", "ordinal_date");
assertParses("2018-128T10:15:30.1Z", "ordinal_date_time");
assertParses("2018-128T10:15:30.123Z", "ordinal_date_time");
assertParses("2018-128T10:15:30.123456789Z", "ordinal_date_time");
assertParses("2018-128T10:15:30.123+0100", "ordinal_date_time");
assertParses("2018-128T10:15:30.123+01:00", "ordinal_date_time");
assertParses("2018-1T10:15:30.1Z", "ordinal_date_time");
assertParses("2018-1T10:15:30.123Z", "ordinal_date_time");
assertParses("2018-1T10:15:30.123456789Z", "ordinal_date_time");
assertParses("2018-1T10:15:30.123+0100", "ordinal_date_time");
assertParses("2018-1T10:15:30.123+01:00", "ordinal_date_time");
assertParses("2018-128T10:15:30Z", "ordinal_date_time_no_millis");
assertParses("2018-128T10:15:30+0100", "ordinal_date_time_no_millis");
assertParses("2018-128T10:15:30+01:00", "ordinal_date_time_no_millis");
assertParses("2018-1T10:15:30Z", "ordinal_date_time_no_millis");
assertParses("2018-1T10:15:30+0100", "ordinal_date_time_no_millis");
assertParses("2018-1T10:15:30+01:00", "ordinal_date_time_no_millis");
assertParses("10:15:30.1Z", "time");
assertParses("10:15:30.123Z", "time");
assertParses("10:15:30.123456789Z", "time");
assertParses("10:15:30.123+0100", "time");
assertParses("10:15:30.123+01:00", "time");
assertParses("1:15:30.1Z", "time");
assertParses("1:15:30.123Z", "time");
assertParses("1:15:30.123+0100", "time");
assertParses("1:15:30.123+01:00", "time");
assertParses("10:1:30.1Z", "time");
assertParses("10:1:30.123Z", "time");
assertParses("10:1:30.123+0100", "time");
assertParses("10:1:30.123+01:00", "time");
assertParses("10:15:3.1Z", "time");
assertParses("10:15:3.123Z", "time");
assertParses("10:15:3.123+0100", "time");
assertParses("10:15:3.123+01:00", "time");
assertParseException("10:15:3.1", "time", 9);
assertParseException("10:15:3Z", "time", 7);
assertParses("10:15:30Z", "time_no_millis");
assertParses("10:15:30+0100", "time_no_millis");
assertParses("10:15:30+01:00", "time_no_millis");
assertParses("01:15:30Z", "time_no_millis");
assertParses("01:15:30+0100", "time_no_millis");
assertParses("01:15:30+01:00", "time_no_millis");
assertParses("1:15:30Z", "time_no_millis");
assertParses("1:15:30+0100", "time_no_millis");
assertParses("1:15:30+01:00", "time_no_millis");
assertParses("10:5:30Z", "time_no_millis");
assertParses("10:5:30+0100", "time_no_millis");
assertParses("10:5:30+01:00", "time_no_millis");
assertParses("10:15:3Z", "time_no_millis");
assertParses("10:15:3+0100", "time_no_millis");
assertParses("10:15:3+01:00", "time_no_millis");
assertParseException("10:15:3", "time_no_millis", 7);
assertParses("T10:15:30.1Z", "t_time");
assertParses("T10:15:30.123Z", "t_time");
assertParses("T10:15:30.123456789Z", "t_time");
assertParses("T10:15:30.1+0100", "t_time");
assertParses("T10:15:30.123+0100", "t_time");
assertParses("T10:15:30.123+01:00", "t_time");
assertParses("T10:15:30.1+01:00", "t_time");
assertParses("T1:15:30.123Z", "t_time");
assertParses("T1:15:30.123+0100", "t_time");
assertParses("T1:15:30.123+01:00", "t_time");
assertParses("T10:1:30.123Z", "t_time");
assertParses("T10:1:30.123+0100", "t_time");
assertParses("T10:1:30.123+01:00", "t_time");
assertParses("T10:15:3.123Z", "t_time");
assertParses("T10:15:3.123+0100", "t_time");
assertParses("T10:15:3.123+01:00", "t_time");
assertParseException("T10:15:3.1", "t_time", 10);
assertParseException("T10:15:3Z", "t_time", 8);
assertParses("T10:15:30Z", "t_time_no_millis");
assertParses("T10:15:30+0100", "t_time_no_millis");
assertParses("T10:15:30+01:00", "t_time_no_millis");
assertParses("T1:15:30Z", "t_time_no_millis");
assertParses("T1:15:30+0100", "t_time_no_millis");
assertParses("T1:15:30+01:00", "t_time_no_millis");
assertParses("T10:1:30Z", "t_time_no_millis");
assertParses("T10:1:30+0100", "t_time_no_millis");
assertParses("T10:1:30+01:00", "t_time_no_millis");
assertParses("T10:15:3Z", "t_time_no_millis");
assertParses("T10:15:3+0100", "t_time_no_millis");
assertParses("T10:15:3+01:00", "t_time_no_millis");
assertParseException("T10:15:3", "t_time_no_millis", 8);
assertParses("2012-W48-6", "week_date");
assertParses("2012-W01-6", "week_date");
assertParses("2012-W1-6", "week_date");
assertParseException("2012-W1-8", "week_date", 0);
assertParses("2012-W48-6T10:15:30.1Z", "week_date_time");
assertParses("2012-W48-6T10:15:30.123Z", "week_date_time");
assertParses("2012-W48-6T10:15:30.123456789Z", "week_date_time");
assertParses("2012-W48-6T10:15:30.1+0100", "week_date_time");
assertParses("2012-W48-6T10:15:30.123+0100", "week_date_time");
assertParses("2012-W48-6T10:15:30.1+01:00", "week_date_time");
assertParses("2012-W48-6T10:15:30.123+01:00", "week_date_time");
assertParses("2012-W1-6T10:15:30.1Z", "week_date_time");
assertParses("2012-W1-6T10:15:30.123Z", "week_date_time");
assertParses("2012-W1-6T10:15:30.1+0100", "week_date_time");
assertParses("2012-W1-6T10:15:30.123+0100", "week_date_time");
assertParses("2012-W1-6T10:15:30.1+01:00", "week_date_time");
assertParses("2012-W1-6T10:15:30.123+01:00", "week_date_time");
assertParses("2012-W48-6T10:15:30Z", "week_date_time_no_millis");
assertParses("2012-W48-6T10:15:30+0100", "week_date_time_no_millis");
assertParses("2012-W48-6T10:15:30+01:00", "week_date_time_no_millis");
assertParses("2012-W1-6T10:15:30Z", "week_date_time_no_millis");
assertParses("2012-W1-6T10:15:30+0100", "week_date_time_no_millis");
assertParses("2012-W1-6T10:15:30+01:00", "week_date_time_no_millis");
assertParses("2012", "year");
assertParses("1", "year");
assertParses("-2000", "year");
assertParses("2012-12", "year_month");
assertParses("1-1", "year_month");
assertParses("2012-12-31", "year_month_day");
assertParses("1-12-31", "year_month_day");
assertParses("2012-1-31", "year_month_day");
assertParses("2012-12-1", "year_month_day");
assertParses("2018", "weekyear");
assertParses("1", "weekyear");
assertParses("2017", "weekyear");
assertParses("2018-W29", "weekyear_week");
assertParses("2018-W1", "weekyear_week");
assertParses("2012-W31-5", "weekyear_week_day");
assertParses("2012-W1-1", "weekyear_week_day");
}
public void testCompositeParsing() {
// in all these examples the second pattern will be used
assertParses("2014-06-06T12:01:02.123", "yyyy-MM-dd'T'HH:mm:ss||yyyy-MM-dd'T'HH:mm:ss.SSS");
assertParses("2014-06-06T12:01:02.123", "strict_date_time_no_millis||yyyy-MM-dd'T'HH:mm:ss.SSS");
assertParses("2014-06-06T12:01:02.123", "yyyy-MM-dd'T'HH:mm:ss+HH:MM||yyyy-MM-dd'T'HH:mm:ss.SSS");
}
public void testExceptionWhenCompositeParsingFails() {
assertParseException("2014-06-06T12:01:02.123", "yyyy-MM-dd'T'HH:mm:ss||yyyy-MM-dd'T'HH:mm:ss.SS", 19);
}
public void testStrictParsing() {
assertParses("2018W313", "strict_basic_week_date");
assertParseException("18W313", "strict_basic_week_date", 0);
assertParses("2018W313T121212.1Z", "strict_basic_week_date_time");
assertParses("2018W313T121212.123Z", "strict_basic_week_date_time");
assertParses("2018W313T121212.123456789Z", "strict_basic_week_date_time");
assertParses("2018W313T121212.1+0100", "strict_basic_week_date_time");
assertParses("2018W313T121212.123+0100", "strict_basic_week_date_time");
assertParses("2018W313T121212.1+01:00", "strict_basic_week_date_time");
assertParses("2018W313T121212.123+01:00", "strict_basic_week_date_time");
assertParseException("2018W313T12128.123Z", "strict_basic_week_date_time", 13);
assertParseException("2018W313T12128.123456789Z", "strict_basic_week_date_time", 13);
assertParseException("2018W313T81212.123Z", "strict_basic_week_date_time", 13);
assertParseException("2018W313T12812.123Z", "strict_basic_week_date_time", 13);
assertParseException("2018W313T12812.1Z", "strict_basic_week_date_time", 13);
assertParses("2018W313T121212Z", "strict_basic_week_date_time_no_millis");
assertParses("2018W313T121212+0100", "strict_basic_week_date_time_no_millis");
assertParses("2018W313T121212+01:00", "strict_basic_week_date_time_no_millis");
assertParseException("2018W313T12128Z", "strict_basic_week_date_time_no_millis", 13);
assertParseException("2018W313T12128+0100", "strict_basic_week_date_time_no_millis", 13);
assertParseException("2018W313T12128+01:00", "strict_basic_week_date_time_no_millis", 13);
assertParseException("2018W313T81212Z", "strict_basic_week_date_time_no_millis", 13);
assertParseException("2018W313T81212+0100", "strict_basic_week_date_time_no_millis", 13);
assertParseException("2018W313T81212+01:00", "strict_basic_week_date_time_no_millis", 13);
assertParseException("2018W313T12812Z", "strict_basic_week_date_time_no_millis", 13);
assertParseException("2018W313T12812+0100", "strict_basic_week_date_time_no_millis", 13);
assertParseException("2018W313T12812+01:00", "strict_basic_week_date_time_no_millis", 13);
assertParses("2018-12-31", "strict_date");
assertParseException("10000-12-31", "strict_date", 0);
assertParseException("2018-8-31", "strict_date", 5);
assertParses("2018-12-31T12", "strict_date_hour");
assertParseException("2018-12-31T8", "strict_date_hour", 11);
assertParses("2018-12-31T12:12", "strict_date_hour_minute");
assertParseException("2018-12-31T8:3", "strict_date_hour_minute", 11);
assertParses("2018-12-31T12:12:12", "strict_date_hour_minute_second");
assertParseException("2018-12-31T12:12:1", "strict_date_hour_minute_second", 17);
assertParses("2018-12-31T12:12:12.1", "strict_date_hour_minute_second_fraction");
assertParses("2018-12-31T12:12:12.123", "strict_date_hour_minute_second_fraction");
assertParses("2018-12-31T12:12:12.123456789", "strict_date_hour_minute_second_fraction");
assertParses("2018-12-31T12:12:12.123", "strict_date_hour_minute_second_millis");
assertParses("2018-12-31T12:12:12.1", "strict_date_hour_minute_second_millis");
assertParses("2018-12-31T12:12:12.1", "strict_date_hour_minute_second_fraction");
assertParseException("2018-12-31T12:12:12", "strict_date_hour_minute_second_millis", 19);
assertParseException("2018-12-31T12:12:12", "strict_date_hour_minute_second_fraction", 19);
assertParses("2018-12-31", "strict_date_optional_time");
assertParseException("2018-12-1", "strict_date_optional_time", 8);
assertParseException("2018-1-31", "strict_date_optional_time", 5);
assertParseException("10000-01-31", "strict_date_optional_time", 4);
assertParses("2010-01-05T02:00", "strict_date_optional_time");
assertParses("2018-12-31T10:15:30", "strict_date_optional_time");
assertParses("2018-12-31T10:15:30Z", "strict_date_optional_time");
assertParses("2018-12-31T10:15:30+0100", "strict_date_optional_time");
assertParses("2018-12-31T10:15:30+01:00", "strict_date_optional_time");
assertParseException("2018-12-31T10:15:3", "strict_date_optional_time", 17);
assertParseException("2018-12-31T10:5:30", "strict_date_optional_time", 14);
assertParseException("2018-12-31T9:15:30", "strict_date_optional_time", 11);
assertParses("2015-01-04T00:00Z", "strict_date_optional_time");
assertParses("2018-12-31T10:15:30.1Z", "strict_date_time");
assertParses("2018-12-31T10:15:30.123Z", "strict_date_time");
assertParses("2018-12-31T10:15:30.123456789Z", "strict_date_time");
assertParses("2018-12-31T10:15:30.1+0100", "strict_date_time");
assertParses("2018-12-31T10:15:30.123+0100", "strict_date_time");
assertParses("2018-12-31T10:15:30.1+01:00", "strict_date_time");
assertParses("2018-12-31T10:15:30.123+01:00", "strict_date_time");
assertParses("2018-12-31T10:15:30.11Z", "strict_date_time");
assertParses("2018-12-31T10:15:30.11+0100", "strict_date_time");
assertParses("2018-12-31T10:15:30.11+01:00", "strict_date_time");
assertParseException("2018-12-31T10:15:3.123Z", "strict_date_time", 17);
assertParseException("2018-12-31T10:5:30.123Z", "strict_date_time", 14);
assertParseException("2018-12-31T1:15:30.123Z", "strict_date_time", 11);
assertParses("2018-12-31T10:15:30Z", "strict_date_time_no_millis");
assertParses("2018-12-31T10:15:30+0100", "strict_date_time_no_millis");
assertParses("2018-12-31T10:15:30+01:00", "strict_date_time_no_millis");
assertParseException("2018-12-31T10:5:30Z", "strict_date_time_no_millis", 14);
assertParseException("2018-12-31T10:15:3Z", "strict_date_time_no_millis", 17);
assertParseException("2018-12-31T1:15:30Z", "strict_date_time_no_millis", 11);
assertParses("12", "strict_hour");
assertParses("01", "strict_hour");
assertParseException("1", "strict_hour", 0);
assertParses("12:12", "strict_hour_minute");
assertParses("12:01", "strict_hour_minute");
assertParseException("12:1", "strict_hour_minute", 3);
assertParses("12:12:12", "strict_hour_minute_second");
assertParses("12:12:01", "strict_hour_minute_second");
assertParseException("12:12:1", "strict_hour_minute_second", 6);
assertParses("12:12:12.123", "strict_hour_minute_second_fraction");
assertParses("12:12:12.123456789", "strict_hour_minute_second_fraction");
assertParses("12:12:12.1", "strict_hour_minute_second_fraction");
assertParseException("12:12:12", "strict_hour_minute_second_fraction", 8);
assertParses("12:12:12.123", "strict_hour_minute_second_millis");
assertParses("12:12:12.1", "strict_hour_minute_second_millis");
assertParseException("12:12:12", "strict_hour_minute_second_millis", 8);
assertParses("2018-128", "strict_ordinal_date");
assertParseException("2018-1", "strict_ordinal_date", 5);
assertParses("2018-128T10:15:30.1Z", "strict_ordinal_date_time");
assertParses("2018-128T10:15:30.123Z", "strict_ordinal_date_time");
assertParses("2018-128T10:15:30.123456789Z", "strict_ordinal_date_time");
assertParses("2018-128T10:15:30.1+0100", "strict_ordinal_date_time");
assertParses("2018-128T10:15:30.123+0100", "strict_ordinal_date_time");
assertParses("2018-128T10:15:30.1+01:00", "strict_ordinal_date_time");
assertParses("2018-128T10:15:30.123+01:00", "strict_ordinal_date_time");
assertParseException("2018-1T10:15:30.123Z", "strict_ordinal_date_time", 5);
assertParses("2018-128T10:15:30Z", "strict_ordinal_date_time_no_millis");
assertParses("2018-128T10:15:30+0100", "strict_ordinal_date_time_no_millis");
assertParses("2018-128T10:15:30+01:00", "strict_ordinal_date_time_no_millis");
assertParseException("2018-1T10:15:30Z", "strict_ordinal_date_time_no_millis", 5);
assertParses("10:15:30.1Z", "strict_time");
assertParses("10:15:30.123Z", "strict_time");
assertParses("10:15:30.123456789Z", "strict_time");
assertParses("10:15:30.123+0100", "strict_time");
assertParses("10:15:30.123+01:00", "strict_time");
assertParseException("1:15:30.123Z", "strict_time", 0);
assertParseException("10:1:30.123Z", "strict_time", 3);
assertParseException("10:15:3.123Z", "strict_time", 6);
assertParseException("10:15:3.1", "strict_time", 6);
assertParseException("10:15:3Z", "strict_time", 6);
assertParses("10:15:30Z", "strict_time_no_millis");
assertParses("10:15:30+0100", "strict_time_no_millis");
assertParses("10:15:30+01:00", "strict_time_no_millis");
assertParses("01:15:30Z", "strict_time_no_millis");
assertParses("01:15:30+0100", "strict_time_no_millis");
assertParses("01:15:30+01:00", "strict_time_no_millis");
assertParseException("1:15:30Z", "strict_time_no_millis", 0);
assertParseException("10:5:30Z", "strict_time_no_millis", 3);
assertParseException("10:15:3Z", "strict_time_no_millis", 6);
assertParseException("10:15:3", "strict_time_no_millis", 6);
assertParses("T10:15:30.1Z", "strict_t_time");
assertParses("T10:15:30.123Z", "strict_t_time");
assertParses("T10:15:30.123456789Z", "strict_t_time");
assertParses("T10:15:30.1+0100", "strict_t_time");
assertParses("T10:15:30.123+0100", "strict_t_time");
assertParses("T10:15:30.1+01:00", "strict_t_time");
assertParses("T10:15:30.123+01:00", "strict_t_time");
assertParseException("T1:15:30.123Z", "strict_t_time", 1);
assertParseException("T10:1:30.123Z", "strict_t_time", 4);
assertParseException("T10:15:3.123Z", "strict_t_time", 7);
assertParseException("T10:15:3.1", "strict_t_time", 7);
assertParseException("T10:15:3Z", "strict_t_time", 7);
assertParses("T10:15:30Z", "strict_t_time_no_millis");
assertParses("T10:15:30+0100", "strict_t_time_no_millis");
assertParses("T10:15:30+01:00", "strict_t_time_no_millis");
assertParseException("T1:15:30Z", "strict_t_time_no_millis", 1);
assertParseException("T10:1:30Z", "strict_t_time_no_millis", 4);
assertParseException("T10:15:3Z", "strict_t_time_no_millis", 7);
assertParseException("T10:15:3", "strict_t_time_no_millis", 7);
assertParses("2012-W48-6", "strict_week_date");
assertParses("2012-W01-6", "strict_week_date");
assertParseException("2012-W1-6", "strict_week_date", 6);
assertParseException("2012-W1-8", "strict_week_date", 6);
assertParses("2012-W48-6", "strict_week_date");
assertParses("2012-W01-6", "strict_week_date");
assertParseException("2012-W1-6", "strict_week_date", 6);
assertParseException("2012-W01-8", "strict_week_date");
assertParses("2012-W48-6T10:15:30.1Z", "strict_week_date_time");
assertParses("2012-W48-6T10:15:30.123Z", "strict_week_date_time");
assertParses("2012-W48-6T10:15:30.123456789Z", "strict_week_date_time");
assertParses("2012-W48-6T10:15:30.1+0100", "strict_week_date_time");
assertParses("2012-W48-6T10:15:30.123+0100", "strict_week_date_time");
assertParses("2012-W48-6T10:15:30.1+01:00", "strict_week_date_time");
assertParses("2012-W48-6T10:15:30.123+01:00", "strict_week_date_time");
assertParseException("2012-W1-6T10:15:30.123Z", "strict_week_date_time", 6);
assertParses("2012-W48-6T10:15:30Z", "strict_week_date_time_no_millis");
assertParses("2012-W48-6T10:15:30+0100", "strict_week_date_time_no_millis");
assertParses("2012-W48-6T10:15:30+01:00", "strict_week_date_time_no_millis");
assertParseException("2012-W1-6T10:15:30Z", "strict_week_date_time_no_millis", 6);
assertParses("2012", "strict_year");
assertParseException("1", "strict_year", 0);
assertParses("-2000", "strict_year");
assertParses("2012-12", "strict_year_month");
assertParseException("1-1", "strict_year_month", 0);
assertParses("2012-12-31", "strict_year_month_day");
assertParseException("1-12-31", "strict_year_month_day", 0);
assertParseException("2012-1-31", "strict_year_month_day", 4);
assertParseException("2012-12-1", "strict_year_month_day", 7);
assertParses("2018", "strict_weekyear");
assertParseException("1", "strict_weekyear", 0);
assertParses("2018", "strict_weekyear");
assertParses("2017", "strict_weekyear");
assertParseException("1", "strict_weekyear", 0);
assertParses("2018-W29", "strict_weekyear_week");
assertParses("2018-W01", "strict_weekyear_week");
assertParseException("2018-W1", "strict_weekyear_week", 6);
assertParses("2012-W31-5", "strict_weekyear_week_day");
assertParseException("2012-W1-1", "strict_weekyear_week_day", 6);
}
public void testDateFormatterWithLocale() {
Locale locale = randomLocale(random());
String pattern = randomBoolean() ? "strict_date_optional_time||date_time" : "date_time||strict_date_optional_time";
DateFormatter formatter = DateFormatter.forPattern(pattern).withLocale(locale);
assertThat(formatter.pattern(), is(pattern));
assertThat(formatter.locale(), is(locale));
}
public void testSeveralTimeFormats() {
{
String format = "year_month_day||ordinal_date";
DateFormatter javaFormatter = DateFormatter.forPattern(format);
assertParses("2018-12-12", javaFormatter);
assertParses("2018-128", javaFormatter);
}
{
String format = "strict_date_optional_time||dd-MM-yyyy";
DateFormatter javaFormatter = DateFormatter.forPattern(format);
assertParses("31-01-2014", javaFormatter);
}
}
public void testParsingLocalDateFromYearOfEra() {
// with strict resolving, YearOfEra expect an era, otherwise it won't resolve to a date
assertParses("2018363", DateFormatter.forPattern("uuuuDDD"));
}
public void testParsingMissingTimezone() {
long millisJava = DateFormatter.forPattern("8yyyy-MM-dd HH:mm:ss").parseMillis("2018-02-18 17:47:17");
long millisJoda = DateFormatter.forPattern("yyyy-MM-dd HH:mm:ss").parseMillis("2018-02-18 17:47:17");
assertThat(millisJava, is(millisJoda));
}
// see https://bugs.openjdk.org/browse/JDK-8193877
public void testNoClassCastException() {
String input = "DpNKOGqhjZ";
IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> DateFormatter.forPattern(input));
assertThat(e.getCause(), instanceOf(ClassCastException.class));
assertThat(e.getMessage(), containsString(input));
}
public void testXContentElasticsearchExtensionDefaultFormatter() {
final var formatter = DateFormatter.forPattern("strict_date_optional_time_nanos");
assertSame(XContentElasticsearchExtension.DEFAULT_FORMATTER, formatter);
assertEquals("2025-09-12T08:12:12.123Z", formatter.format(Instant.ofEpochMilli(1757664732123L)));
assertEquals("2025-09-12T08:12:12.000Z", formatter.format(Instant.ofEpochMilli(1757664732000L)));
assertEquals("2025-09-12T08:12:00.000Z", formatter.format(Instant.ofEpochMilli(1757664720000L)));
assertEquals("2025-09-12T08:00:00.000Z", formatter.format(Instant.ofEpochMilli(1757664000000L)));
assertEquals("2025-09-12T00:00:00.000Z", formatter.format(Instant.ofEpochMilli(1757635200000L)));
// NB differs from Instant.toString():
assertEquals("2025-09-12T08:12:12.123Z", Instant.ofEpochMilli(1757664732123L).toString());
assertEquals("2025-09-12T08:12:00Z", Instant.ofEpochMilli(1757664720000L).toString());
}
}
| DateFormattersTests |
java | google__dagger | dagger-compiler/main/java/dagger/internal/codegen/compileroption/ProcessingEnvironmentCompilerOptions.java | {
"start": 14156,
"end": 20353
} | enum ____ implements EnumOption<ValidationType> {
DISABLE_INTER_COMPONENT_SCOPE_VALIDATION(),
NULLABLE_VALIDATION(ERROR, WARNING),
PRIVATE_MEMBER_VALIDATION(ERROR, WARNING),
STATIC_MEMBER_VALIDATION(ERROR, WARNING),
/** Whether to validate full binding graphs for components, subcomponents, and modules. */
FULL_BINDING_GRAPH_VALIDATION(NONE, ERROR, WARNING) {
@Override
public ImmutableList<String> aliases() {
return ImmutableList.of("dagger.moduleBindingValidation");
}
},
/**
* How to report conflicting scoped bindings when validating partial binding graphs associated
* with modules.
*/
MODULE_HAS_DIFFERENT_SCOPES_VALIDATION(ERROR, WARNING),
/**
* How to report that an explicit binding in a subcomponent conflicts with an {@code @Inject}
* constructor used in an ancestor component.
*/
EXPLICIT_BINDING_CONFLICTS_WITH_INJECT(ERROR, WARNING, NONE),
;
final ValidationType defaultType;
final ImmutableSet<ValidationType> validTypes;
Validation() {
this(ERROR, WARNING, NONE);
}
Validation(ValidationType defaultType, ValidationType... moreValidTypes) {
this.defaultType = defaultType;
this.validTypes = immutableEnumSet(defaultType, moreValidTypes);
}
@Override
public ValidationType defaultValue() {
return defaultType;
}
@Override
public Set<ValidationType> validValues() {
return validTypes;
}
@Override
public String toString() {
return optionName(this);
}
}
private static String optionName(Enum<? extends EnumOption<?>> option) {
return "dagger." + UPPER_UNDERSCORE.to(LOWER_CAMEL, option.name());
}
/** The supported command-line options. */
public static ImmutableSet<String> supportedOptions() {
// need explicit type parameter to avoid a runtime stream error
return ImmutableSet.<String>builder()
.addAll(
Stream.<CommandLineOption[]>of(
KeyOnlyOption.values(), Feature.values(), Validation.values())
.flatMap(Arrays::stream)
.flatMap(CommandLineOption::allNames)
.collect(toImmutableSet()))
.add(KEYS_PER_COMPONENT_SHARD)
.build();
}
/**
* Returns the value for the option as set on the command line by any name, or the default value
* if not set.
*
* <p>If more than one name is used to set the value, but all names specify the same value,
* reports a warning and returns that value.
*
* <p>If more than one name is used to set the value, and not all names specify the same value,
* reports an error and returns the default value.
*/
private <T extends Enum<T>> T parseOption(EnumOption<T> option) {
@SuppressWarnings("unchecked") // we only put covariant values into the map
T value = (T) enumOptions.computeIfAbsent(option, this::parseOptionUncached);
return value;
}
private boolean isSetOnCommandLine(Feature feature) {
return getUsedNames(feature).count() > 0;
}
private <T extends Enum<T>> T parseOptionUncached(EnumOption<T> option) {
ImmutableMap<String, T> values = parseOptionWithAllNames(option);
// If no value is specified, return the default value.
if (values.isEmpty()) {
return option.defaultValue();
}
// If all names have the same value, return that.
if (values.asMultimap().inverse().keySet().size() == 1) {
// Warn if an option was set with more than one name. That would be an error if the values
// differed.
if (values.size() > 1) {
reportUseOfDifferentNamesForOption(Diagnostic.Kind.WARNING, option, values.keySet());
}
return values.values().asList().get(0);
}
// If different names have different values, report an error and return the default
// value.
reportUseOfDifferentNamesForOption(Diagnostic.Kind.ERROR, option, values.keySet());
return option.defaultValue();
}
private void reportUseOfDifferentNamesForOption(
Diagnostic.Kind diagnosticKind, EnumOption<?> option, ImmutableSet<String> usedNames) {
messager.printMessage(
diagnosticKind,
String.format(
"Only one of the equivalent options (%s) should be used; prefer -A%s",
usedNames.stream().map(name -> "-A" + name).collect(joining(", ")), option));
}
private <T extends Enum<T>> ImmutableMap<String, T> parseOptionWithAllNames(
EnumOption<T> option) {
@SuppressWarnings("unchecked") // map is covariant
ImmutableMap<String, T> aliasValues =
(ImmutableMap<String, T>)
allCommandLineOptions.computeIfAbsent(option, this::parseOptionWithAllNamesUncached);
return aliasValues;
}
private <T extends Enum<T>> ImmutableMap<String, T> parseOptionWithAllNamesUncached(
EnumOption<T> option) {
ImmutableMap.Builder<String, T> values = ImmutableMap.builder();
getUsedNames(option)
.forEach(
name -> parseOptionWithName(option, name).ifPresent(value -> values.put(name, value)));
return values.build();
}
private <T extends Enum<T>> Optional<T> parseOptionWithName(EnumOption<T> option, String key) {
checkArgument(options.containsKey(key), "key %s not found", key);
String stringValue = options.get(key);
if (stringValue == null) {
messager.printMessage(Diagnostic.Kind.ERROR, "Processor option -A" + key + " needs a value");
} else {
try {
T value =
Enum.valueOf(option.defaultValue().getDeclaringClass(), Ascii.toUpperCase(stringValue));
if (option.validValues().contains(value)) {
return Optional.of(value);
}
} catch (IllegalArgumentException e) {
// handled below
}
messager.printMessage(
Diagnostic.Kind.ERROR,
String.format(
"Processor option -A%s may only have the values %s (case insensitive), found: %s",
key, option.validValues(), stringValue));
}
return Optional.empty();
}
private Stream<String> getUsedNames(CommandLineOption option) {
return option.allNames().filter(options::containsKey);
}
}
| Validation |
java | apache__flink | flink-runtime/src/test/java/org/apache/flink/runtime/scheduler/adaptive/allocator/SharedSlotTest.java | {
"start": 10840,
"end": 11449
} | class ____ implements LogicalSlot.Payload {
private final Consumer<Throwable> failConsumer;
public TestLogicalSlotPayload() {
this.failConsumer = ignored -> {};
}
public TestLogicalSlotPayload(Consumer<Throwable> failConsumer) {
this.failConsumer = failConsumer;
}
@Override
public void fail(Throwable cause) {
failConsumer.accept(cause);
}
@Override
public CompletableFuture<?> getTerminalStateFuture() {
return new CompletableFuture<>();
}
}
}
| TestLogicalSlotPayload |
java | spring-projects__spring-boot | module/spring-boot-mongodb/src/main/java/org/springframework/boot/mongodb/testcontainers/DeprecatedMongoDbContainerConnectionDetailsFactory.java | {
"start": 1433,
"end": 1675
} | class ____
extends AbstractMongoContainerConnectionDetailsFactory<MongoDBContainer> {
DeprecatedMongoDbContainerConnectionDetailsFactory() {
super(MongoDBContainer::getReplicaSetUrl);
}
}
| DeprecatedMongoDbContainerConnectionDetailsFactory |
java | apache__logging-log4j2 | log4j-core/src/main/java/org/apache/logging/log4j/core/pattern/PatternParser.java | {
"start": 1769,
"end": 2116
} | class ____ {
static final String DISABLE_ANSI = "disableAnsi";
static final String NO_CONSOLE_NO_ANSI = "noConsoleNoAnsi";
/**
* Escape character for format specifier.
*/
private static final char ESCAPE_CHAR = '%';
/**
* The states the parser can be in while parsing the pattern.
*/
private | PatternParser |
java | alibaba__fastjson | src/test/java/com/alibaba/json/bvt/path/JSONPath_between_int.java | {
"start": 1609,
"end": 2105
} | class ____ {
private Integer id;
private String name;
public Entity(Integer id, String name){
this.id = id;
this.name = name;
}
public Integer getId() {
return id;
}
public void setId(Integer id) {
this.id = id;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
}
}
| Entity |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/common/LocalTimeOffset.java | {
"start": 9248,
"end": 10353
} | class ____ extends LocalTimeOffset {
private final LocalTimeOffset previous;
private final long startUtcMillis;
private Transition(long millis, LocalTimeOffset previous, long startUtcMillis) {
super(millis);
this.previous = previous;
this.startUtcMillis = startUtcMillis;
}
/**
* The offset before the this one.
*/
public LocalTimeOffset previous() {
return previous;
}
@Override
protected final boolean containsUtcMillis(long utcMillis) {
return utcMillis >= startUtcMillis;
}
@Override
protected final LocalTimeOffset offsetContaining(long utcMillis) {
if (containsUtcMillis(utcMillis)) {
return this;
}
return previous.offsetContaining(utcMillis);
}
/**
* The time that this offset started in milliseconds since epoch.
*/
public long startUtcMillis() {
return startUtcMillis;
}
}
public static | Transition |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/boot/models/AttributeNature.java | {
"start": 295,
"end": 417
} | enum ____ {
BASIC,
EMBEDDED,
ANY,
TO_ONE,
ELEMENT_COLLECTION,
MANY_TO_ANY,
MANY_TO_MANY,
ONE_TO_MANY
}
| AttributeNature |
java | micronaut-projects__micronaut-core | context/src/main/java/io/micronaut/scheduling/io/watch/event/WatchEventType.java | {
"start": 916,
"end": 1763
} | enum ____ {
/**
* A file / directory was created.
*/
CREATE,
/**
* A file / directory was modified.
*/
MODIFY,
/**
* A file / directory was deleted.
*/
DELETE;
/**
* Produces a {@link WatchEventType} for the given {@link WatchEvent#kind()}.
*
* @param kind The kind
* @return The event type
*/
public static WatchEventType of(WatchEvent.Kind kind) {
if (kind == StandardWatchEventKinds.ENTRY_CREATE) {
return CREATE;
} else if (kind == StandardWatchEventKinds.ENTRY_MODIFY) {
return MODIFY;
} else if (kind == StandardWatchEventKinds.ENTRY_DELETE) {
return DELETE;
} else {
throw new IllegalArgumentException("Unsupported watch event kind: " + kind);
}
}
}
| WatchEventType |
java | reactor__reactor-core | reactor-core/src/test/java/reactor/guide/GuideTests.java | {
"start": 2688,
"end": 14731
} | class ____ {
@Test
@SuppressWarnings("unchecked")
public void introFutureHell() {
CompletableFuture<List<String>> ids = ifhIds(); // <1>
CompletableFuture<List<String>> result = ids.thenComposeAsync(l -> { // <2>
Stream<CompletableFuture<String>> zip =
l.stream().map(i -> { // <3>
CompletableFuture<String> nameTask = ifhName(i); // <4>
CompletableFuture<Integer> statTask = ifhStat(i); // <5>
return nameTask.thenCombineAsync(statTask, (name, stat) -> "Name " + name + " has stats " + stat); // <6>
});
List<CompletableFuture<String>> combinationList = zip.collect(Collectors.toList()); // <7>
CompletableFuture<String>[] combinationArray = combinationList.toArray(new CompletableFuture[combinationList.size()]);
CompletableFuture<Void> allDone = CompletableFuture.allOf(combinationArray); // <8>
return allDone.thenApply(v -> combinationList.stream()
.map(CompletableFuture::join) // <9>
.collect(Collectors.toList()));
});
List<String> results = result.join(); // <10>
assertThat(results).contains(
"Name NameJoe has stats 103",
"Name NameBart has stats 104",
"Name NameHenry has stats 105",
"Name NameNicole has stats 106",
"Name NameABSLAJNFOAJNFOANFANSF has stats 121");
}
@Test
public void introFutureHellReactorVersion() {
Flux<String> ids = ifhrIds(); // <1>
Flux<String> combinations =
ids.flatMap(id -> { // <2>
Mono<String> nameTask = ifhrName(id); // <3>
Mono<Integer> statTask = ifhrStat(id); // <4>
return nameTask.zipWith(statTask, // <5>
(name, stat) -> "Name " + name + " has stats " + stat);
});
Mono<List<String>> result = combinations.collectList(); // <6>
List<String> results = result.block(); // <7>
assertThat(results).containsExactly( // <8>
"Name NameJoe has stats 103",
"Name NameBart has stats 104",
"Name NameHenry has stats 105",
"Name NameNicole has stats 106",
"Name NameABSLAJNFOAJNFOANFANSF has stats 121"
);
}
private CompletableFuture<String> ifhName(String id) {
CompletableFuture<String> f = new CompletableFuture<>();
f.complete("Name" + id);
return f;
}
private CompletableFuture<Integer> ifhStat(String id) {
CompletableFuture<Integer> f = new CompletableFuture<>();
f.complete(id.length() + 100);
return f;
}
private CompletableFuture<List<String>> ifhIds() {
CompletableFuture<List<String>> ids = new CompletableFuture<>();
ids.complete(Arrays.asList("Joe", "Bart", "Henry", "Nicole", "ABSLAJNFOAJNFOANFANSF"));
return ids;
}
private Flux<String> ifhrIds() {
return Flux.just("Joe", "Bart", "Henry", "Nicole", "ABSLAJNFOAJNFOANFANSF");
}
private Mono<String> ifhrName(String id) {
return Mono.just("Name" + id);
}
private Mono<Integer> ifhrStat(String id) {
return Mono.just(id.length() + 100);
}
@Test
public void advancedComposedNow() {
Function<Flux<String>, Flux<String>> filterAndMap =
f -> f.filter(color -> !color.equals("orange"))
.map(String::toUpperCase);
Flux.fromIterable(Arrays.asList("blue", "green", "orange", "purple"))
.doOnNext(System.out::println)
.transform(filterAndMap)
.subscribe(d -> System.out.println("Subscriber to Transformed MapAndFilter: "+d));
}
@Test
public void advancedComposedDefer() {
AtomicInteger ai = new AtomicInteger();
Function<Flux<String>, Flux<String>> filterAndMap = f -> {
if (ai.incrementAndGet() == 1) {
return f.filter(color -> !color.equals("orange"))
.map(String::toUpperCase);
}
return f.filter(color -> !color.equals("purple"))
.map(String::toUpperCase);
};
Flux<String> composedFlux =
Flux.fromIterable(Arrays.asList("blue", "green", "orange", "purple"))
.doOnNext(System.out::println)
.transformDeferred(filterAndMap);
composedFlux.subscribe(d -> System.out.println("Subscriber 1 to Composed MapAndFilter :"+d));
composedFlux.subscribe(d -> System.out.println("Subscriber 2 to Composed MapAndFilter: "+d));
}
@Test
public void advancedCold() {
Flux<String> source = Flux.fromIterable(Arrays.asList("blue", "green", "orange", "purple"))
.map(String::toUpperCase);
source.subscribe(d -> System.out.println("Subscriber 1: "+d));
source.subscribe(d -> System.out.println("Subscriber 2: "+d));
}
@Test
public void advancedHot() {
Sinks.Many<String> hotSource = Sinks.unsafe().many().multicast().directBestEffort();
Flux<String> hotFlux = hotSource.asFlux().map(String::toUpperCase);
hotFlux.subscribe(d -> System.out.println("Subscriber 1 to Hot Source: "+d));
hotSource.emitNext("blue", FAIL_FAST); // <1>
hotSource.tryEmitNext("green").orThrow(); // <2>
hotFlux.subscribe(d -> System.out.println("Subscriber 2 to Hot Source: "+d));
hotSource.emitNext("orange", FAIL_FAST);
hotSource.emitNext("purple", FAIL_FAST);
hotSource.emitComplete(FAIL_FAST);
}
@Test
public void advancedConnectable() throws InterruptedException {
Flux<Integer> source = Flux.range(1, 3)
.doOnSubscribe(s -> System.out.println("subscribed to source"));
ConnectableFlux<Integer> co = source.publish();
co.subscribe(System.out::println, e -> {}, () -> {});
co.subscribe(System.out::println, e -> {}, () -> {});
System.out.println("done subscribing");
Thread.sleep(500);
System.out.println("will now connect");
co.connect();
}
@Test
public void advancedConnectableAutoConnect() throws InterruptedException {
Flux<Integer> source = Flux.range(1, 3)
.doOnSubscribe(s -> System.out.println("subscribed to source"));
Flux<Integer> autoCo = source.publish().autoConnect(2);
autoCo.subscribe(System.out::println, e -> {}, () -> {});
System.out.println("subscribed first");
Thread.sleep(500);
System.out.println("subscribing second");
autoCo.subscribe(System.out::println, e -> {}, () -> {});
}
@Test
public void advancedBatchingGrouping() {
StepVerifier.create(
Flux.just(1, 3, 5, 2, 4, 6, 11, 12, 13)
.groupBy(i -> i % 2 == 0 ? "even" : "odd")
.concatMap(g -> g.defaultIfEmpty(-1) //if empty groups, show them
.map(String::valueOf) //map to string
.startWith(g.key())) //start with the group's key
)
.expectNext("odd", "1", "3", "5", "11", "13")
.expectNext("even", "2", "4", "6", "12")
.verifyComplete();
}
@Test
public void advancedBatchingWindowingSizeOverlap() {
StepVerifier.create(
Flux.range(1, 10)
.window(5, 3) //overlapping windows
.concatMap(g -> g.defaultIfEmpty(-1)) //show empty windows as -1
)
.expectNext(1, 2, 3, 4, 5)
.expectNext(4, 5, 6, 7, 8)
.expectNext(7, 8, 9, 10)
.expectNext(10)
.verifyComplete();
}
@Test
public void advancedBatchingWindowing() {
StepVerifier.create(
Flux.just(1, 3, 5, 2, 4, 6, 11, 12, 13)
.windowWhile(i -> i % 2 == 0)
.concatMap(g -> g.defaultIfEmpty(-1))
)
.expectNext(-1, -1, -1) //respectively triggered by odd 1 3 5
.expectNext(2, 4, 6) // triggered by 11
.expectNext(12) // triggered by 13
// however, no empty completion window is emitted (would contain extra matching elements)
.verifyComplete();
}
@Test
public void advancedBatchingBufferingSizeOverlap() {
StepVerifier.create(
Flux.range(1, 10)
.buffer(5, 3) //overlapping buffers
)
.expectNext(Arrays.asList(1, 2, 3, 4, 5))
.expectNext(Arrays.asList(4, 5, 6, 7, 8))
.expectNext(Arrays.asList(7, 8, 9, 10))
.expectNext(Collections.singletonList(10))
.verifyComplete();
}
@Test
public void advancedBatchingBuffering() {
StepVerifier.create(
Flux.just(1, 3, 5, 2, 4, 6, 11, 12, 13)
.bufferWhile(i -> i % 2 == 0)
)
.expectNext(Arrays.asList(2, 4, 6)) // triggered by 11
.expectNext(Collections.singletonList(12)) // triggered by 13
.verifyComplete();
}
@Test
public void advancedParallelJustDivided() {
Flux.range(1, 10)
.parallel(2) //<1>
.subscribe(i -> System.out.println(Thread.currentThread().getName() + " -> " + i));
}
@Test
public void advancedParallelParallelized() {
Flux.range(1, 10)
.parallel(2)
.runOn(Schedulers.parallel())
.subscribe(i -> System.out.println(Thread.currentThread().getName() + " -> " + i));
}
private Flux<String> someStringSource() {
return Flux.just("foo", "bar", "baz").hide();
}
@Test
public void baseSubscriberFineTuneBackpressure() {
Flux<String> source = someStringSource();
source.map(String::toUpperCase)
.subscribe(new BaseSubscriber<String>() { // <1>
@Override
protected void hookOnSubscribe(Subscription subscription) {
// <2>
request(1); // <3>
}
@Override
protected void hookOnNext(String value) {
request(1); // <4>
}
//<5>
});
}
private String doSomethingDangerous(long i) {
if (i < 5)
return String.valueOf(i);
throw new IllegalArgumentException("boom" + i);
}
private String doSecondTransform(String i) {
return "item" + i;
}
@Test
public void errorHandlingOnError() {
Flux<String> s = Flux.range(1, 10)
.map(v -> doSomethingDangerous(v)) // <1>
.map(v -> doSecondTransform(v)); // <2>
s.subscribe(value -> System.out.println("RECEIVED " + value), // <3>
error -> System.err.println("CAUGHT " + error) // <4>
);
StepVerifier.create(s)
.expectNext("item1")
.expectNext("item2")
.expectNext("item3")
.expectNext("item4")
.verifyErrorMessage("boom5");
}
@Test
public void errorHandlingTryCatch() {
try {
for (int i = 1; i < 11; i++) {
String v1 = doSomethingDangerous(i); // <1>
String v2 = doSecondTransform(v1); // <2>
System.out.println("RECEIVED " + v2);
}
} catch (Throwable t) {
System.err.println("CAUGHT " + t); // <3>
}
}
@Test
public void errorHandlingReturn() {
Flux<String> flux =
Flux.just(10)
.map(this::doSomethingDangerous)
.onErrorReturn("RECOVERED");
StepVerifier.create(flux)
.expectNext("RECOVERED")
.verifyComplete();
}
@Test
public void errorHandlingReturnFilter() {
Flux<String> flux =
Flux.just(10)
.map(this::doSomethingDangerous)
.onErrorReturn(e -> e.getMessage().equals("boom10"), "recovered10");
StepVerifier.create(flux)
.expectNext("recovered10")
.verifyComplete();
flux =
Flux.just(9)
.map(this::doSomethingDangerous)
.onErrorReturn(e -> e.getMessage().equals("boom10"), "recovered10");
StepVerifier.create(flux)
.verifyErrorMessage("boom9");
}
private Flux<String> callExternalService(String key) {
if (key.equals("key2"))
return Flux.error(new IllegalStateException("boom"));
if (key.startsWith("timeout"))
return Flux.error(new TimeoutException());
if (key.startsWith("unknown"))
return Flux.error(new UnknownKeyException());
return Flux.just(key.replace("key", "value"));
}
private Flux<String> getFromCache(String key) {
return Flux.just("outdated" + key);
}
@Test
public void errorHandlingOnErrorResume() {
Flux<String> flux =
Flux.just("key1", "key2")
.flatMap(k ->
callExternalService(k) // <1>
.onErrorResume(e -> getFromCache(k)) // <2>
);
StepVerifier.create(flux)
.expectNext("value1", "outdatedkey2")
.verifyComplete();
}
private | GuideTests |
java | apache__rocketmq | tools/src/main/java/org/apache/rocketmq/tools/command/controller/ReElectMasterSubCommand.java | {
"start": 1403,
"end": 4202
} | class ____ implements SubCommand {
@Override
public String commandName() {
return "electMaster";
}
@Override
public String commandDesc() {
return "Re-elect the specified broker as master.";
}
@Override
public Options buildCommandlineOptions(Options options) {
Option opt = new Option("a", "controllerAddress", true, "The address of controller");
opt.setRequired(true);
options.addOption(opt);
opt = new Option("b", "brokerId", true, "The id of the broker which requires to become master");
opt.setRequired(true);
options.addOption(opt);
opt = new Option("bn", "brokerName", true, "The broker name of the replicas that require to be manipulated");
opt.setRequired(true);
options.addOption(opt);
opt = new Option("c", "clusterName", true, "the clusterName of broker");
opt.setRequired(true);
options.addOption(opt);
return options;
}
@Override
public void execute(CommandLine commandLine, Options options, RPCHook rpcHook) throws SubCommandException {
DefaultMQAdminExt defaultMQAdminExt = new DefaultMQAdminExt(rpcHook);
defaultMQAdminExt.setInstanceName(Long.toString(System.currentTimeMillis()));
String controllerAddress = commandLine.getOptionValue("a").trim();
String clusterName = commandLine.getOptionValue('c').trim();
String brokerName = commandLine.getOptionValue("bn").trim();
Long brokerId = Long.valueOf(commandLine.getOptionValue("b").trim());
try {
defaultMQAdminExt.start();
final Pair<ElectMasterResponseHeader, BrokerMemberGroup> pair = defaultMQAdminExt.electMaster(controllerAddress, clusterName, brokerName, brokerId);
final ElectMasterResponseHeader metaData = pair.getObject1();
final BrokerMemberGroup brokerMemberGroup = pair.getObject2();
System.out.printf("\n#ClusterName\t%s", clusterName);
System.out.printf("\n#BrokerName\t%s", brokerName);
System.out.printf("\n#BrokerMasterAddr\t%s", metaData.getMasterAddress());
System.out.printf("\n#MasterEpoch\t%s", metaData.getMasterEpoch());
System.out.printf("\n#SyncStateSetEpoch\t%s\n", metaData.getSyncStateSetEpoch());
if (null != brokerMemberGroup && null != brokerMemberGroup.getBrokerAddrs()) {
brokerMemberGroup.getBrokerAddrs().forEach((key, value) -> System.out.printf("\t#Broker\t%d\t%s\n", key, value));
}
} catch (Exception e) {
throw new SubCommandException(this.getClass().getSimpleName() + " command failed", e);
} finally {
defaultMQAdminExt.shutdown();
}
}
}
| ReElectMasterSubCommand |
java | hibernate__hibernate-orm | hibernate-envers/src/test/java/org/hibernate/orm/test/envers/integration/basic/RelationTargetNotFoundConfigTest.java | {
"start": 5983,
"end": 6387
} | class ____ {
@Id
private Integer id;
private String name;
FooBar() {
// Required by JPA
}
FooBar(Integer id, String name) {
this.id = id;
this.name = name;
}
public Integer getId() {
return id;
}
public void setId(Integer id) {
this.id = id;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
}
}
| FooBar |
java | quarkusio__quarkus | independent-projects/arc/processor/src/main/java/io/quarkus/arc/processor/InvokerInfo.java | {
"start": 432,
"end": 3699
} | class ____ implements InjectionTargetInfo {
private final BeanDeployment beanDeployment;
final BeanInfo targetBean;
final ClassInfo targetBeanClass;
final MethodInfo method;
final boolean instanceLookup;
final boolean[] argumentLookups;
final Injection argumentInjection;
final boolean usesLookup;
final InvocationTransformer instanceTransformer;
final InvocationTransformer[] argumentTransformers;
final InvocationTransformer returnValueTransformer;
final InvocationTransformer exceptionTransformer;
final InvocationTransformer invocationWrapper;
final String className;
final String wrapperClassName;
final String lazyClassName;
InvokerInfo(InvokerBuilder builder, Injection argumentInjection, BeanDeployment beanDeployment) {
assert builder.argumentTransformers.length == builder.targetMethod.parametersCount();
assert builder.argumentLookups.length == builder.targetMethod.parametersCount();
this.beanDeployment = beanDeployment;
this.targetBean = builder.targetBean;
this.targetBeanClass = builder.targetBeanClass;
this.method = builder.targetMethod;
this.instanceLookup = builder.instanceLookup;
this.argumentLookups = builder.argumentLookups;
this.argumentInjection = argumentInjection;
boolean usesLookup = builder.instanceLookup;
for (boolean argumentLookup : builder.argumentLookups) {
usesLookup |= argumentLookup;
}
this.usesLookup = usesLookup;
this.instanceTransformer = builder.instanceTransformer;
this.argumentTransformers = builder.argumentTransformers;
this.returnValueTransformer = builder.returnValueTransformer;
this.exceptionTransformer = builder.exceptionTransformer;
this.invocationWrapper = builder.invocationWrapper;
String prefix = builder.targetMethod.declaringClass().name() + "_" + builder.targetMethod.name();
String hash = methodHash(builder);
this.className = prefix + "_Invoker_" + hash;
this.wrapperClassName = invocationWrapper != null ? prefix + "_InvokerWrapper_" + hash : null;
this.lazyClassName = usesLookup ? prefix + "_LazyInvoker_" + hash : null;
}
private static String methodHash(InvokerBuilder builder) {
StringBuilder str = new StringBuilder();
str.append(builder.targetBean.getIdentifier());
str.append(builder.targetBeanClass.name());
str.append(builder.targetMethod.declaringClass().name());
str.append(builder.targetMethod.name());
str.append(builder.targetMethod.returnType().name());
for (Type parameterType : builder.targetMethod.parameterTypes()) {
str.append(parameterType.name());
}
str.append(builder.instanceTransformer);
str.append(Arrays.toString(builder.argumentTransformers));
str.append(builder.returnValueTransformer);
str.append(builder.exceptionTransformer);
str.append(builder.invocationWrapper);
str.append(builder.instanceLookup);
str.append(Arrays.toString(builder.argumentLookups));
return Hashes.sha1_base64(str.toString());
}
/**
* Returns the | InvokerInfo |
java | apache__flink | flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/operations/PartitionQueryOperation.java | {
"start": 1353,
"end": 3064
} | class ____ implements QueryOperation {
private final List<ResolvedExpression> partitionExpressions;
private final QueryOperation child;
public PartitionQueryOperation(
List<ResolvedExpression> partitionExpressions, QueryOperation child) {
this.partitionExpressions = partitionExpressions;
this.child = child;
}
public int[] getPartitionKeys() {
return partitionExpressions.stream()
.map(FieldReferenceExpression.class::cast)
.map(FieldReferenceExpression::getFieldIndex)
.mapToInt(Integer::intValue)
.toArray();
}
@Override
public String asSerializableString(SqlFactory sqlFactory) {
return String.format(
"(%s\n) PARTITION BY (%s)",
OperationUtils.indent(child.asSerializableString(sqlFactory)),
partitionExpressions.stream()
.map(expr -> expr.asSerializableString(sqlFactory))
.collect(Collectors.joining(", ")));
}
@Override
public String asSummaryString() {
Map<String, Object> args = new LinkedHashMap<>();
args.put("partition", partitionExpressions);
return OperationUtils.formatWithChildren(
"Partition", args, getChildren(), Operation::asSummaryString);
}
@Override
public ResolvedSchema getResolvedSchema() {
return child.getResolvedSchema();
}
@Override
public List<QueryOperation> getChildren() {
return List.of(child);
}
@Override
public <T> T accept(QueryOperationVisitor<T> visitor) {
return visitor.visit(this);
}
}
| PartitionQueryOperation |
java | apache__camel | components/camel-paho-mqtt5/src/generated/java/org/apache/camel/component/paho/mqtt5/PahoMqtt5EndpointUriFactory.java | {
"start": 520,
"end": 3457
} | class ____ extends org.apache.camel.support.component.EndpointUriFactorySupport implements EndpointUriFactory {
private static final String BASE = ":topic";
private static final Set<String> PROPERTY_NAMES;
private static final Set<String> SECRET_PROPERTY_NAMES;
private static final Map<String, String> MULTI_VALUE_PREFIXES;
static {
Set<String> props = new HashSet<>(34);
props.add("automaticReconnect");
props.add("bridgeErrorHandler");
props.add("brokerUrl");
props.add("cleanStart");
props.add("client");
props.add("clientId");
props.add("connectionTimeout");
props.add("customWebSocketHeaders");
props.add("exceptionHandler");
props.add("exchangePattern");
props.add("executorServiceTimeout");
props.add("filePersistenceDirectory");
props.add("httpsHostnameVerificationEnabled");
props.add("keepAliveInterval");
props.add("lazyStartProducer");
props.add("manualAcksEnabled");
props.add("maxReconnectDelay");
props.add("password");
props.add("persistence");
props.add("qos");
props.add("receiveMaximum");
props.add("retained");
props.add("serverURIs");
props.add("sessionExpiryInterval");
props.add("socketFactory");
props.add("sslClientProps");
props.add("sslHostnameVerifier");
props.add("topic");
props.add("userName");
props.add("willMqttProperties");
props.add("willPayload");
props.add("willQos");
props.add("willRetained");
props.add("willTopic");
PROPERTY_NAMES = Collections.unmodifiableSet(props);
Set<String> secretProps = new HashSet<>(2);
secretProps.add("password");
secretProps.add("userName");
SECRET_PROPERTY_NAMES = Collections.unmodifiableSet(secretProps);
MULTI_VALUE_PREFIXES = Collections.emptyMap();
}
@Override
public boolean isEnabled(String scheme) {
return "paho-mqtt5".equals(scheme);
}
@Override
public String buildUri(String scheme, Map<String, Object> properties, boolean encode) throws URISyntaxException {
String syntax = scheme + BASE;
String uri = syntax;
Map<String, Object> copy = new HashMap<>(properties);
uri = buildPathParameter(syntax, uri, "topic", null, true, copy);
uri = buildQueryParameters(uri, copy, encode);
return uri;
}
@Override
public Set<String> propertyNames() {
return PROPERTY_NAMES;
}
@Override
public Set<String> secretPropertyNames() {
return SECRET_PROPERTY_NAMES;
}
@Override
public Map<String, String> multiValuePrefixes() {
return MULTI_VALUE_PREFIXES;
}
@Override
public boolean isLenientProperties() {
return false;
}
}
| PahoMqtt5EndpointUriFactory |
java | apache__flink | flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/resource/ResourceManager.java | {
"start": 22902,
"end": 23350
} | class ____.",
resourceUri.getResourceType().name(),
url);
}
resourceInfos.put(resourceUri, url);
LOG.info("Register resource [{}] successfully.", resourceUri.getUri());
});
}
/**
* Resource with reference counter, when the counter is 0, it means the resource can be removed.
*/
static | path |
java | reactor__reactor-core | reactor-core/src/test/java/reactor/core/publisher/InnerProducerTest.java | {
"start": 846,
"end": 1314
} | class ____ {
@Test
public void scanDefaultMethod() {
CoreSubscriber<String> actual = new LambdaSubscriber<>(null, null, null, null);
InnerProducer<String> test = new InnerProducer<String>() {
@Override
public CoreSubscriber<? super String> actual() {
return actual;
}
@Override
public void request(long n) { }
@Override
public void cancel() { }
};
assertThat(test.scan(Scannable.Attr.ACTUAL)).isSameAs(actual);
}
}
| InnerProducerTest |
java | quarkusio__quarkus | extensions/opentelemetry/deployment/src/test/java/io/quarkus/opentelemetry/deployment/traces/OpenTelemetrySamplerBeanTest.java | {
"start": 1843,
"end": 1981
} | class ____ {
@Produces
public Sampler sampler() {
return Sampler.alwaysOff();
}
}
}
| OtelConfiguration |
java | spring-projects__spring-framework | spring-test/src/test/java/org/springframework/test/web/servlet/assertj/MockMvcTesterIntegrationTests.java | {
"start": 11408,
"end": 12254
} | class ____ {
@Test
void hasViewName() {
assertThat(mvc.get().uri("/persons/{0}", "Andy")).hasViewName("persons/index");
}
@Test
void viewNameWithCustomAssertion() {
assertThat(mvc.get().uri("/persons/{0}", "Andy")).viewName().startsWith("persons");
}
@Test
void containsAttributes() {
assertThat(mvc.post().uri("/persons").param("name", "Andy")).model()
.containsOnlyKeys("name").containsEntry("name", "Andy");
}
@Test
void hasErrors() {
assertThat(mvc.post().uri("/persons")).model().hasErrors();
}
@Test
void hasAttributeErrors() {
assertThat(mvc.post().uri("/persons")).model().hasAttributeErrors("person");
}
@Test
void hasAttributeErrorsCount() {
assertThat(mvc.post().uri("/persons")).model().extractingBindingResult("person").hasErrorsCount(1);
}
}
@Nested
| ModelAndViewTests |
java | assertj__assertj-core | assertj-core/src/main/java/org/assertj/core/data/TemporalUnitOffset.java | {
"start": 939,
"end": 1042
} | class ____ {@link TemporalOffset} on basis of {@link TemporalUnit}.
* @since 3.7.0
*/
public abstract | for |
java | spring-projects__spring-framework | spring-context/src/test/java/org/springframework/context/annotation/ConfigurationWithFactoryBeanEarlyDeductionTests.java | {
"start": 7306,
"end": 7359
} | class ____<T> extends AbstractMyBean {
}
static | MyBean |
java | apache__dubbo | dubbo-compatible/src/main/java/com/alibaba/dubbo/registry/NotifyListener.java | {
"start": 1067,
"end": 1581
} | class ____ implements NotifyListener {
private org.apache.dubbo.registry.NotifyListener listener;
public CompatibleNotifyListener(org.apache.dubbo.registry.NotifyListener listener) {
this.listener = listener;
}
@Override
public void notify(List<URL> urls) {
if (listener != null) {
listener.notify(urls.stream().map(url -> url.getOriginalURL()).collect(Collectors.toList()));
}
}
}
| CompatibleNotifyListener |
java | spring-projects__spring-framework | spring-webmvc/src/main/java/org/springframework/web/servlet/view/groovy/GroovyMarkupViewResolver.java | {
"start": 1048,
"end": 1445
} | class ____ all views created by this resolver can be specified
* via {@link #setViewClass(Class)}.
*
* <p><b>Note:</b> When chaining ViewResolvers this resolver will check for the
* existence of the specified template resources and only return a non-null
* {@code View} object if a template is actually found.
*
* @author Brian Clozel
* @since 4.1
* @see GroovyMarkupConfigurer
*/
public | for |
java | quarkusio__quarkus | extensions/vertx-http/deployment/src/test/java/io/quarkus/vertx/http/router/ReqContextActivationTerminationTest.java | {
"start": 1029,
"end": 1813
} | class ____ {
private static int counter;
void observeRouter(@Observes StartupEvent startup, Router router) {
router.get("/boom").handler(ctx -> {
// context starts as inactive; we perform manual activation/termination and assert
Assertions.assertEquals(false, Arc.container().requestContext().isActive());
Arc.container().requestContext().activate();
Assertions.assertEquals(true, Arc.container().requestContext().isActive());
Arc.container().requestContext().terminate();
Assertions.assertEquals(false, Arc.container().requestContext().isActive());
ctx.response().setStatusCode(200).end("ok");
});
}
}
}
| BeanWithObserver |
java | apache__dubbo | dubbo-plugin/dubbo-mcp/src/test/java/org/apache/dubbo/mcp/tool/DubboOpenApiToolConverterTest.java | {
"start": 1892,
"end": 7557
} | class ____ {
@Mock
private DefaultOpenAPIService openApiService;
@Mock
private ServiceDescriptor serviceDescriptor;
@Mock
private URL serviceUrl;
private DubboOpenApiToolConverter converter;
@BeforeEach
void setUp() {
MockitoAnnotations.openMocks(this);
converter = new DubboOpenApiToolConverter(openApiService);
}
@Test
void testConverterConstruction() {
assertNotNull(converter);
}
@Test
void testConvertToTools_WithNullOpenAPI() {
when(serviceDescriptor.getInterfaceName()).thenReturn("TestService");
when(openApiService.getOpenAPI(any(OpenAPIRequest.class))).thenReturn(null);
Map<String, McpSchema.Tool> result = converter.convertToTools(serviceDescriptor, serviceUrl, null);
assertTrue(result.isEmpty());
}
@Test
void testConvertToTools_WithEmptyPaths() {
when(serviceDescriptor.getInterfaceName()).thenReturn("TestService");
OpenAPI openAPI = new OpenAPI();
openAPI.setPaths(new HashMap<>());
when(openApiService.getOpenAPI(any(OpenAPIRequest.class))).thenReturn(openAPI);
Map<String, McpSchema.Tool> result = converter.convertToTools(serviceDescriptor, serviceUrl, null);
assertTrue(result.isEmpty());
}
@Test
void testConvertToTools_WithValidOperation() {
when(serviceDescriptor.getInterfaceName()).thenReturn("TestService");
OpenAPI openAPI = createMockOpenAPI();
when(openApiService.getOpenAPI(any(OpenAPIRequest.class))).thenReturn(openAPI);
Map<String, McpSchema.Tool> result = converter.convertToTools(serviceDescriptor, serviceUrl, null);
assertFalse(result.isEmpty());
assertTrue(result.containsKey("testOperation"));
McpSchema.Tool tool = result.get("testOperation");
assertEquals("testOperation", tool.name());
assertNotNull(tool.description());
assertNotNull(tool.inputSchema());
}
@Test
void testConvertToTools_WithCustomToolConfig() {
when(serviceDescriptor.getInterfaceName()).thenReturn("TestService");
OpenAPI openAPI = createMockOpenAPI();
when(openApiService.getOpenAPI(any(OpenAPIRequest.class))).thenReturn(openAPI);
McpServiceFilter.McpToolConfig toolConfig = new McpServiceFilter.McpToolConfig();
toolConfig.setToolName("customTool");
toolConfig.setDescription("Custom description");
Map<String, McpSchema.Tool> result = converter.convertToTools(serviceDescriptor, serviceUrl, toolConfig);
assertFalse(result.isEmpty());
McpSchema.Tool tool = result.values().iterator().next();
assertEquals("customTool", tool.name());
assertEquals("Custom description", tool.description());
}
@Test
void testGetOperationByToolName_WithExistingTool() {
when(serviceDescriptor.getInterfaceName()).thenReturn("TestService");
OpenAPI openAPI = createMockOpenAPI();
when(openApiService.getOpenAPI(any(OpenAPIRequest.class))).thenReturn(openAPI);
converter.convertToTools(serviceDescriptor, serviceUrl, null);
Operation operation = converter.getOperationByToolName("testOperation");
assertNotNull(operation);
assertEquals("testOperation", operation.getOperationId());
}
@Test
void testGetOperationByToolName_WithNonExistentTool() {
Operation operation = converter.getOperationByToolName("nonExistent");
assertNull(operation);
}
@Test
void testConvertToTools_WithParameter() {
when(serviceDescriptor.getInterfaceName()).thenReturn("TestService");
OpenAPI openAPI = createMockOpenAPIWithParameters();
when(openApiService.getOpenAPI(any(OpenAPIRequest.class))).thenReturn(openAPI);
Map<String, McpSchema.Tool> result = converter.convertToTools(serviceDescriptor, serviceUrl, null);
assertFalse(result.isEmpty());
McpSchema.Tool tool = result.get("testOperation");
assertNotNull(tool);
assertNotNull(tool.inputSchema());
}
@Test
void testConvertToTools_WithException() {
when(serviceDescriptor.getInterfaceName()).thenReturn("TestService");
when(openApiService.getOpenAPI(any(OpenAPIRequest.class))).thenThrow(new RuntimeException("Test exception"));
assertThrows(RuntimeException.class, () -> {
converter.convertToTools(serviceDescriptor, serviceUrl, null);
});
}
private OpenAPI createMockOpenAPI() {
OpenAPI openAPI = new OpenAPI();
Map<String, PathItem> paths = new HashMap<>();
PathItem pathItem = new PathItem();
Map<HttpMethods, Operation> operations = new HashMap<>();
Operation operation = new Operation();
operation.setOperationId("testOperation");
operation.setSummary("Test operation summary");
operation.setDescription("Test operation description");
operations.put(HttpMethods.GET, operation);
pathItem.setOperations(operations);
paths.put("/test", pathItem);
openAPI.setPaths(paths);
return openAPI;
}
private OpenAPI createMockOpenAPIWithParameters() {
OpenAPI openAPI = createMockOpenAPI();
PathItem pathItem = openAPI.getPaths().get("/test");
Operation operation = pathItem.getOperations().get(HttpMethods.GET);
Parameter parameter = new Parameter("testParam", Parameter.In.QUERY);
parameter.setSchema(new Schema());
operation.setParameters(java.util.Arrays.asList(parameter));
return openAPI;
}
}
| DubboOpenApiToolConverterTest |
java | elastic__elasticsearch | x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/registry/InferenceEndpointRegistry.java | {
"start": 1498,
"end": 6486
} | class ____ {
private static final Setting<Boolean> INFERENCE_ENDPOINT_CACHE_ENABLED = Setting.boolSetting(
"xpack.inference.endpoint.cache.enabled",
true,
Setting.Property.NodeScope,
Setting.Property.Dynamic
);
private static final Setting<Integer> INFERENCE_ENDPOINT_CACHE_WEIGHT = Setting.intSetting(
"xpack.inference.endpoint.cache.weight",
25,
Setting.Property.NodeScope
);
private static final Setting<TimeValue> INFERENCE_ENDPOINT_CACHE_EXPIRY = Setting.timeSetting(
"xpack.inference.endpoint.cache.expiry_time",
TimeValue.timeValueMinutes(15),
TimeValue.timeValueMinutes(1),
TimeValue.timeValueHours(1),
Setting.Property.NodeScope
);
public static Collection<? extends Setting<?>> getSettingsDefinitions() {
return List.of(INFERENCE_ENDPOINT_CACHE_ENABLED, INFERENCE_ENDPOINT_CACHE_WEIGHT, INFERENCE_ENDPOINT_CACHE_EXPIRY);
}
private static final Logger log = LogManager.getLogger(InferenceEndpointRegistry.class);
private static final Cache.Stats EMPTY = new Cache.Stats(0, 0, 0);
private final ModelRegistry modelRegistry;
private final InferenceServiceRegistry serviceRegistry;
private final ProjectResolver projectResolver;
private final Cache<InferenceIdAndProject, Model> cache;
private final ClusterService clusterService;
private final FeatureService featureService;
private volatile boolean cacheEnabledViaSetting;
public InferenceEndpointRegistry(
ClusterService clusterService,
Settings settings,
ModelRegistry modelRegistry,
InferenceServiceRegistry serviceRegistry,
ProjectResolver projectResolver,
FeatureService featureService
) {
this.modelRegistry = modelRegistry;
this.serviceRegistry = serviceRegistry;
this.projectResolver = projectResolver;
this.cache = CacheBuilder.<InferenceIdAndProject, Model>builder()
.setMaximumWeight(INFERENCE_ENDPOINT_CACHE_WEIGHT.get(settings))
.setExpireAfterWrite(INFERENCE_ENDPOINT_CACHE_EXPIRY.get(settings))
.build();
this.clusterService = clusterService;
this.featureService = featureService;
this.cacheEnabledViaSetting = INFERENCE_ENDPOINT_CACHE_ENABLED.get(settings);
clusterService.getClusterSettings()
.addSettingsUpdateConsumer(INFERENCE_ENDPOINT_CACHE_ENABLED, enabled -> this.cacheEnabledViaSetting = enabled);
}
public void getEndpoint(String inferenceEntityId, ActionListener<Model> listener) {
var key = new InferenceIdAndProject(inferenceEntityId, projectResolver.getProjectId());
var cachedModel = cacheEnabled() ? cache.get(key) : null;
if (cachedModel != null) {
log.trace("Retrieved [{}] from cache.", inferenceEntityId);
listener.onResponse(cachedModel);
} else {
loadFromIndex(key, listener);
}
}
void invalidateAll(ProjectId projectId) {
if (cacheEnabled()) {
var cacheKeys = cache.keys().iterator();
while (cacheKeys.hasNext()) {
if (cacheKeys.next().projectId.equals(projectId)) {
cacheKeys.remove();
}
}
}
}
private void loadFromIndex(InferenceIdAndProject idAndProject, ActionListener<Model> listener) {
modelRegistry.getModelWithSecrets(idAndProject.inferenceEntityId(), listener.delegateFailureAndWrap((l, unparsedModel) -> {
var service = serviceRegistry.getService(unparsedModel.service())
.orElseThrow(
() -> new ResourceNotFoundException(
"Unknown service [{}] for model [{}]",
unparsedModel.service(),
idAndProject.inferenceEntityId()
)
);
var model = service.parsePersistedConfigWithSecrets(
unparsedModel.inferenceEntityId(),
unparsedModel.taskType(),
unparsedModel.settings(),
unparsedModel.secrets()
);
if (cacheEnabled()) {
cache.put(idAndProject, model);
}
l.onResponse(model);
}));
}
public Cache.Stats stats() {
return cacheEnabled() ? cache.stats() : EMPTY;
}
public int cacheCount() {
return cacheEnabled() ? cache.count() : 0;
}
public boolean cacheEnabled() {
return cacheEnabledViaSetting && cacheEnabledViaFeature();
}
private boolean cacheEnabledViaFeature() {
var state = clusterService.state();
return state.clusterRecovered() && featureService.clusterHasFeature(state, InferenceFeatures.INFERENCE_ENDPOINT_CACHE);
}
private record InferenceIdAndProject(String inferenceEntityId, ProjectId projectId) {}
}
| InferenceEndpointRegistry |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/mapping/generated/GeneratedNoOpUpdateTest.java | {
"start": 4755,
"end": 5465
} | class ____ {
@Id
@GeneratedValue
private Long id;
@ManyToOne
private Pizza pizza;
private String name;
public void setName(final String name) {
this.name = name;
}
public String getName() {
return name;
}
public void setPizza(final Pizza pizza) {
this.pizza = pizza;
}
}
private static void waitALittle(SessionFactoryScope scope) {
boolean waitLonger =
// informix clock has low resolution on Mac
scope.getSessionFactory().getJdbcServices().getDialect()
instanceof InformixDialect;
try {
Thread.sleep( waitLonger ? 1_200 : 2 );
}
catch (InterruptedException e) {
throw new HibernateError( "Unexpected wakeup from test sleep" );
}
}
}
| Topping |
java | apache__kafka | group-coordinator/src/test/java/org/apache/kafka/coordinator/group/modern/consumer/ResolvedRegularExpressionTest.java | {
"start": 1052,
"end": 2201
} | class ____ {
@Test
public void testConstructor() {
ResolvedRegularExpression resolvedRegularExpression = new ResolvedRegularExpression(
Set.of("foo", "bar"),
10L,
12345L
);
assertEquals(Set.of("foo", "bar"), resolvedRegularExpression.topics());
assertEquals(10L, resolvedRegularExpression.version());
assertEquals(12345L, resolvedRegularExpression.timestamp());
}
@Test
public void testEquals() {
assertEquals(
new ResolvedRegularExpression(
Set.of("foo", "bar"),
10L,
12345L
),
new ResolvedRegularExpression(
Set.of("foo", "bar"),
10L,
12345L
)
);
assertNotEquals(
new ResolvedRegularExpression(
Set.of("foo", "bar"),
10L,
12345L
),
new ResolvedRegularExpression(
Set.of("foo"),
10L,
12345L
)
);
}
}
| ResolvedRegularExpressionTest |
java | mockito__mockito | mockito-core/src/test/java/org/mockito/internal/creation/bytebuddy/InlineDelegateByteBuddyMockMakerTest.java | {
"start": 27047,
"end": 27183
} | class ____ {
final Object p1;
private Outer(Object p1) {
this.p1 = p1;
}
private static | Outer |
java | elastic__elasticsearch | x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Sinh.java | {
"start": 1019,
"end": 2672
} | class ____ extends AbstractTrigonometricFunction {
public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(Expression.class, "Sinh", Sinh::new);
@FunctionInfo(
returnType = "double",
description = "Returns the {wikipedia}/Hyperbolic_functions[hyperbolic sine] of a number.",
examples = @Example(file = "floats", tag = "sinh")
)
public Sinh(
Source source,
@Param(
name = "number",
type = { "double", "integer", "long", "unsigned_long" },
description = "Numeric expression. If `null`, the function returns `null`."
) Expression angle
) {
super(source, angle);
}
private Sinh(StreamInput in) throws IOException {
super(in);
}
@Override
public String getWriteableName() {
return ENTRY.name;
}
@Override
protected EvalOperator.ExpressionEvaluator.Factory doubleEvaluator(EvalOperator.ExpressionEvaluator.Factory field) {
return new SinhEvaluator.Factory(source(), field);
}
@Override
public Expression replaceChildren(List<Expression> newChildren) {
return new Sinh(source(), newChildren.get(0));
}
@Override
protected NodeInfo<? extends Expression> info() {
return NodeInfo.create(this, Sinh::new, field());
}
@Evaluator(warnExceptions = ArithmeticException.class)
static double process(double val) {
double res = Math.sinh(val);
if (Double.isNaN(res) || Double.isInfinite(res)) {
throw new ArithmeticException("sinh overflow");
}
return res;
}
}
| Sinh |
java | quarkusio__quarkus | independent-projects/arc/tests/src/test/java/io/quarkus/arc/test/buildextension/interceptor/bindings/AdditionalInterceptorBindingsPredicateTest.java | {
"start": 1042,
"end": 3035
} | class ____ {
@RegisterExtension
public ArcTestContainer container = ArcTestContainer.builder()
.beanClasses(SomeBean.class, SomeOtherBean.class,
MyInterceptor.class, ToBeBinding.class,
ToBeBindingWithBindingField.class, MyInterceptorForBindingField.class,
ToBeBindingWithNonBindingField.class, MyInterceptorForNonBindingField.class)
.interceptorBindingRegistrars(new MyBindingRegistrar())
.build();
@Test
public void testBindingWasRegistered() {
MyInterceptor.INTERCEPTOR_TRIGGERED = false;
assertAfterCall(SomeBean.class, () -> MyInterceptor.INTERCEPTOR_TRIGGERED, true);
}
@Test
public void testBindingWasRegisteredWithNonBindingField() {
MyInterceptorForNonBindingField.INTERCEPTOR_TRIGGERED = false;
assertAfterCall(SomeBean.class, () -> MyInterceptorForNonBindingField.INTERCEPTOR_TRIGGERED, true);
}
@Test
public void testBindingWasNotRegisteredWithMismatchingBindingField() {
MyInterceptorForBindingField.INTERCEPTOR_TRIGGERED = false;
assertAfterCall(SomeBean.class, () -> MyInterceptorForBindingField.INTERCEPTOR_TRIGGERED, false);
}
@Test
public void testBindingWasRegisteredWithMatchingBindingField() {
MyInterceptorForBindingField.INTERCEPTOR_TRIGGERED = false;
assertAfterCall(SomeOtherBean.class, () -> MyInterceptorForBindingField.INTERCEPTOR_TRIGGERED, true);
}
private void assertAfterCall(Class<? extends Pingable> beanClass, Supplier<Boolean> check, boolean expected) {
Assertions.assertTrue(Arc.container().instance(beanClass).isAvailable());
Assertions.assertFalse(check.get());
Arc.container().instance(beanClass).get().ping();
Assertions.assertEquals(expected, check.get());
}
@Inherited
@Target({ TYPE, METHOD, FIELD, PARAMETER })
@Retention(RUNTIME)
@ | AdditionalInterceptorBindingsPredicateTest |
java | spring-projects__spring-boot | buildpack/spring-boot-buildpack-platform/src/test/java/org/springframework/boot/buildpack/platform/build/PrintStreamBuildLogTests.java | {
"start": 1674,
"end": 4083
} | class ____ {
@Test
void printsExpectedOutput() throws Exception {
TestPrintStream out = new TestPrintStream();
PrintStreamBuildLog log = new PrintStreamBuildLog(out);
BuildRequest request = mock(BuildRequest.class);
ImageReference name = ImageReference.of("my-app:latest");
ImageReference builderImageReference = ImageReference.of("cnb/builder");
ImagePlatform platform = ImagePlatform.of("linux/arm64/v1");
Image builderImage = mock(Image.class);
given(builderImage.getDigests()).willReturn(Collections.singletonList("00000001"));
ImageReference runImageReference = ImageReference.of("cnb/runner");
Image runImage = mock(Image.class);
given(runImage.getDigests()).willReturn(Collections.singletonList("00000002"));
given(request.getName()).willReturn(name);
ImageReference tag = ImageReference.of("my-app:1.0");
given(request.getTags()).willReturn(Collections.singletonList(tag));
log.start(request);
Consumer<TotalProgressEvent> pullBuildImageConsumer = log.pullingImage(builderImageReference, null,
ImageType.BUILDER);
pullBuildImageConsumer.accept(new TotalProgressEvent(100));
log.pulledImage(builderImage, ImageType.BUILDER);
Consumer<TotalProgressEvent> pullRunImageConsumer = log.pullingImage(runImageReference, platform,
ImageType.RUNNER);
pullRunImageConsumer.accept(new TotalProgressEvent(100));
log.pulledImage(runImage, ImageType.RUNNER);
log.executingLifecycle(request, LifecycleVersion.parse("0.5"), Cache.volume(VolumeName.of("pack-abc.cache")));
Consumer<LogUpdateEvent> phase1Consumer = log.runningPhase(request, "alphabet");
phase1Consumer.accept(mockLogEvent("one"));
phase1Consumer.accept(mockLogEvent("two"));
phase1Consumer.accept(mockLogEvent("three"));
Consumer<LogUpdateEvent> phase2Consumer = log.runningPhase(request, "basket");
phase2Consumer.accept(mockLogEvent("spring"));
phase2Consumer.accept(mockLogEvent("boot"));
log.executedLifecycle(request);
log.taggedImage(tag);
String expected = FileCopyUtils.copyToString(new InputStreamReader(
getClass().getResourceAsStream("print-stream-build-log.txt"), StandardCharsets.UTF_8));
assertThat(out.toString()).isEqualToIgnoringNewLines(expected);
}
private LogUpdateEvent mockLogEvent(String string) {
LogUpdateEvent event = mock(LogUpdateEvent.class);
given(event.toString()).willReturn(string);
return event;
}
static | PrintStreamBuildLogTests |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/runtime/jobgraph/JobVertexResourceRequirements.java | {
"start": 1418,
"end": 3822
} | class ____ implements Serializable {
private static final String FIELD_NAME_LOWER_BOUND = "lowerBound";
private static final String FIELD_NAME_UPPER_BOUND = "upperBound";
@JsonProperty(FIELD_NAME_LOWER_BOUND)
private final int lowerBound;
@JsonProperty(FIELD_NAME_UPPER_BOUND)
private final int upperBound;
@JsonCreator
public Parallelism(
@JsonProperty(FIELD_NAME_LOWER_BOUND) int lowerBound,
@JsonProperty(FIELD_NAME_UPPER_BOUND) int upperBound) {
this.lowerBound = lowerBound;
this.upperBound = upperBound;
}
public int getLowerBound() {
return lowerBound;
}
public int getUpperBound() {
return upperBound;
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
final Parallelism that = (Parallelism) o;
return lowerBound == that.lowerBound && upperBound == that.upperBound;
}
@Override
public int hashCode() {
return Objects.hash(lowerBound, upperBound);
}
@Override
public String toString() {
return "Parallelism{" + "lowerBound=" + lowerBound + ", upperBound=" + upperBound + '}';
}
}
@JsonProperty(FIELD_NAME_PARALLELISM)
private final Parallelism parallelism;
public JobVertexResourceRequirements(
@JsonProperty(FIELD_NAME_PARALLELISM) Parallelism parallelism) {
this.parallelism = checkNotNull(parallelism);
}
public Parallelism getParallelism() {
return parallelism;
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
final JobVertexResourceRequirements that = (JobVertexResourceRequirements) o;
return parallelism.equals(that.parallelism);
}
@Override
public int hashCode() {
return Objects.hash(parallelism);
}
@Override
public String toString() {
return "JobVertexResourceRequirements{" + "parallelism=" + parallelism + '}';
}
}
| Parallelism |
java | spring-projects__spring-boot | core/spring-boot/src/test/java/org/springframework/boot/convert/ApplicationConversionServiceTests.java | {
"start": 17833,
"end": 17934
} | interface ____<S, T> extends Converter<S, T>, ConditionalConverter {
}
}
| ConditionalConverterConverter |
java | google__guava | android/guava/src/com/google/common/util/concurrent/Callables.java | {
"start": 1135,
"end": 4400
} | class ____ {
private Callables() {}
/** Creates a {@code Callable} which immediately returns a preset value each time it is called. */
public static <T extends @Nullable Object> Callable<T> returning(@ParametricNullness T value) {
return () -> value;
}
/**
* Creates an {@link AsyncCallable} from a {@link Callable}.
*
* <p>The {@link AsyncCallable} returns the {@link ListenableFuture} resulting from {@link
* ListeningExecutorService#submit(Callable)}.
*
* @since 20.0
*/
@J2ktIncompatible
@GwtIncompatible
public static <T extends @Nullable Object> AsyncCallable<T> asAsyncCallable(
Callable<T> callable, ListeningExecutorService listeningExecutorService) {
checkNotNull(callable);
checkNotNull(listeningExecutorService);
return () -> listeningExecutorService.submit(callable);
}
/**
* Wraps the given callable such that for the duration of {@link Callable#call} the thread that is
* running will have the given name.
*
* @param callable The callable to wrap
* @param nameSupplier The supplier of thread names, {@link Supplier#get get} will be called once
* for each invocation of the wrapped callable.
*/
@J2ktIncompatible
@GwtIncompatible // threads
static <T extends @Nullable Object> Callable<T> threadRenaming(
Callable<T> callable, Supplier<String> nameSupplier) {
checkNotNull(nameSupplier);
checkNotNull(callable);
return () -> {
Thread currentThread = Thread.currentThread();
String oldName = currentThread.getName();
boolean restoreName = trySetName(nameSupplier.get(), currentThread);
try {
return callable.call();
} finally {
if (restoreName) {
boolean unused = trySetName(oldName, currentThread);
}
}
};
}
/**
* Wraps the given runnable such that for the duration of {@link Runnable#run} the thread that is
* running with have the given name.
*
* @param task The Runnable to wrap
* @param nameSupplier The supplier of thread names, {@link Supplier#get get} will be called once
* for each invocation of the wrapped callable.
*/
@J2ktIncompatible
@GwtIncompatible // threads
static Runnable threadRenaming(Runnable task, Supplier<String> nameSupplier) {
checkNotNull(nameSupplier);
checkNotNull(task);
return () -> {
Thread currentThread = Thread.currentThread();
String oldName = currentThread.getName();
boolean restoreName = trySetName(nameSupplier.get(), currentThread);
try {
task.run();
} finally {
if (restoreName) {
boolean unused = trySetName(oldName, currentThread);
}
}
};
}
/** Tries to set name of the given {@link Thread}, returns true if successful. */
@J2ktIncompatible
@GwtIncompatible // threads
private static boolean trySetName(String threadName, Thread currentThread) {
/*
* setName should usually succeed, but the security manager can prohibit it. Is there a way to
* see if we have the modifyThread permission without catching an exception?
*/
try {
currentThread.setName(threadName);
return true;
} catch (SecurityException e) {
return false;
}
}
}
| Callables |
java | alibaba__druid | core/src/main/java/com/alibaba/druid/sql/dialect/informix/Informix.java | {
"start": 131,
"end": 227
} | class ____ {
public static final SQLDialect DIALECT = SQLDialect.of(DbType.informix);
}
| Informix |
java | apache__flink | flink-table/flink-table-common/src/main/java/org/apache/flink/table/api/DataTypes.java | {
"start": 41320,
"end": 41517
} | class ____ provided, the system will convert a structured
* object to a JVM object at the edges of the table ecosystem (e.g. when bridging to a function
* or connector). The implementation | is |
java | reactor__reactor-core | reactor-test/src/main/java/reactor/test/StepVerifier.java | {
"start": 3636,
"end": 18085
} | interface ____ {
/**
* Default verification timeout (see {@link #verify()}) is "no timeout".
*
* @see #setDefaultTimeout(Duration)
* @see #resetDefaultTimeout()
*/
Duration DEFAULT_VERIFY_TIMEOUT = Duration.ZERO;
/**
* Set the {@link #verify()} timeout for all {@link StepVerifier} created through the
* factory methods ({@link #create(Publisher)}, {@link #withVirtualTime(Supplier)}, etc.).
* <p>
* This affects ALL such verifiers created after this call, until a call to either
* this method or {@link #resetDefaultTimeout()}.
*
* @param timeout the timeout to use for {@link #verify()} calls on all {@link StepVerifier}
* created through the factory methods after this call. {@literal null} is interpreted
* as a call to {@link #resetDefaultTimeout()}.
*/
static void setDefaultTimeout(@Nullable Duration timeout) {
DefaultStepVerifierBuilder.defaultVerifyTimeout =
timeout == null ? DEFAULT_VERIFY_TIMEOUT : timeout;
}
/**
* Reset the {@link #verify()} timeout to the "unlimited" default.
* <p>
* This affects ALL such verifiers created after this call, until a call to
* {@link #setDefaultTimeout(Duration)}.
*/
static void resetDefaultTimeout() {
setDefaultTimeout(null);
}
/**
* Prepare a new {@code StepVerifier} in an uncontrolled environment:
* {@link Step#thenAwait} will block in real time.
* Each {@link #verify()} will fully (re)play the scenario.
*
* @param publisher the publisher to subscribe to and verify
*
* @return a builder for expectation declaration and ultimately verification.
*/
static <T> FirstStep<T> create(Publisher<? extends T> publisher) {
return create(publisher, Long.MAX_VALUE);
}
/**
* Prepare a new {@code StepVerifier} in an uncontrolled environment:
* {@link Step#thenAwait} will block in real time.
* Each {@link #verify()} will fully (re)play the scenario.
* The verification will request a specified amount of values.
*
* @param publisher the publisher to subscribe to and verify
* @param n the amount of items to request
*
* @return a builder for expectation declaration and ultimately verification.
*/
static <T> FirstStep<T> create(Publisher<? extends T> publisher, long n) {
return create(publisher, StepVerifierOptions.create().initialRequest(n));
}
/**
* Prepare a new {@code StepVerifier} in an uncontrolled environment:
* {@link Step#thenAwait} will block in real time.
* Each {@link #verify()} will fully (re)play the scenario.
* The verification will request a specified amount of values according to
* the {@link StepVerifierOptions options} passed.
*
* @param publisher the publisher to subscribe to
* @param options the options for the verification
*
* @return a builder for expectation declaration and ultimately verification.
*/
static <T> FirstStep<T> create(Publisher<? extends T> publisher, StepVerifierOptions options) {
return DefaultStepVerifierBuilder.newVerifier(options, () -> publisher);
}
/**
* Prepare a new {@code StepVerifier} in a controlled environment using
* {@link VirtualTimeScheduler} to manipulate a virtual clock via
* {@link Step#thenAwait}. The scheduler is injected into all {@link Schedulers} factories,
* which means that any operator created within the lambda without a specific scheduler
* will use virtual time.
* Each {@link #verify()} will fully (re)play the scenario.
* The verification will request an unbounded amount of values.
* <p>
* Note that virtual time, {@link Step#thenAwait(Duration)} sources that are
* subscribed on a different {@link reactor.core.scheduler.Scheduler} (eg. a source
* that is initialized outside of the lambda with a dedicated Scheduler) and
* delays introduced within the data path (eg. an interval in a flatMap) are not
* always compatible, as this can perform the clock move BEFORE the interval schedules
* itself, resulting in the interval never playing out.
*
* @param scenarioSupplier a mandatory supplier of the {@link Publisher} to subscribe
* to and verify. In order for operators to use virtual time, they must be invoked
* from within the lambda.
* @param <T> the type of the subscriber
*
* @return a builder for expectation declaration and ultimately verification.
*/
static <T> FirstStep<T> withVirtualTime(Supplier<? extends Publisher<? extends T>> scenarioSupplier) {
return withVirtualTime(scenarioSupplier, Long.MAX_VALUE);
}
/**
* Prepare a new {@code StepVerifier} in a controlled environment using
* {@link VirtualTimeScheduler} to manipulate a virtual clock via
* {@link Step#thenAwait}. The scheduler is injected into all {@link Schedulers} factories,
* which means that any operator created within the lambda without a specific scheduler
* will use virtual time.
* Each {@link #verify()} will fully (re)play the scenario.
* The verification will request a specified amount of values.
* <p>
* Note that virtual time, {@link Step#thenAwait(Duration)} sources that are
* subscribed on a different {@link reactor.core.scheduler.Scheduler} (eg. a source
* that is initialized outside of the lambda with a dedicated Scheduler) and
* delays introduced within the data path (eg. an interval in a flatMap) are not
* always compatible, as this can perform the clock move BEFORE the interval schedules
* itself, resulting in the interval never playing out.
*
* @param scenarioSupplier a mandatory supplier of the {@link Publisher} to subscribe
* to and verify. In order for operators to use virtual time, they must be invoked
* from within the lambda.
* @param n the amount of items to request (must be >= 0)
* @param <T> the type of the subscriber
*
* @return a builder for expectation declaration and ultimately verification.
*/
static <T> FirstStep<T> withVirtualTime(Supplier<? extends Publisher<? extends T>> scenarioSupplier,
long n) {
return withVirtualTime(scenarioSupplier, () -> VirtualTimeScheduler.getOrSet(true), n);
}
/**
* Prepare a new {@code StepVerifier} in a controlled environment using
* a user-provided {@link VirtualTimeScheduler} to manipulate a virtual clock via
* {@link Step#thenAwait}. The scheduler is injected into all {@link Schedulers} factories,
* which means that any operator created within the lambda without a specific scheduler
* will use virtual time.
* Each {@link #verify()} will fully (re)play the scenario.
* The verification will request a specified amount of values.
* <p>
* Note that virtual time, {@link Step#thenAwait(Duration)} sources that are
* subscribed on a different {@link reactor.core.scheduler.Scheduler} (eg. a source
* that is initialized outside of the lambda with a dedicated Scheduler) and
* delays introduced within the data path (eg. an interval in a flatMap) are not
* always compatible, as this can perform the clock move BEFORE the interval schedules
* itself, resulting in the interval never playing out.
*
* @param scenarioSupplier a mandatory supplier of the {@link Publisher} to subscribe
* to and verify. In order for operators to use virtual time, they must be invoked
* from within the lambda.
* @param vtsLookup the supplier of the {@link VirtualTimeScheduler} to inject and
* manipulate during verification.
* @param n the amount of items to request (must be >= 0)
* @param <T> the type of the subscriber
*
* @return a builder for expectation declaration and ultimately verification.
*/
static <T> FirstStep<T> withVirtualTime(
Supplier<? extends Publisher<? extends T>> scenarioSupplier,
Supplier<? extends VirtualTimeScheduler> vtsLookup,
long n) {
return withVirtualTime(scenarioSupplier, StepVerifierOptions.create()
.initialRequest(n)
.virtualTimeSchedulerSupplier(vtsLookup));
}
/**
* Prepare a new {@code StepVerifier} in a controlled environment using
* a user-provided {@link VirtualTimeScheduler} to manipulate a virtual clock via
* {@link Step#thenAwait}. The scheduler is injected into all {@link Schedulers} factories,
* which means that any operator created within the lambda without a specific scheduler
* will use virtual time.
* Each {@link #verify()} will fully (re)play the scenario.
* The verification will request a specified amount of values according to
* the provided {@link StepVerifierOptions options}.
* <p>
* If no {@link VirtualTimeScheduler} {@link Supplier} is set in the options, this
* method will make a {@link StepVerifierOptions#copy() copy} of said options and
* set up the default supplier (like the one in {@link #withVirtualTime(Supplier)}).
* <p>
* Note that virtual time, {@link Step#thenAwait(Duration)} sources that are
* subscribed on a different {@link reactor.core.scheduler.Scheduler} (eg. a source
* that is initialized outside of the lambda with a dedicated Scheduler) and
* delays introduced within the data path (eg. an interval in a flatMap) are not
* always compatible, as this can perform the clock move BEFORE the interval schedules
* itself, resulting in the interval never playing out.
*
* @param scenarioSupplier a mandatory supplier of the {@link Publisher} to subscribe
* to and verify. In order for operators to use virtual time, they must be invoked
* from within the lambda.
* @param options the verification options, including the supplier of the
* {@link VirtualTimeScheduler} to inject and manipulate during verification
* (see note above in case options doesn't define such a supplier)
* @param <T> the type of the subscriber
*
* @return a builder for expectation declaration and ultimately verification.
*/
static <T> FirstStep<T> withVirtualTime(
Supplier<? extends Publisher<? extends T>> scenarioSupplier,
StepVerifierOptions options) {
DefaultStepVerifierBuilder.checkPositive(options.getInitialRequest());
Objects.requireNonNull(scenarioSupplier, "scenarioSupplier");
//force the default VTS supplier if the provided options doesn't define a VTS supplier.
//note we make a copy just in case the original options are reused.
if (options.getVirtualTimeSchedulerSupplier() == null) {
options = options
.copy()
.virtualTimeSchedulerSupplier(() -> VirtualTimeScheduler.getOrSet(true));
}
return DefaultStepVerifierBuilder.newVerifier(options, scenarioSupplier);
}
/**
* Activate debug logging of a description of the test scenario, as well as
* some details about certain verification steps.
*
* @return the verifier for final {@link #verify()} call
*/
StepVerifier log();
/**
* Trigger the subscription and prepare for verifications but doesn't block. Calling one
* of the {@link #verify()} methods afterwards will block until the sequence is validated
* and throw if assertions fail.
* <p>
* Calling this method more than once in a row should be a NO-OP, returning the same
* instance as the first call.
*
* @return a {@link StepVerifier} that is in progress but on which one can chose to block later.
*/
StepVerifier verifyLater();
/**
* Verify the signals received by this subscriber. Unless a default timeout has been
* set before construction of the {@link StepVerifier} via {@link StepVerifier#setDefaultTimeout(Duration)},
* this method will <strong>block</strong> until the stream has been terminated
* (either through {@link Subscriber#onComplete()}, {@link Subscriber#onError(Throwable)} or
* {@link Subscription#cancel()}). Depending on the declared expectations and actions,
* notably in case of undersized manual requests, such a verification could also block
* indefinitely.
*
* @return the actual {@link Duration} the verification took.
* @throws AssertionError in case of expectation failures
* @see #verify(Duration)
* @see #setDefaultTimeout(Duration)
*/
Duration verify() throws AssertionError;
/**
* Verify the signals received by this subscriber. This method will
* <strong>block</strong> for up to the given duration or until the stream has been
* terminated (either through {@link Subscriber#onComplete()},
* {@link Subscriber#onError(Throwable)} or {@link Subscription#cancel()}). Use
* {@link Duration#ZERO} for an unlimited wait for termination.
*
* @param duration the maximum duration to wait for the sequence to terminate, or
* {@link Duration#ZERO} for unlimited wait.
* @return the actual {@link Duration} the verification took.
* @throws AssertionError in case of expectation failures, or when the verification
* times out
*/
Duration verify(Duration duration) throws AssertionError;
/**
* {@link #verify() Verifies} the signals received by this subscriber, then exposes
* various {@link Assertions assertion methods} on the final state.
* <p>
* Note that like {@link #verify()}, this method will <strong>block</strong> until
* the stream has been terminated (either through {@link Subscriber#onComplete()},
* {@link Subscriber#onError(Throwable)} or {@link Subscription#cancel()}).
* Depending on the declared expectations and actions, notably in case of undersized
* manual requests, such a verification could also block indefinitely. Use
* {@link #setDefaultTimeout(Duration)} to globally add a timeout on verify()-derived
* methods.
*
* @return the actual {@link Duration} the verification took.
* @throws AssertionError in case of expectation failures
*/
Assertions verifyThenAssertThat();
/**
* {@link #verify() Verifies} the signals received by this subscriber, then exposes
* various {@link Assertions assertion methods} on the final state.
* <p>
* Note that like {@link #verify()}, this method will <strong>block</strong> until
* the stream has been terminated (either through {@link Subscriber#onComplete()},
* {@link Subscriber#onError(Throwable)} or {@link Subscription#cancel()}).
* Depending on the declared expectations and actions, notably in case of undersized
* manual requests, such a verification could also block indefinitely. As a consequence
* you can use the {@link Duration} {@code duration} parameter to set a timeout.
*
* @param duration the maximum duration to wait for the sequence to terminate, or
* {@link Duration#ZERO} for unlimited wait.
* @return {@link Assertions} for chaining post-verification state assertions
*/
Assertions verifyThenAssertThat(Duration duration);
/**
* Define a builder for terminal states.
*/
| StepVerifier |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/query/sqm/tree/insert/SqmConflictClause.java | {
"start": 986,
"end": 7958
} | class ____<T> implements SqmVisitableNode, JpaConflictClause<T> {
private final SqmInsertStatement<T> insertStatement;
private final SqmRoot<T> excludedRoot;
private @Nullable String constraintName;
private @Nullable List<SqmPath<?>> constraintPaths;
private @Nullable SqmConflictUpdateAction<T> updateAction;
public SqmConflictClause(SqmInsertStatement<T> insertStatement) {
this.insertStatement = insertStatement;
this.excludedRoot = new SqmRoot<>(
insertStatement.getTarget().getManagedType(),
"excluded",
false,
insertStatement.nodeBuilder()
);
}
private SqmConflictClause(
SqmInsertStatement<T> insertStatement,
SqmRoot<T> excludedRoot,
@Nullable String constraintName,
@Nullable List<SqmPath<?>> constraintPaths,
@Nullable SqmConflictUpdateAction<T> updateAction) {
this.insertStatement = insertStatement;
this.excludedRoot = excludedRoot;
this.constraintName = constraintName;
this.constraintPaths = constraintPaths == null ? null : Collections.unmodifiableList( constraintPaths );
this.updateAction = updateAction;
}
@Override
public SqmRoot<T> getExcludedRoot() {
return excludedRoot;
}
@Override
public @Nullable String getConstraintName() {
return constraintName;
}
@Override
public SqmConflictClause<T> conflictOnConstraint(@Nullable String constraintName) {
if ( constraintPaths != null && !constraintPaths.isEmpty() ) {
throw new IllegalStateException( "Constraint paths were already set: " + constraintPaths );
}
this.constraintName = constraintName;
return this;
}
@Override
public JpaConflictClause<T> conflictOnConstraintAttributes(String... attributes) {
final ArrayList<SqmPath<?>> paths = new ArrayList<>( attributes.length );
for ( String attribute : attributes ) {
paths.add( insertStatement.getTarget().get( attribute ) );
}
return conflictOnConstraintPaths( paths );
}
@Override
public JpaConflictClause<T> conflictOnConstraintAttributes(SingularAttribute<T, ?>... attributes) {
final ArrayList<SqmPath<?>> paths = new ArrayList<>( attributes.length );
for ( SingularAttribute<T, ?> attribute : attributes ) {
paths.add( insertStatement.getTarget().get( attribute ) );
}
return conflictOnConstraintPaths( paths );
}
@Override
public SqmConflictClause<T> conflictOnConstraintPaths(Path<?>... paths) {
return conflictOnConstraintPaths( Arrays.asList( paths ) );
}
@Override
public SqmConflictClause<T> conflictOnConstraintPaths(List<? extends Path<?>> paths) {
if ( constraintName != null ) {
throw new IllegalStateException( "Constraint name was already set: " + constraintName );
}
//noinspection unchecked
this.constraintPaths = (List<SqmPath<?>>) Collections.unmodifiableList( paths );
return this;
}
@Override
public List<SqmPath<?>> getConstraintPaths() {
return constraintPaths == null
? Collections.emptyList()
: constraintPaths;
}
@Override
public SqmConflictUpdateAction<T> createConflictUpdateAction() {
return new SqmConflictUpdateAction<>( insertStatement );
}
@Override
public @Nullable SqmConflictUpdateAction<T> getConflictAction() {
return updateAction;
}
@Override
public JpaConflictClause<T> onConflictDo(@Nullable JpaConflictUpdateAction<T> action) {
this.updateAction = (SqmConflictUpdateAction<T>) action;
return this;
}
@Override
public SqmConflictUpdateAction<T> onConflictDoUpdate() {
final SqmConflictUpdateAction<T> conflictUpdateAction = createConflictUpdateAction();
onConflictDo( conflictUpdateAction );
return conflictUpdateAction;
}
@Override
public NodeBuilder nodeBuilder() {
return insertStatement.nodeBuilder();
}
@Override
public SqmConflictClause<T> copy(SqmCopyContext context) {
final SqmConflictClause<T> existing = context.getCopy( this );
if ( existing != null ) {
return existing;
}
return context.registerCopy(
this,
new SqmConflictClause<>(
insertStatement.copy( context ),
excludedRoot.copy( context ),
constraintName,
constraintPaths == null ? null : copyOf( constraintPaths, context ),
updateAction == null ? null : updateAction.copy( context )
)
);
}
private List<SqmPath<?>> copyOf(List<SqmPath<?>> constraintPaths, SqmCopyContext context) {
if ( constraintPaths.isEmpty() ) {
return constraintPaths;
}
final ArrayList<SqmPath<?>> copies = new ArrayList<>( constraintPaths.size() );
for ( SqmPath<?> constraintPath : constraintPaths ) {
copies.add( constraintPath.copy( context ) );
}
return copies;
}
@Override
public <X> X accept(SemanticQueryWalker<X> walker) {
return walker.visitConflictClause( this );
}
public void appendHqlString(StringBuilder hql, SqmRenderContext context) {
hql.append( " on conflict" );
final List<SqmPath<?>> constraintPaths = getConstraintPaths();
if ( constraintName != null ) {
hql.append( " on constraint " );
hql.append( constraintName );
}
else if ( !constraintPaths.isEmpty() ) {
char separator = '(';
for ( SqmPath<?> path : constraintPaths ) {
hql.append( separator );
appendUnqualifiedPath( hql, path );
separator = ',';
}
hql.append( ')' );
}
if ( updateAction == null ) {
hql.append( " do nothing" );
}
else {
updateAction.appendHqlString( hql, context );
}
}
private static void appendUnqualifiedPath(StringBuilder sb, SqmPath<?> path) {
final SqmPath<?> lhs = path.getLhs();
if ( lhs == null ) {
// Skip rendering the root
return;
}
appendUnqualifiedPath( sb, lhs );
if ( lhs.getLhs() != null ) {
sb.append( '.' );
}
sb.append( path.getReferencedPathSource().getPathName() );
}
@Override
public boolean equals(@Nullable Object object) {
return object instanceof SqmConflictClause<?> that
&& excludedRoot.equals( that.excludedRoot )
&& Objects.equals( constraintName, that.constraintName )
&& Objects.equals( constraintPaths, that.constraintPaths )
&& Objects.equals( updateAction, that.updateAction );
}
@Override
public int hashCode() {
int result = excludedRoot.hashCode();
result = 31 * result + Objects.hashCode( constraintName );
result = 31 * result + Objects.hashCode( constraintPaths );
result = 31 * result + Objects.hashCode( updateAction );
return result;
}
@Override
public boolean isCompatible(Object object) {
return object instanceof SqmConflictClause<?> that
&& excludedRoot.isCompatible( that.excludedRoot )
&& Objects.equals( constraintName, that.constraintName )
&& SqmCacheable.areCompatible( constraintPaths, that.constraintPaths )
&& SqmCacheable.areCompatible( updateAction, that.updateAction );
}
@Override
public int cacheHashCode() {
int result = excludedRoot.cacheHashCode();
result = 31 * result + Objects.hashCode( constraintName );
result = 31 * result + SqmCacheable.cacheHashCode( constraintPaths );
result = 31 * result + SqmCacheable.cacheHashCode( updateAction );
return result;
}
}
| SqmConflictClause |
java | assertj__assertj-core | assertj-core/src/test/java/org/assertj/core/api/longpredicate/LongPredicateAssert_accepts_Test.java | {
"start": 1414,
"end": 4001
} | class ____ extends LongPredicateAssertBaseTest {
@Test
void should_fail_when_predicate_is_null() {
assertThatExceptionOfType(AssertionError.class).isThrownBy(() -> assertThat((LongPredicate) null).accepts(1L, 2L))
.withMessage(actualIsNull());
}
@Test
void should_fail_when_predicate_does_not_accept_value() {
LongPredicate predicate = val -> val <= 2;
Predicate<Long> wrapPredicate = predicate::test;
long expectedValue = 3;
assertThatExceptionOfType(AssertionError.class).isThrownBy(() -> assertThat(predicate).accepts(expectedValue))
.withMessage(shouldAccept(wrapPredicate, expectedValue,
PredicateDescription.GIVEN).create());
}
@Test
void should_fail_when_predicate_does_not_accept_value_with_string_description() {
LongPredicate predicate = val -> val <= 2;
Predicate<Long> wrapPredicate = predicate::test;
long expectedValue = 3;
assertThatExceptionOfType(AssertionError.class).isThrownBy(() -> assertThat(predicate).as("test").accepts(expectedValue))
.withMessage("[test] " + shouldAccept(wrapPredicate, expectedValue,
PredicateDescription.GIVEN).create());
}
@Test
void should_pass_when_predicate_accepts_value() {
LongPredicate predicate = val -> val <= 2;
assertThat(predicate).accepts(1);
}
@Test
void should_fail_when_predicate_does_not_accept_values() {
LongPredicate predicate = val -> val <= 2;
long[] matchValues = new long[] { 1L, 2L, 3L };
assertThatExceptionOfType(AssertionError.class).isThrownBy(() -> assertThat(predicate).accepts(matchValues))
.withMessage(elementsShouldMatch(matchValues, 3L,
PredicateDescription.GIVEN).create());
}
@Test
void should_pass_when_predicate_accepts_all_values() {
LongPredicate predicate = val -> val <= 2;
assertThat(predicate).accepts(1L, 2L);
}
@Override
protected LongPredicateAssert invoke_api_method() {
return assertions.accepts(1L, 2L);
}
@Override
protected void verify_internal_effects() {
verify(iterables).assertAllMatch(getInfo(assertions), newArrayList(1L, 2L), wrapped, PredicateDescription.GIVEN);
}
}
| LongPredicateAssert_accepts_Test |
java | apache__camel | components/camel-ai/camel-langchain4j-chat/src/main/java/org/apache/camel/component/langchain4j/chat/LangChain4jChatConfiguration.java | {
"start": 1135,
"end": 2462
} | class ____ implements Cloneable {
@UriParam
@Metadata(required = true, defaultValue = "CHAT_SINGLE_MESSAGE")
private LangChain4jChatOperations chatOperation = LangChain4jChatOperations.CHAT_SINGLE_MESSAGE;
@UriParam(label = "advanced")
@Metadata(autowired = true)
private ChatModel chatModel;
public LangChain4jChatConfiguration() {
}
/**
* Operation in case of Endpoint of type CHAT. The value is one of the values of
* org.apache.camel.component.langchain4j.chat.LangChain4jChatOperations
*
* @return
*/
public LangChain4jChatOperations getChatOperation() {
return chatOperation;
}
public void setChatOperation(LangChain4jChatOperations chatOperation) {
this.chatOperation = chatOperation;
}
/**
* Chat Model of type dev.langchain4j.model.chat.ChatModel
*
* @return
*/
public ChatModel getChatModel() {
return chatModel;
}
public void setChatModel(ChatModel chatModel) {
this.chatModel = chatModel;
}
public LangChain4jChatConfiguration copy() {
try {
return (LangChain4jChatConfiguration) super.clone();
} catch (CloneNotSupportedException e) {
throw new RuntimeCamelException(e);
}
}
}
| LangChain4jChatConfiguration |
java | apache__spark | sql/api/src/main/java/org/apache/spark/api/java/function/MapGroupsWithStateFunction.java | {
"start": 1272,
"end": 1421
} | interface ____<K, V, S, R> extends Serializable {
R call(K key, Iterator<V> values, GroupState<S> state) throws Exception;
}
| MapGroupsWithStateFunction |
java | apache__commons-lang | src/main/java/org/apache/commons/lang3/text/translate/LookupTranslator.java | {
"start": 1345,
"end": 3594
} | class ____ extends CharSequenceTranslator {
private final HashMap<String, String> lookupMap;
private final HashSet<Character> prefixSet;
private final int shortest;
private final int longest;
/**
* Define the lookup table to be used in translation
*
* Note that, as of Lang 3.1, the key to the lookup table is converted to a
* java.lang.String. This is because we need the key to support hashCode and
* equals(Object), allowing it to be the key for a HashMap. See LANG-882.
*
* @param lookup CharSequence[][] table of size [*][2]
*/
public LookupTranslator(final CharSequence[]... lookup) {
lookupMap = new HashMap<>();
prefixSet = new HashSet<>();
int tmpShortest = Integer.MAX_VALUE;
int tmpLongest = 0;
if (lookup != null) {
for (final CharSequence[] seq : lookup) {
this.lookupMap.put(seq[0].toString(), seq[1].toString());
this.prefixSet.add(seq[0].charAt(0));
final int sz = seq[0].length();
if (sz < tmpShortest) {
tmpShortest = sz;
}
if (sz > tmpLongest) {
tmpLongest = sz;
}
}
}
this.shortest = tmpShortest;
this.longest = tmpLongest;
}
/**
* {@inheritDoc}
*/
@Override
public int translate(final CharSequence input, final int index, final Writer out) throws IOException {
// check if translation exists for the input at position index
if (prefixSet.contains(input.charAt(index))) {
int max = longest;
if (index + longest > input.length()) {
max = input.length() - index;
}
// implement greedy algorithm by trying maximum match first
for (int i = max; i >= shortest; i--) {
final CharSequence subSeq = input.subSequence(index, index + i);
final String result = lookupMap.get(subSeq.toString());
if (result != null) {
out.write(result);
return i;
}
}
}
return 0;
}
}
| LookupTranslator |
java | elastic__elasticsearch | x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/overallbuckets/OverallBucketsAggregator.java | {
"start": 615,
"end": 3127
} | class ____ implements OverallBucketsProcessor {
private final long bucketSpanSeconds;
private final long bucketSpanMillis;
private double maxOverallScore = 0.0;
private Map<String, Double> maxScoreByJob = new TreeMap<>();
private boolean isInterim = false;
private Long startTime;
private final List<OverallBucket> aggregated = new ArrayList<>();
public OverallBucketsAggregator(TimeValue bucketSpan) {
bucketSpanSeconds = bucketSpan.seconds();
bucketSpanMillis = bucketSpan.millis();
}
@Override
public synchronized void process(List<OverallBucket> buckets) {
if (buckets.isEmpty()) {
return;
}
if (startTime == null) {
startTime = Intervals.alignToFloor(buckets.get(0).getTimestamp().getTime(), bucketSpanMillis);
}
long bucketTime;
for (OverallBucket bucket : buckets) {
bucketTime = bucket.getTimestamp().getTime();
if (bucketTime >= startTime + bucketSpanMillis) {
aggregated.add(outputBucket());
startNextBucket(bucketTime);
}
processBucket(bucket);
}
}
private OverallBucket outputBucket() {
List<OverallBucket.JobInfo> jobs = new ArrayList<>(maxScoreByJob.size());
maxScoreByJob.forEach((key, value) -> jobs.add(new OverallBucket.JobInfo(key, value)));
return new OverallBucket(new Date(startTime), bucketSpanSeconds, maxOverallScore, jobs, isInterim);
}
private void startNextBucket(long bucketTime) {
maxOverallScore = 0.0;
maxScoreByJob.clear();
isInterim = false;
startTime = Intervals.alignToFloor(bucketTime, bucketSpanMillis);
}
private void processBucket(OverallBucket bucket) {
maxOverallScore = Math.max(maxOverallScore, bucket.getOverallScore());
bucket.getJobs().forEach(j -> {
double currentMax = maxScoreByJob.computeIfAbsent(j.getJobId(), k -> 0.0);
if (j.getMaxAnomalyScore() > currentMax) {
maxScoreByJob.put(j.getJobId(), j.getMaxAnomalyScore());
}
});
isInterim |= bucket.isInterim();
}
@Override
public synchronized List<OverallBucket> finish() {
if (startTime != null) {
aggregated.add(outputBucket());
}
return aggregated;
}
@Override
public synchronized int size() {
return aggregated.size();
}
}
| OverallBucketsAggregator |
java | apache__flink | flink-runtime/src/test/java/org/apache/flink/runtime/state/filesystem/CheckpointStateOutputStreamTest.java | {
"start": 2365,
"end": 2464
} | class ____ {
@TempDir private java.nio.file.Path tmp;
private | CheckpointStateOutputStreamTest |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/persistent/PersistentTaskNodeNotAssignedException.java | {
"start": 784,
"end": 1274
} | class ____ extends ElasticsearchException {
public PersistentTaskNodeNotAssignedException(String persistentTaskName) {
super("PersistentTask [{}] has not been yet assigned to a node on this cluster", persistentTaskName);
}
public PersistentTaskNodeNotAssignedException(StreamInput in) throws IOException {
super(in);
}
@Override
public RestStatus status() {
return RestStatus.SERVICE_UNAVAILABLE;
}
}
| PersistentTaskNodeNotAssignedException |
java | apache__flink | flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/factories/TestProcedureCatalogFactory.java | {
"start": 8713,
"end": 9571
} | class ____ implements Procedure {
@ProcedureHint(
input = {@DataTypeHint("STRING"), @DataTypeHint("INT")},
output = @DataTypeHint("STRING"),
argumentNames = {"c", "d"})
public String[] call(ProcedureContext procedureContext, String arg1, Integer arg2) {
return new String[] {arg1 + ", " + arg2};
}
@ProcedureHint(
input = {@DataTypeHint("STRING"), @DataTypeHint("STRING")},
output = @DataTypeHint("STRING"),
argumentNames = {"c", "d"})
public String[] call(ProcedureContext procedureContext, String arg1, String arg2) {
return new String[] {arg1 + ", " + arg2};
}
}
/** A procedure with named arguments and optional arguments. */
public static | NamedArgumentsProcedureWithOverload |
java | apache__logging-log4j2 | log4j-core/src/main/java/org/apache/logging/log4j/core/async/RingBufferLogEventHandler4.java | {
"start": 1218,
"end": 1270
} | class ____ works with Disruptor 4.x.
* * </p>
*/
| only |
java | lettuce-io__lettuce-core | src/main/java/io/lettuce/core/SslOptions.java | {
"start": 4911,
"end": 30869
} | class ____ {
private SslProvider sslProvider = DEFAULT_SSL_PROVIDER;
private String keyStoreType;
private URL keystore;
private char[] keystorePassword = new char[0];
private URL truststore;
private char[] truststorePassword = new char[0];
private String[] protocols = null;
private String[] cipherSuites = null;
private Consumer<SslContextBuilder> sslContextBuilderCustomizer = contextBuilder -> {
};
private Supplier<SSLParameters> sslParametersSupplier = SSLParameters::new;
private KeystoreAction keymanager = KeystoreAction.NO_OP;
private KeystoreAction trustmanager = KeystoreAction.NO_OP;
private Duration sslHandshakeTimeout = Duration.ofSeconds(10);
private Builder() {
}
/**
* Sets the cipher suites to use.
*
* @param cipherSuites cipher suites to use.
* @return {@code this}
* @since 5.3
*/
public Builder cipherSuites(String... cipherSuites) {
LettuceAssert.notNull(cipherSuites, "Cipher suites must not be null");
this.cipherSuites = cipherSuites;
return this;
}
/**
* Use the JDK SSL provider for SSL connections.
*
* @return {@code this}
*/
public Builder jdkSslProvider() {
return sslProvider(SslProvider.JDK);
}
/**
* Use the OpenSSL provider for SSL connections. The OpenSSL provider requires the
* <a href="https://netty.io/wiki/forked-tomcat-native.html">{@code netty-tcnative}</a> dependency with the OpenSSL JNI
* binary.
*
* @return {@code this}
* @throws IllegalStateException if OpenSSL is not available
*/
public Builder openSslProvider() {
return sslProvider(SslProvider.OPENSSL);
}
private Builder sslProvider(SslProvider sslProvider) {
if (sslProvider == SslProvider.OPENSSL) {
if (!OpenSsl.isAvailable()) {
throw new IllegalStateException("OpenSSL SSL Provider is not available");
}
}
this.sslProvider = sslProvider;
return this;
}
/**
* Sets a timeout for the SSL handshake.
*
* @param timeout {@link Duration}.
* @return {@code this}
* @since 5.3.2
*/
public Builder handshakeTimeout(Duration timeout) {
LettuceAssert.notNull(timeout, "SSL Handshake Timeout must not be null");
this.sslHandshakeTimeout = timeout;
return this;
}
/**
* Sets the KeyStore type. Defaults to {@link KeyStore#getDefaultType()} if not set.
*
* @param keyStoreType the keystore type to use, must not be {@code null}.
* @return {@code this}
* @since 5.3
*/
public Builder keyStoreType(String keyStoreType) {
LettuceAssert.notNull(keyStoreType, "KeyStoreType must not be null");
this.keyStoreType = keyStoreType;
return this;
}
/**
* Sets the Keystore file to load client certificates. The key store file must be supported by
* {@link java.security.KeyStore} which is {@link KeyStore#getDefaultType()} by default. The keystore is reloaded on
* each connection attempt that allows to replace certificates during runtime.
*
* @param keystore the keystore file, must not be {@code null}.
* @return {@code this}
* @since 4.4
*/
public Builder keystore(File keystore) {
return keystore(keystore, new char[0]);
}
/**
* Sets the Keystore file to load client certificates. The keystore file must be supported by
* {@link java.security.KeyStore} which is {@link KeyStore#getDefaultType()} by default. The keystore is reloaded on
* each connection attempt that allows to replace certificates during runtime.
*
* @param keystore the keystore file, must not be {@code null}.
* @param keystorePassword the keystore password. May be empty to omit password and the keystore integrity check.
* @return {@code this}
* @since 4.4
*/
public Builder keystore(File keystore, char[] keystorePassword) {
LettuceAssert.notNull(keystore, "Keystore must not be null");
LettuceAssert.isTrue(keystore.exists(), () -> String.format("Keystore file %s does not exist", truststore));
LettuceAssert.isTrue(keystore.isFile(), () -> String.format("Keystore %s is not a file", truststore));
return keystore(Resource.from(keystore), keystorePassword);
}
/**
* Sets the Keystore resource to load client certificates. The keystore file must be supported by
* {@link java.security.KeyStore} which is {@link KeyStore#getDefaultType()} by default. The keystore is reloaded on
* each connection attempt that allows to replace certificates during runtime.
*
* @param keystore the keystore URL, must not be {@code null}.
* @return {@code this}
* @since 4.4
*/
public Builder keystore(URL keystore) {
return keystore(keystore, null);
}
/**
* Sets the Keystore resource to load client certificates. The keystore file must be supported by
* {@link java.security.KeyStore} which is {@link KeyStore#getDefaultType()} by default. The keystore is reloaded on
* each connection attempt that allows to replace certificates during runtime.
*
* @param keystore the keystore file, must not be {@code null}.
* @return {@code this}
* @since 4.4
*/
public Builder keystore(URL keystore, char[] keystorePassword) {
LettuceAssert.notNull(keystore, "Keystore must not be null");
this.keystore = keystore;
return keystore(Resource.from(keystore), keystorePassword);
}
/**
* Sets the key file and its certificate to use for client authentication. The key is reloaded on each connection
* attempt that allows to replace certificates during runtime.
*
* @param keyCertChainFile an X.509 certificate chain file in PEM format.
* @param keyFile a PKCS#8 private key file in PEM format.
* @param keyPassword the password of the {@code keyFile}, or {@code null} if it's not password-protected.
* @return {@code this}
* @since 5.3
*/
public Builder keyManager(File keyCertChainFile, File keyFile, char[] keyPassword) {
LettuceAssert.notNull(keyCertChainFile, "Key certificate file must not be null");
LettuceAssert.notNull(keyFile, "Key file must not be null");
LettuceAssert.isTrue(keyCertChainFile.exists(),
() -> String.format("Key certificate file %s does not exist", keyCertChainFile));
LettuceAssert.isTrue(keyCertChainFile.isFile(),
() -> String.format("Key certificate %s is not a file", keyCertChainFile));
LettuceAssert.isTrue(keyFile.exists(), () -> String.format("Key file %s does not exist", keyFile));
LettuceAssert.isTrue(keyFile.isFile(), () -> String.format("Key %s is not a file", keyFile));
return keyManager(Resource.from(keyCertChainFile), Resource.from(keyFile), keyPassword);
}
/**
* Sets the key and its certificate to use for client authentication. The key is reloaded on each connection attempt
* that allows to replace certificates during runtime.
*
* @param keyCertChain an {@link Resource} for a X.509 certificate chain in PEM format.
* @param key an {@link Resource} for a PKCS#8 private key in PEM format.
* @param keyPassword the password of the {@code keyFile}, or {@code null} if it's not password-protected.
* @return {@code this}
* @since 5.3
* @see Resource
*/
public Builder keyManager(Resource keyCertChain, Resource key, char[] keyPassword) {
LettuceAssert.notNull(keyCertChain, "KeyChain InputStreamProvider must not be null");
LettuceAssert.notNull(key, "Key InputStreamProvider must not be null");
char[] passwordToUse = getPassword(keyPassword);
this.keymanager = (builder, keyStoreType) -> {
try (InputStream keyCertChainIs = keyCertChain.get(); InputStream keyIs = key.get()) {
builder.keyManager(keyCertChainIs, keyIs,
passwordToUse == null || passwordToUse.length == 0 ? null : new String(passwordToUse));
}
};
return this;
}
/**
* Sets the {@link KeyManagerFactory}.
*
* @param keyManagerFactory the {@link KeyManagerFactory} to use.
* @return {@code this}
* @since 5.3
*/
public Builder keyManager(KeyManagerFactory keyManagerFactory) {
LettuceAssert.notNull(keyManagerFactory, "KeyManagerFactory must not be null");
this.keymanager = (builder, keyStoreType) -> builder.keyManager(keyManagerFactory);
return this;
}
/**
* Sets the Java Keystore resource to load client certificates. The keystore file must be supported by
* {@link java.security.KeyStore} which is {@link KeyStore#getDefaultType()} by default. The keystore is reloaded on
* each connection attempt that allows to replace certificates during runtime.
*
* @param resource the provider that opens a {@link InputStream} to the keystore file, must not be {@code null}.
* @param keystorePassword the keystore password. May be empty to omit password and the keystore integrity check.
* @return {@code this}
* @since 5.3
*/
public Builder keystore(Resource resource, char[] keystorePassword) {
LettuceAssert.notNull(resource, "Keystore InputStreamProvider must not be null");
char[] keystorePasswordToUse = getPassword(keystorePassword);
this.keystorePassword = keystorePasswordToUse;
this.keymanager = (builder, keyStoreType) -> {
try (InputStream is = resource.get()) {
builder.keyManager(createKeyManagerFactory(is, keystorePasswordToUse, keyStoreType));
}
};
return this;
}
/**
* Sets the protocol used for the connection established to Redis Server, such as {@code TLSv1.2, TLSv1.1, TLSv1}.
*
* @param protocols list of desired protocols to use.
* @return {@code this}
* @since 5.3
*/
public Builder protocols(String... protocols) {
LettuceAssert.notNull(protocols, "Protocols must not be null");
this.protocols = protocols;
return this;
}
/**
* Sets the Truststore file to load trusted certificates. The truststore file must be supported by
* {@link java.security.KeyStore} which is {@link KeyStore#getDefaultType()} by default. The truststore is reloaded on
* each connection attempt that allows to replace certificates during runtime.
*
* @param truststore the truststore file, must not be {@code null}.
* @return {@code this}
*/
public Builder truststore(File truststore) {
return truststore(truststore, null);
}
/**
* Sets the Truststore file to load trusted certificates. The truststore file must be supported by
* {@link java.security.KeyStore} which is {@link KeyStore#getDefaultType()} by default. The truststore is reloaded on
* each connection attempt that allows to replace certificates during runtime.
*
* @param truststore the truststore file, must not be {@code null}.
* @param truststorePassword the truststore password. May be empty to omit password and the truststore integrity check.
* @return {@code this}
*/
public Builder truststore(File truststore, String truststorePassword) {
LettuceAssert.notNull(truststore, "Truststore must not be null");
LettuceAssert.isTrue(truststore.exists(), () -> String.format("Truststore file %s does not exist", truststore));
LettuceAssert.isTrue(truststore.isFile(), () -> String.format("Truststore file %s is not a file", truststore));
return truststore(Resource.from(truststore), getPassword(truststorePassword));
}
/**
* Sets the Truststore resource to load trusted certificates. The truststore resource must be supported by
* {@link java.security.KeyStore} which is {@link KeyStore#getDefaultType()} by default. The truststore is reloaded on
* each connection attempt that allows to replace certificates during runtime.
*
* @param truststore the truststore file, must not be {@code null}.
* @return {@code this}
*/
public Builder truststore(URL truststore) {
return truststore(truststore, null);
}
/**
* Sets the Truststore resource to load trusted certificates. The truststore resource must be supported by
* {@link java.security.KeyStore} which is {@link KeyStore#getDefaultType()} by default. The truststore is reloaded on
* each connection attempt that allows to replace certificates during runtime.
*
* @param truststore the truststore file, must not be {@code null}.
* @param truststorePassword the truststore password. May be empty to omit password and the truststore integrity check.
* @return {@code this}
*/
public Builder truststore(URL truststore, String truststorePassword) {
LettuceAssert.notNull(truststore, "Truststore must not be null");
this.truststore = truststore;
return truststore(Resource.from(truststore), getPassword(truststorePassword));
}
/**
* Sets the certificate file to load trusted certificates. The file must provide X.509 certificates in PEM format.
* Certificates are reloaded on each connection attempt that allows to replace certificates during runtime.
*
* @param certCollection the X.509 certificate collection in PEM format.
* @return {@code this}
* @since 5.3
*/
public Builder trustManager(File certCollection) {
LettuceAssert.notNull(certCollection, "Certificate collection must not be null");
LettuceAssert.isTrue(certCollection.exists(),
() -> String.format("Certificate collection file %s does not exist", certCollection));
LettuceAssert.isTrue(certCollection.isFile(),
() -> String.format("Certificate collection %s is not a file", certCollection));
return trustManager(Resource.from(certCollection));
}
/**
* Sets the certificate resource to load trusted certificates. The file must provide X.509 certificates in PEM format.
* Certificates are reloaded on each connection attempt that allows to replace certificates during runtime.
*
* @param certCollection the X.509 certificate collection in PEM format.
* @return {@code this}
* @since 5.3
*/
public Builder trustManager(Resource certCollection) {
LettuceAssert.notNull(certCollection, "Truststore must not be null");
this.trustmanager = (builder, keyStoreType) -> {
try (InputStream is = certCollection.get()) {
builder.trustManager(is);
}
};
return this;
}
/**
* Sets the {@link TrustManagerFactory}.
*
* @param trustManagerFactory the {@link TrustManagerFactory} to use.
* @return {@code this}
* @since 5.3
*/
public Builder trustManager(TrustManagerFactory trustManagerFactory) {
LettuceAssert.notNull(trustManagerFactory, "TrustManagerFactory must not be null");
this.trustmanager = (builder, keyStoreType) -> {
builder.trustManager(trustManagerFactory);
};
return this;
}
/**
* Sets the Truststore resource to load trusted certificates. The truststore resource must be supported by
* {@link java.security.KeyStore} which is {@link KeyStore#getDefaultType()} by default. The truststore is reloaded on
* each connection attempt that allows to replace certificates during runtime.
*
* @param resource the provider that opens a {@link InputStream} to the keystore file, must not be {@code null}.
* @param truststorePassword the truststore password. May be empty to omit password and the truststore integrity check.
* @return {@code this}
*/
public Builder truststore(Resource resource, char[] truststorePassword) {
LettuceAssert.notNull(resource, "Truststore InputStreamProvider must not be null");
char[] passwordToUse = getPassword(truststorePassword);
this.truststorePassword = passwordToUse;
this.trustmanager = (builder, keyStoreType) -> {
try (InputStream is = resource.get()) {
builder.trustManager(createTrustManagerFactory(is, passwordToUse, keyStoreType));
}
};
return this;
}
/**
* Applies a {@link SslContextBuilder} customizer by calling {@link java.util.function.Consumer#accept(Object)}
*
* @param contextBuilderCustomizer builder callback to customize the {@link SslContextBuilder}.
* @return {@code this}
* @since 5.3
*/
public Builder sslContext(Consumer<SslContextBuilder> contextBuilderCustomizer) {
LettuceAssert.notNull(contextBuilderCustomizer, "SslContextBuilder customizer must not be null");
this.sslContextBuilderCustomizer = contextBuilderCustomizer;
return this;
}
/**
* Configures a {@link Supplier} to create {@link SSLParameters}.
*
* @param sslParametersSupplier {@link Supplier} for {@link SSLParameters}.
* @return {@code this}
* @since 5.3
*/
public Builder sslParameters(Supplier<SSLParameters> sslParametersSupplier) {
LettuceAssert.notNull(sslParametersSupplier, "SSLParameters supplier must not be null");
this.sslParametersSupplier = sslParametersSupplier;
return this;
}
/**
* Create a new instance of {@link SslOptions}
*
* @return new instance of {@link SslOptions}
*/
public SslOptions build() {
return new SslOptions(this);
}
}
/**
* Creates a new {@link SslContextBuilder} object that is pre-configured with values from this {@link SslOptions} object.
*
* @return a new {@link SslContextBuilder}.
* @throws IOException thrown when loading the keystore or the truststore fails.
* @throws GeneralSecurityException thrown when loading the keystore or the truststore fails.
* @since 5.3
*/
public SslContextBuilder createSslContextBuilder() throws IOException, GeneralSecurityException {
SslContextBuilder sslContextBuilder = SslContextBuilder.forClient().sslProvider(this.sslProvider)
.keyStoreType(keyStoreType);
if (protocols != null && protocols.length > 0) {
sslContextBuilder.protocols(protocols);
}
if (cipherSuites != null && cipherSuites.length > 0) {
sslContextBuilder.ciphers(Arrays.asList(cipherSuites));
}
keymanager.accept(sslContextBuilder, this.keyStoreType);
trustmanager.accept(sslContextBuilder, this.keyStoreType);
sslContextBuilderCustomizer.accept(sslContextBuilder);
return sslContextBuilder;
}
/**
* Creates a {@link SSLParameters} object that is pre-configured with values from this {@link SslOptions} object.
*
* @return a new a {@link SSLParameters} object.
* @since 5.3
*/
public SSLParameters createSSLParameters() {
SSLParameters sslParams = sslParametersSupplier.get();
if (protocols != null && protocols.length > 0) {
sslParams.setProtocols(protocols);
}
if (cipherSuites != null && cipherSuites.length > 0) {
sslParams.setCipherSuites(cipherSuites);
}
return sslParams;
}
/**
* Returns a builder to create new {@link SslOptions} whose settings are replicated from the current {@link SslOptions}.
*
* @return a {@link SslOptions.Builder} to create new {@link SslOptions} whose settings are replicated from the current
* {@link SslOptions}
*
* @since 5.3
*/
public SslOptions.Builder mutate() {
Builder builder = builder();
builder.keyStoreType = this.keyStoreType;
builder.sslProvider = this.getSslProvider();
builder.keystore = this.keystore;
builder.keystorePassword = this.keystorePassword;
builder.truststore = this.getTruststore();
builder.truststorePassword = this.getTruststorePassword();
builder.protocols = this.protocols;
builder.cipherSuites = this.cipherSuites;
builder.sslContextBuilderCustomizer = this.sslContextBuilderCustomizer;
builder.sslParametersSupplier = this.sslParametersSupplier;
builder.keymanager = this.keymanager;
builder.trustmanager = this.trustmanager;
builder.sslHandshakeTimeout = this.handshakeTimeout;
return builder;
}
/**
* @return the configured {@link SslProvider}.
*/
@Deprecated
public SslProvider getSslProvider() {
return sslProvider;
}
/**
* @return the keystore {@link URL}.
* @deprecated since 5.3, {@link javax.net.ssl.KeyManager} is configured via {@link #createSslContextBuilder()}.
*/
@Deprecated
public URL getKeystore() {
return keystore;
}
/**
* @return the set of protocols
*/
public String[] getProtocols() {
return protocols;
}
/**
* @return the set of cipher suites
*/
public String[] getCipherSuites() {
return cipherSuites;
}
/**
* @return the SSL handshake timeout
* @since 5.3.2
*/
public Duration getHandshakeTimeout() {
return handshakeTimeout;
}
/**
* @return the password for the keystore. May be empty.
* @deprecated since 5.3, {@link javax.net.ssl.KeyManager} is configured via {@link #createSslContextBuilder()}.
*/
@Deprecated
public char[] getKeystorePassword() {
return Arrays.copyOf(keystorePassword, keystorePassword.length);
}
/**
* @return the truststore {@link URL}.
* @deprecated since 5.3, {@link javax.net.ssl.TrustManager} is configured via {@link #createSslContextBuilder()}.
*/
@Deprecated
public URL getTruststore() {
return truststore;
}
/**
* @return the password for the truststore. May be empty.
* @deprecated since 5.3, {@link javax.net.ssl.TrustManager} is configured via {@link #createSslContextBuilder()}.
*/
@Deprecated
public char[] getTruststorePassword() {
return Arrays.copyOf(truststorePassword, truststorePassword.length);
}
private static KeyManagerFactory createKeyManagerFactory(InputStream inputStream, char[] storePassword, String keyStoreType)
throws GeneralSecurityException, IOException {
KeyStore keyStore = getKeyStore(inputStream, storePassword, keyStoreType);
KeyManagerFactory keyManagerFactory = KeyManagerFactory.getInstance(TrustManagerFactory.getDefaultAlgorithm());
keyManagerFactory.init(keyStore, storePassword == null ? new char[0] : storePassword);
return keyManagerFactory;
}
private static KeyStore getKeyStore(InputStream inputStream, char[] storePassword, String keyStoreType)
throws KeyStoreException, IOException, NoSuchAlgorithmException, CertificateException {
KeyStore keyStore = KeyStore
.getInstance(LettuceStrings.isEmpty(keyStoreType) ? KeyStore.getDefaultType() : keyStoreType);
try {
keyStore.load(inputStream, storePassword);
} finally {
inputStream.close();
}
return keyStore;
}
private static TrustManagerFactory createTrustManagerFactory(InputStream inputStream, char[] storePassword,
String keystoreType) throws GeneralSecurityException, IOException {
KeyStore trustStore = getKeyStore(inputStream, storePassword, keystoreType);
TrustManagerFactory trustManagerFactory = TrustManagerFactory.getInstance(TrustManagerFactory.getDefaultAlgorithm());
trustManagerFactory.init(trustStore);
return trustManagerFactory;
}
private static char[] getPassword(String truststorePassword) {
return LettuceStrings.isNotEmpty(truststorePassword) ? truststorePassword.toCharArray() : null;
}
private static char[] getPassword(char[] chars) {
return chars != null ? Arrays.copyOf(chars, chars.length) : null;
}
@FunctionalInterface
| Builder |
java | assertj__assertj-core | assertj-core/src/main/java/org/assertj/core/error/ShouldBeSubstring.java | {
"start": 725,
"end": 1626
} | class ____ extends BasicErrorMessageFactory {
/**
* Creates a new <code>{@link org.assertj.core.error.ShouldBeSubstring}</code>.
* @param actual the actual value in the failed assertion.
* @param expected the expected value in the failed assertion.
* @param comparisonStrategy the {@link ComparisonStrategy} used
* @return the created {@code ErrorMessageFactory}.
*/
public static ErrorMessageFactory shouldBeSubstring(CharSequence actual, CharSequence expected,
ComparisonStrategy comparisonStrategy) {
return new ShouldBeSubstring(actual, expected, comparisonStrategy);
}
private ShouldBeSubstring(CharSequence actual, CharSequence expected, ComparisonStrategy comparisonStrategy) {
super("%nExpecting actual:%n %s%nto be a substring of:%n %s%n%s", actual, expected, comparisonStrategy);
}
}
| ShouldBeSubstring |
java | elastic__elasticsearch | x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/TransportUpdateConnectorSyncJobErrorActionTests.java | {
"start": 1099,
"end": 2886
} | class ____ extends ESSingleNodeTestCase {
private static final Long TIMEOUT_SECONDS = 10L;
private final ThreadPool threadPool = new TestThreadPool(getClass().getName());
private TransportUpdateConnectorSyncJobErrorAction action;
@Before
public void setup() {
ClusterService clusterService = getInstanceFromNode(ClusterService.class);
TransportService transportService = new TransportService(
Settings.EMPTY,
mock(Transport.class),
threadPool,
TransportService.NOOP_TRANSPORT_INTERCEPTOR,
x -> null,
null,
Collections.emptySet()
);
action = new TransportUpdateConnectorSyncJobErrorAction(transportService, mock(ActionFilters.class), client());
}
@Override
public void tearDown() throws Exception {
super.tearDown();
ThreadPool.terminate(threadPool, TIMEOUT_SECONDS, TimeUnit.SECONDS);
}
public void testUpdateConnectorSyncJobError_ExpectNoWarnings() throws InterruptedException {
UpdateConnectorSyncJobErrorAction.Request request = ConnectorSyncJobTestUtils.getRandomUpdateConnectorSyncJobErrorActionRequest();
executeRequest(request);
ensureNoWarnings();
}
private void executeRequest(UpdateConnectorSyncJobErrorAction.Request request) throws InterruptedException {
final CountDownLatch latch = new CountDownLatch(1);
action.doExecute(mock(Task.class), request, ActionListener.wrap(response -> latch.countDown(), exception -> latch.countDown()));
boolean requestTimedOut = latch.await(TIMEOUT_SECONDS, TimeUnit.SECONDS);
assertTrue("Timeout waiting for update request", requestTimedOut);
}
}
| TransportUpdateConnectorSyncJobErrorActionTests |
java | apache__maven | api/maven-api-core/src/main/java/org/apache/maven/api/services/ModelBuilderRequest.java | {
"start": 2641,
"end": 5286
} | enum ____ {
/**
* The repositories declared in the POM have precedence over the repositories specified in the request.
*/
POM_DOMINANT,
/**
* The repositories specified in the request have precedence over the repositories declared in the POM.
*/
REQUEST_DOMINANT,
}
@Nonnull
ModelSource getSource();
@Nonnull
RequestType getRequestType();
boolean isLocationTracking();
boolean isRecursive();
/**
* Defines external profiles that may be activated for the given model.
* Those are external profiles usually defined in {@link org.apache.maven.api.settings.Settings#getProfiles()}.
*/
@Nonnull
Collection<Profile> getProfiles();
/**
* List of profile ids that have been explicitly activated by the user.
*/
@Nonnull
List<String> getActiveProfileIds();
/**
* List of profile ids that have been explicitly deactivated by the user.
*/
@Nonnull
List<String> getInactiveProfileIds();
/**
* Provides a map of system properties.
*/
@Nonnull
Map<String, String> getSystemProperties();
/**
* Provides a map of user properties.
* User properties
*/
@Nonnull
Map<String, String> getUserProperties();
@Nonnull
RepositoryMerging getRepositoryMerging();
@Nullable
ModelTransformer getLifecycleBindingsInjector();
@Nonnull
static ModelBuilderRequest build(@Nonnull ModelBuilderRequest request, @Nonnull ModelSource source) {
return builder(requireNonNull(request, "request cannot be null"))
.source(requireNonNull(source, "source cannot be null"))
.build();
}
@Nonnull
static ModelBuilderRequest build(@Nonnull Session session, @Nonnull ModelSource source) {
return builder()
.session(requireNonNull(session, "session cannot be null"))
.source(requireNonNull(source, "source cannot be null"))
.build();
}
@Nonnull
static ModelBuilderRequest build(@Nonnull Session session, @Nonnull Path path) {
return builder()
.session(requireNonNull(session, "session cannot be null"))
.source(Sources.buildSource(path))
.build();
}
@Nonnull
static ModelBuilderRequestBuilder builder() {
return new ModelBuilderRequestBuilder();
}
@Nonnull
static ModelBuilderRequestBuilder builder(ModelBuilderRequest request) {
return new ModelBuilderRequestBuilder(request);
}
@NotThreadSafe
| RepositoryMerging |
java | apache__camel | dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/JpaEndpointBuilderFactory.java | {
"start": 66313,
"end": 73450
} | interface ____ extends EndpointProducerBuilder {
default JpaEndpointProducerBuilder basic() {
return (JpaEndpointProducerBuilder) this;
}
/**
* Whether the producer should be started lazy (on the first message).
* By starting lazy you can use this to allow CamelContext and routes to
* startup in situations where a producer may otherwise fail during
* starting and cause the route to fail being started. By deferring this
* startup to be lazy then the startup failure can be handled during
* routing messages via Camel's routing error handlers. Beware that when
* the first message is processed then creating and starting the
* producer may take a little time and prolong the total processing time
* of the processing.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: producer (advanced)
*
* @param lazyStartProducer the value to set
* @return the dsl builder
*/
default AdvancedJpaEndpointProducerBuilder lazyStartProducer(boolean lazyStartProducer) {
doSetProperty("lazyStartProducer", lazyStartProducer);
return this;
}
/**
* Whether the producer should be started lazy (on the first message).
* By starting lazy you can use this to allow CamelContext and routes to
* startup in situations where a producer may otherwise fail during
* starting and cause the route to fail being started. By deferring this
* startup to be lazy then the startup failure can be handled during
* routing messages via Camel's routing error handlers. Beware that when
* the first message is processed then creating and starting the
* producer may take a little time and prolong the total processing time
* of the processing.
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: false
* Group: producer (advanced)
*
* @param lazyStartProducer the value to set
* @return the dsl builder
*/
default AdvancedJpaEndpointProducerBuilder lazyStartProducer(String lazyStartProducer) {
doSetProperty("lazyStartProducer", lazyStartProducer);
return this;
}
/**
* If set to true, then Camel will use the EntityManager from the header
* JpaConstants.ENTITY_MANAGER instead of the configured entity manager
* on the component/endpoint. This allows end users to control which
* entity manager will be in use.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: producer (advanced)
*
* @param usePassedInEntityManager the value to set
* @return the dsl builder
*/
default AdvancedJpaEndpointProducerBuilder usePassedInEntityManager(boolean usePassedInEntityManager) {
doSetProperty("usePassedInEntityManager", usePassedInEntityManager);
return this;
}
/**
* If set to true, then Camel will use the EntityManager from the header
* JpaConstants.ENTITY_MANAGER instead of the configured entity manager
* on the component/endpoint. This allows end users to control which
* entity manager will be in use.
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: false
* Group: producer (advanced)
*
* @param usePassedInEntityManager the value to set
* @return the dsl builder
*/
default AdvancedJpaEndpointProducerBuilder usePassedInEntityManager(String usePassedInEntityManager) {
doSetProperty("usePassedInEntityManager", usePassedInEntityManager);
return this;
}
/**
* Additional properties for the entity manager to use. This is a
* multi-value option with prefix: emf.
*
* The option is a: <code>java.util.Map<java.lang.String,
* java.lang.Object></code> type.
* The option is multivalued, and you can use the
* entityManagerProperties(String, Object) method to add a value (call
* the method multiple times to set more values).
*
* Group: advanced
*
* @param key the option key
* @param value the option value
* @return the dsl builder
*/
default AdvancedJpaEndpointProducerBuilder entityManagerProperties(String key, Object value) {
doSetMultiValueProperty("entityManagerProperties", "emf." + key, value);
return this;
}
/**
* Additional properties for the entity manager to use. This is a
* multi-value option with prefix: emf.
*
* The option is a: <code>java.util.Map<java.lang.String,
* java.lang.Object></code> type.
* The option is multivalued, and you can use the
* entityManagerProperties(String, Object) method to add a value (call
* the method multiple times to set more values).
*
* Group: advanced
*
* @param values the values
* @return the dsl builder
*/
default AdvancedJpaEndpointProducerBuilder entityManagerProperties(Map values) {
doSetMultiValueProperties("entityManagerProperties", "emf.", values);
return this;
}
/**
* Whether to use Spring's SharedEntityManager for the
* consumer/producer. Note in most cases, joinTransaction should be set
* to false as this is not an EXTENDED EntityManager.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: advanced
*
* @param sharedEntityManager the value to set
* @return the dsl builder
*/
default AdvancedJpaEndpointProducerBuilder sharedEntityManager(boolean sharedEntityManager) {
doSetProperty("sharedEntityManager", sharedEntityManager);
return this;
}
/**
* Whether to use Spring's SharedEntityManager for the
* consumer/producer. Note in most cases, joinTransaction should be set
* to false as this is not an EXTENDED EntityManager.
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: false
* Group: advanced
*
* @param sharedEntityManager the value to set
* @return the dsl builder
*/
default AdvancedJpaEndpointProducerBuilder sharedEntityManager(String sharedEntityManager) {
doSetProperty("sharedEntityManager", sharedEntityManager);
return this;
}
}
/**
* Builder for endpoint for the JPA component.
*/
public | AdvancedJpaEndpointProducerBuilder |
java | apache__kafka | test-common/test-common-internal-api/src/main/java/org/apache/kafka/common/test/api/ClusterTestDefaults.java | {
"start": 1093,
"end": 1404
} | class ____ defaults for any test template methods annotated with {@link ClusterTest} or
* {@link ClusterTests}. The default values here are also used as the source for defaults in
* {@link org.apache.kafka.common.test.junit.ClusterTestExtensions}.
*/
@Documented
@Target({TYPE})
@Retention(RUNTIME)
public @ | level |
java | alibaba__nacos | common/src/main/java/com/alibaba/nacos/common/ability/AbstractAbilityControlManager.java | {
"start": 6718,
"end": 7644
} | class ____ extends Event {
private static final long serialVersionUID = -1232411212311111L;
private AbilityKey abilityKey;
private boolean isOn;
private Map<String, Boolean> table;
private AbilityUpdateEvent() {
}
public Map<String, Boolean> getAbilityTable() {
return table;
}
public void setTable(Map<String, Boolean> abilityTable) {
this.table = abilityTable;
}
public AbilityKey getAbilityKey() {
return abilityKey;
}
public void setAbilityKey(AbilityKey abilityKey) {
this.abilityKey = abilityKey;
}
public boolean isOn() {
return isOn;
}
public void setOn(boolean on) {
isOn = on;
}
}
}
| AbilityUpdateEvent |
java | apache__logging-log4j2 | log4j-1.2-api/src/test/java/org/apache/log4j/config/XmlConfigurationTest.java | {
"start": 2137,
"end": 7985
} | class ____ extends AbstractLog4j1ConfigurationTest {
private static final String SUFFIX = ".xml";
@Override
Configuration getConfiguration(final String configResourcePrefix) throws IOException {
final String configResource = configResourcePrefix + SUFFIX;
final InputStream inputStream = getResourceAsStream(configResource);
final ConfigurationSource source = new ConfigurationSource(inputStream);
final LoggerContext context = LoggerContext.getContext(false);
final Configuration configuration = new XmlConfigurationFactory().getConfiguration(context, source);
assertNotNull(configuration, "No configuration created");
configuration.initialize();
return configuration;
}
@Test
void testListAppender() throws Exception {
final LoggerContext loggerContext = TestConfigurator.configure("target/test-classes/log4j1-list.xml");
final Logger logger = LogManager.getLogger("test");
logger.debug("This is a test of the root logger");
final Configuration configuration = loggerContext.getConfiguration();
final Map<String, Appender> appenders = configuration.getAppenders();
ListAppender eventAppender = null;
ListAppender messageAppender = null;
for (final Map.Entry<String, Appender> entry : appenders.entrySet()) {
if (entry.getKey().equals("list")) {
messageAppender = (ListAppender) ((AppenderAdapter.Adapter) entry.getValue()).getAppender();
} else if (entry.getKey().equals("events")) {
eventAppender = (ListAppender) ((AppenderAdapter.Adapter) entry.getValue()).getAppender();
}
}
assertNotNull(eventAppender, "No Event Appender");
assertNotNull(messageAppender, "No Message Appender");
final List<LoggingEvent> events = eventAppender.getEvents();
assertTrue(events != null && !events.isEmpty(), "No events");
final List<String> messages = messageAppender.getMessages();
assertTrue(messages != null && !messages.isEmpty(), "No messages");
}
@Test
void testXML() throws Exception {
TestConfigurator.configure("target/test-classes/log4j1-file.xml");
final Logger logger = LogManager.getLogger("test");
logger.debug("This is a test of the root logger");
File file = new File("target/temp.A1");
assertTrue(file.exists(), "File A1 was not created");
assertTrue(file.length() > 0, "File A1 is empty");
file = new File("target/temp.A2");
assertTrue(file.exists(), "File A2 was not created");
assertTrue(file.length() > 0, "File A2 is empty");
}
@Override
@Test
public void testConsoleEnhancedPatternLayout() throws Exception {
super.testConsoleEnhancedPatternLayout();
}
@Override
@Test
public void testConsoleHtmlLayout() throws Exception {
super.testConsoleHtmlLayout();
}
@Override
@Test
public void testConsolePatternLayout() throws Exception {
super.testConsolePatternLayout();
}
@Override
@Test
public void testConsoleSimpleLayout() throws Exception {
super.testConsoleSimpleLayout();
}
@Override
@Test
public void testFileSimpleLayout() throws Exception {
super.testFileSimpleLayout();
}
@Override
@Test
public void testNullAppender() throws Exception {
super.testNullAppender();
}
@Override
@Test
public void testConsoleCapitalization() throws Exception {
super.testConsoleCapitalization();
}
@Override
@Test
public void testConsoleTtccLayout() throws Exception {
super.testConsoleTtccLayout();
}
@Override
@Test
public void testRollingFileAppender() throws Exception {
super.testRollingFileAppender();
}
@Override
@Test
public void testDailyRollingFileAppender() throws Exception {
super.testDailyRollingFileAppender();
}
@Override
@Test
public void testSystemProperties1() throws Exception {
super.testSystemProperties1();
}
@Override
@Test
public void testDefaultValues() throws Exception {
super.testDefaultValues();
}
@Override
@Test
public void testMultipleFilters() throws Exception {
super.testMultipleFilters();
}
@Override
@Test
public void testGlobalThreshold() throws Exception {
super.testGlobalThreshold();
}
@Test
void testEnhancedRollingFileAppender() throws Exception {
try (final LoggerContext ctx = configure("config-1.2/log4j-EnhancedRollingFileAppender")) {
final Configuration configuration = ctx.getConfiguration();
assertNotNull(configuration);
testEnhancedRollingFileAppender(configuration);
// Only supported through XML configuration
final Appender appender = configuration.getAppender("MIXED");
assertInstanceOf(RollingFileAppender.class, appender, "is RollingFileAppender");
final TriggeringPolicy policy = ((RollingFileAppender) appender).getTriggeringPolicy();
assertInstanceOf(CompositeTriggeringPolicy.class, policy, "is CompositeTriggeringPolicy");
final TriggeringPolicy[] policies = ((CompositeTriggeringPolicy) policy).getTriggeringPolicies();
assertEquals(2, policies.length);
assertInstanceOf(TimeBasedTriggeringPolicy.class, policies[0], "is TimeBasedTriggeringPolicy");
assertInstanceOf(SizeBasedTriggeringPolicy.class, policies[1], "is SizeBasedTriggeringPolicy");
}
}
@Override
@Test
public void testLevelRangeFilter() throws Exception {
super.testLevelRangeFilter();
}
}
| XmlConfigurationTest |
java | apache__kafka | connect/runtime/src/main/java/org/apache/kafka/connect/runtime/errors/LogReporter.java | {
"start": 4554,
"end": 5005
} | class ____ extends LogReporter<SourceRecord> {
public Source(ConnectorTaskId id, ConnectorConfig connConfig, ErrorHandlingMetrics errorHandlingMetrics) {
super(id, connConfig, errorHandlingMetrics);
}
@Override
protected void appendMessage(StringBuilder builder, SourceRecord original) {
builder.append(", where source record is = ");
builder.append(original);
}
}
}
| Source |
java | apache__kafka | streams/src/test/java/org/apache/kafka/streams/processor/internals/assignment/TaskAssignmentUtilsTest.java | {
"start": 3341,
"end": 22344
} | class ____ {
@Timeout(value = 30)
@ParameterizedTest
@ValueSource(strings = {
StreamsConfig.RACK_AWARE_ASSIGNMENT_STRATEGY_MIN_TRAFFIC,
StreamsConfig.RACK_AWARE_ASSIGNMENT_STRATEGY_BALANCE_SUBTOPOLOGY,
})
public void shouldOptimizeActiveTaskSimple(final String strategy) {
final AssignmentConfigs assignmentConfigs = defaultAssignmentConfigs(
strategy, 100, 1, 1, Collections.emptyList());
final Map<TaskId, TaskInfo> tasks = mkMap(
mkTaskInfo(TASK_0_0, true, Set.of("rack-2")),
mkTaskInfo(TASK_0_1, true, Set.of("rack-1"))
);
final Map<ProcessId, KafkaStreamsState> kafkaStreamsStates = mkMap(
mkStreamState(1, 1, Optional.of("rack-1")),
mkStreamState(2, 1, Optional.of("rack-2"))
);
final ApplicationState applicationState = new TestApplicationState(
assignmentConfigs, kafkaStreamsStates, tasks);
final Map<ProcessId, KafkaStreamsAssignment> assignments = mkMap(
mkAssignment(AssignedTask.Type.ACTIVE, 1, TASK_0_0),
mkAssignment(AssignedTask.Type.ACTIVE, 2, TASK_0_1)
);
TaskAssignmentUtils.optimizeRackAwareActiveTasks(
RackAwareOptimizationParams.of(applicationState), assignments);
assertThat(assignments.size(), equalTo(2));
assertThat(assignments.get(processId(1)).tasks().keySet(), equalTo(Set.of(TASK_0_1)));
assertThat(assignments.get(processId(2)).tasks().keySet(), equalTo(Set.of(TASK_0_0)));
// Repeated to make sure nothing gets shifted around after the first round of optimization.
TaskAssignmentUtils.optimizeRackAwareActiveTasks(
RackAwareOptimizationParams.of(applicationState), assignments);
assertThat(assignments.size(), equalTo(2));
assertThat(assignments.get(processId(1)).tasks().keySet(), equalTo(Set.of(TASK_0_1)));
assertThat(assignments.get(processId(2)).tasks().keySet(), equalTo(Set.of(TASK_0_0)));
}
@Timeout(value = 30)
@ParameterizedTest
@ValueSource(strings = {
StreamsConfig.RACK_AWARE_ASSIGNMENT_STRATEGY_MIN_TRAFFIC,
StreamsConfig.RACK_AWARE_ASSIGNMENT_STRATEGY_BALANCE_SUBTOPOLOGY,
})
public void shouldOptimizeStandbyTasksBasic(final String strategy) {
final AssignmentConfigs assignmentConfigs = defaultAssignmentConfigs(
strategy, 100, 1, 1, Collections.emptyList());
final Map<TaskId, TaskInfo> tasks = mkMap(
mkTaskInfo(TASK_0_0, true, Set.of("rack-2")),
mkTaskInfo(TASK_0_1, true, Set.of("rack-3"))
);
final Map<ProcessId, KafkaStreamsState> kafkaStreamsStates = mkMap(
mkStreamState(1, 2, Optional.of("rack-1")),
mkStreamState(2, 2, Optional.of("rack-2")),
mkStreamState(3, 2, Optional.of("rack-3"))
);
final ApplicationState applicationState = new TestApplicationState(
assignmentConfigs, kafkaStreamsStates, tasks);
final Map<ProcessId, KafkaStreamsAssignment> assignments = mkMap(
mkAssignment(AssignedTask.Type.ACTIVE, 1, TASK_0_0, TASK_0_1),
mkAssignment(AssignedTask.Type.STANDBY, 2, TASK_0_1),
mkAssignment(AssignedTask.Type.STANDBY, 3, TASK_0_0)
);
TaskAssignmentUtils.optimizeRackAwareStandbyTasks(RackAwareOptimizationParams.of(applicationState), assignments);
assertThat(assignments.size(), equalTo(3));
assertThat(assignments.get(processId(1)).tasks().keySet(), equalTo(Set.of(TASK_0_0, TASK_0_1)));
assertThat(assignments.get(processId(2)).tasks().keySet(), equalTo(Set.of(TASK_0_0)));
assertThat(assignments.get(processId(3)).tasks().keySet(), equalTo(Set.of(TASK_0_1)));
}
@Timeout(value = 30)
@Test
public void shouldAssignStandbyTasksWithClientTags() {
final AssignmentConfigs assignmentConfigs = defaultAssignmentConfigs(
StreamsConfig.RACK_AWARE_ASSIGNMENT_STRATEGY_NONE, 100, 1, 2, Collections.singletonList("az"));
final Map<TaskId, TaskInfo> tasks = mkMap(
mkTaskInfo(TASK_0_0, true)
);
final Map<ProcessId, KafkaStreamsState> kafkaStreamsStates = mkMap(
mkStreamState(1, 2, Optional.empty(), Set.of(), Set.of(), mkMap(
mkEntry("az", "1")
)),
mkStreamState(2, 2, Optional.empty(), Set.of(), Set.of(), mkMap(
mkEntry("az", "1")
)),
mkStreamState(3, 2, Optional.empty(), Set.of(), Set.of(), mkMap(
mkEntry("az", "2")
)),
mkStreamState(4, 2, Optional.empty(), Set.of(), Set.of(), mkMap(
mkEntry("az", "3")
))
);
final ApplicationState applicationState = new TestApplicationState(
assignmentConfigs, kafkaStreamsStates, tasks);
final Map<ProcessId, KafkaStreamsAssignment> assignments = mkMap(
mkAssignment(AssignedTask.Type.ACTIVE, 1, TASK_0_0)
);
TaskAssignmentUtils.defaultStandbyTaskAssignment(applicationState, assignments);
assertThat(assignments.size(), equalTo(4));
assertThat(assignments.get(processId(1)).tasks().keySet(), equalTo(Set.of(TASK_0_0)));
assertThat(assignments.get(processId(1)).tasks().get(TASK_0_0).type(), equalTo(AssignedTask.Type.ACTIVE));
assertThat(assignments.get(processId(2)).tasks().keySet(), equalTo(Set.of()));
assertThat(assignments.get(processId(3)).tasks().keySet(), equalTo(Set.of(TASK_0_0)));
assertThat(assignments.get(processId(4)).tasks().keySet(), equalTo(Set.of(TASK_0_0)));
}
@Timeout(value = 30)
@Test
public void shouldAssignStandbyTasksByClientLoad() {
final AssignmentConfigs assignmentConfigs = defaultAssignmentConfigs(
StreamsConfig.RACK_AWARE_ASSIGNMENT_STRATEGY_NONE, 100, 1, 3, Collections.emptyList());
final Map<TaskId, TaskInfo> tasks = mkMap(
mkTaskInfo(TASK_0_0, true),
mkTaskInfo(TASK_0_1, false),
mkTaskInfo(TASK_0_2, false),
mkTaskInfo(TASK_0_3, false),
mkTaskInfo(TASK_0_4, false),
mkTaskInfo(TASK_0_5, false)
);
final Map<ProcessId, KafkaStreamsState> kafkaStreamsStates = mkMap(
mkStreamState(1, 5, Optional.empty(), Set.of(), Set.of()),
mkStreamState(2, 5, Optional.empty(), Set.of(), Set.of()),
mkStreamState(3, 5, Optional.empty(), Set.of(), Set.of()),
mkStreamState(4, 5, Optional.empty(), Set.of(), Set.of()),
mkStreamState(5, 5, Optional.empty(), Set.of(), Set.of())
);
final ApplicationState applicationState = new TestApplicationState(
assignmentConfigs, kafkaStreamsStates, tasks);
final Map<ProcessId, KafkaStreamsAssignment> assignments = mkMap(
mkAssignment(AssignedTask.Type.ACTIVE, 1, TASK_0_0, TASK_0_1, TASK_0_2),
mkAssignment(AssignedTask.Type.ACTIVE, 2, TASK_0_3, TASK_0_4, TASK_0_5)
);
TaskAssignmentUtils.defaultStandbyTaskAssignment(applicationState, assignments);
assertThat(assignments.size(), equalTo(5));
assertThat(assignments.get(processId(2)).tasks().keySet(), equalTo(Set.of(TASK_0_3, TASK_0_4, TASK_0_5)));
assertThat(assignments.get(processId(3)).tasks().keySet(), equalTo(Set.of(TASK_0_0)));
assertThat(assignments.get(processId(4)).tasks().keySet(), equalTo(Set.of(TASK_0_0)));
assertThat(assignments.get(processId(5)).tasks().keySet(), equalTo(Set.of(TASK_0_0)));
}
@Timeout(value = 30)
@ParameterizedTest
@ValueSource(strings = {
StreamsConfig.RACK_AWARE_ASSIGNMENT_STRATEGY_MIN_TRAFFIC,
StreamsConfig.RACK_AWARE_ASSIGNMENT_STRATEGY_BALANCE_SUBTOPOLOGY,
})
public void shouldNotViolateClientTagsAssignmentDuringStandbyOptimization(final String strategy) {
final AssignmentConfigs assignmentConfigs = defaultAssignmentConfigs(
strategy, 100, 1, 2, Collections.singletonList("az"));
final Map<TaskId, TaskInfo> tasks = mkMap(
mkTaskInfo(TASK_0_0, true, Set.of("r1")),
mkTaskInfo(TASK_0_1, true, Set.of("r1"))
);
final Map<ProcessId, KafkaStreamsState> kafkaStreamsStates = mkMap(
mkStreamState(1, 2, Optional.of("r1"), Set.of(), Set.of(), mkMap(
mkEntry("az", "1")
)),
mkStreamState(2, 2, Optional.of("r1"), Set.of(), Set.of(), mkMap(
mkEntry("az", "2")
)),
mkStreamState(3, 2, Optional.of("r1"), Set.of(), Set.of(), mkMap(
mkEntry("az", "3")
)),
mkStreamState(4, 2, Optional.of("r1"), Set.of(), Set.of(), mkMap(
mkEntry("az", "2")
))
);
final ApplicationState applicationState = new TestApplicationState(
assignmentConfigs, kafkaStreamsStates, tasks);
final Map<ProcessId, KafkaStreamsAssignment> assignments = mkMap(
mkAssignment(
1,
new AssignedTask(TASK_0_0, AssignedTask.Type.ACTIVE),
new AssignedTask(TASK_0_1, AssignedTask.Type.STANDBY)
),
mkAssignment(
2,
new AssignedTask(TASK_0_0, AssignedTask.Type.STANDBY),
new AssignedTask(TASK_0_1, AssignedTask.Type.ACTIVE)
),
mkAssignment(
3,
new AssignedTask(TASK_0_0, AssignedTask.Type.STANDBY),
new AssignedTask(TASK_0_1, AssignedTask.Type.STANDBY)
),
mkAssignment(4)
);
TaskAssignmentUtils.optimizeRackAwareStandbyTasks(RackAwareOptimizationParams.of(applicationState), assignments);
assertThat(assignments.size(), equalTo(4));
assertThat(assignments.get(processId(1)).tasks().keySet(), equalTo(Set.of(TASK_0_0, TASK_0_1)));
assertThat(assignments.get(processId(2)).tasks().keySet(), equalTo(Set.of(TASK_0_0, TASK_0_1)));
assertThat(assignments.get(processId(3)).tasks().keySet(), equalTo(Set.of(TASK_0_0, TASK_0_1)));
assertThat(assignments.get(processId(4)).tasks().keySet(), equalTo(Set.of()));
}
@Timeout(value = 30)
@ParameterizedTest
@ValueSource(strings = {
StreamsConfig.RACK_AWARE_ASSIGNMENT_STRATEGY_MIN_TRAFFIC,
StreamsConfig.RACK_AWARE_ASSIGNMENT_STRATEGY_BALANCE_SUBTOPOLOGY,
})
public void shouldOptimizeStandbyTasksWithMultipleRacks(final String strategy) {
final AssignmentConfigs assignmentConfigs = defaultAssignmentConfigs(
strategy, 100, 1, 1, Collections.emptyList());
final Map<TaskId, TaskInfo> tasks = mkMap(
mkTaskInfo(TASK_0_0, true, Set.of("rack-1", "rack-2")),
mkTaskInfo(TASK_0_1, true, Set.of("rack-2", "rack-3")),
mkTaskInfo(TASK_0_2, true, Set.of("rack-3", "rack-4"))
);
final Map<ProcessId, KafkaStreamsState> kafkaStreamsStates = mkMap(
mkStreamState(1, 2, Optional.of("rack-1")),
mkStreamState(2, 2, Optional.of("rack-2")),
mkStreamState(3, 2, Optional.of("rack-3"))
);
final ApplicationState applicationState = new TestApplicationState(
assignmentConfigs, kafkaStreamsStates, tasks);
final Map<ProcessId, KafkaStreamsAssignment> assignments = mkMap(
mkAssignment(AssignedTask.Type.ACTIVE, 1, TASK_0_0),
mkAssignment(AssignedTask.Type.ACTIVE, 2, TASK_0_1),
mkAssignment(AssignedTask.Type.ACTIVE, 3, TASK_0_2)
);
TaskAssignmentUtils.optimizeRackAwareActiveTasks(
RackAwareOptimizationParams.of(applicationState)
.forTasks(new TreeSet<>(Set.of(TASK_0_0, TASK_0_1, TASK_0_2))),
assignments
);
assertThat(assignments.size(), equalTo(3));
assertThat(assignments.get(processId(1)).tasks().keySet(), equalTo(Set.of(TASK_0_0)));
assertThat(assignments.get(processId(2)).tasks().keySet(), equalTo(Set.of(TASK_0_1)));
assertThat(assignments.get(processId(3)).tasks().keySet(), equalTo(Set.of(TASK_0_2)));
}
@Timeout(value = 30)
@Test
public void shouldCorrectlyReturnIdentityAssignment() {
final AssignmentConfigs assignmentConfigs = defaultAssignmentConfigs(
StreamsConfig.RACK_AWARE_ASSIGNMENT_STRATEGY_NONE, 100, 1, 1, Collections.emptyList());
final Map<TaskId, TaskInfo> tasks = mkMap(
mkTaskInfo(TASK_0_0, true),
mkTaskInfo(TASK_0_1, true),
mkTaskInfo(TASK_0_2, true)
);
final Map<ProcessId, KafkaStreamsState> kafkaStreamsStates = mkMap(
mkStreamState(1, 5, Optional.empty(), Set.of(TASK_0_0, TASK_0_1, TASK_0_2), Set.of()),
mkStreamState(2, 5, Optional.empty(), Set.of(), Set.of(TASK_0_0, TASK_0_1, TASK_0_2)),
mkStreamState(3, 5, Optional.empty(), Set.of(), Set.of()),
mkStreamState(4, 5, Optional.empty(), Set.of(), Set.of()),
mkStreamState(5, 5, Optional.empty(), Set.of(), Set.of())
);
final ApplicationState applicationState = new TestApplicationState(
assignmentConfigs, kafkaStreamsStates, tasks);
final Map<ProcessId, KafkaStreamsAssignment> assignments = TaskAssignmentUtils.identityAssignment(applicationState);
assertThat(assignments.size(), equalTo(5));
assertThat(assignments.get(processId(1)).tasks().keySet(), equalTo(Set.of(TASK_0_0, TASK_0_1, TASK_0_2)));
assertThat(assignments.get(processId(2)).tasks().keySet(), equalTo(Set.of(TASK_0_0, TASK_0_1, TASK_0_2)));
assertThat(assignments.get(processId(3)).tasks().keySet(), equalTo(Set.of()));
assertThat(assignments.get(processId(4)).tasks().keySet(), equalTo(Set.of()));
assertThat(assignments.get(processId(5)).tasks().keySet(), equalTo(Set.of()));
}
@Timeout(value = 30)
@Test
public void testValidateTaskAssignment() {
final AssignmentConfigs assignmentConfigs = defaultAssignmentConfigs(
StreamsConfig.RACK_AWARE_ASSIGNMENT_STRATEGY_NONE, 100, 1, 1, Collections.emptyList());
final Map<TaskId, TaskInfo> tasks = mkMap(
mkTaskInfo(TASK_1_1, false)
);
final Map<ProcessId, KafkaStreamsState> kafkaStreamsStates = mkMap(
mkStreamState(1, 5, Optional.empty()),
mkStreamState(2, 5, Optional.empty())
);
final ApplicationState applicationState = new TestApplicationState(
assignmentConfigs, kafkaStreamsStates, tasks);
// ****
final org.apache.kafka.streams.processor.assignment.TaskAssignor.TaskAssignment noError = new org.apache.kafka.streams.processor.assignment.TaskAssignor.TaskAssignment(
Set.of(
KafkaStreamsAssignment.of(processId(1), Set.of(
new KafkaStreamsAssignment.AssignedTask(
new TaskId(1, 1), KafkaStreamsAssignment.AssignedTask.Type.ACTIVE
)
)),
KafkaStreamsAssignment.of(processId(2), Set.of())
)
);
org.apache.kafka.streams.processor.assignment.TaskAssignor.AssignmentError error = TaskAssignmentUtils.validateTaskAssignment(applicationState, noError);
assertThat(error, equalTo(TaskAssignor.AssignmentError.NONE));
// ****
final org.apache.kafka.streams.processor.assignment.TaskAssignor.TaskAssignment missingProcessId = new org.apache.kafka.streams.processor.assignment.TaskAssignor.TaskAssignment(
Set.of(
KafkaStreamsAssignment.of(processId(1), Set.of(
new KafkaStreamsAssignment.AssignedTask(
new TaskId(1, 1), KafkaStreamsAssignment.AssignedTask.Type.ACTIVE
)
))
)
);
error = TaskAssignmentUtils.validateTaskAssignment(applicationState, missingProcessId);
assertThat(error, equalTo(TaskAssignor.AssignmentError.MISSING_PROCESS_ID));
// ****
final org.apache.kafka.streams.processor.assignment.TaskAssignor.TaskAssignment unknownProcessId = new org.apache.kafka.streams.processor.assignment.TaskAssignor.TaskAssignment(
Set.of(
KafkaStreamsAssignment.of(processId(1), Set.of(
new KafkaStreamsAssignment.AssignedTask(
new TaskId(1, 1), KafkaStreamsAssignment.AssignedTask.Type.ACTIVE
)
)),
KafkaStreamsAssignment.of(processId(2), Set.of()),
KafkaStreamsAssignment.of(ProcessId.randomProcessId(), Set.of())
)
);
error = TaskAssignmentUtils.validateTaskAssignment(applicationState, unknownProcessId);
assertThat(error, equalTo(TaskAssignor.AssignmentError.UNKNOWN_PROCESS_ID));
// ****
final org.apache.kafka.streams.processor.assignment.TaskAssignor.TaskAssignment unknownTaskId = new org.apache.kafka.streams.processor.assignment.TaskAssignor.TaskAssignment(
Set.of(
KafkaStreamsAssignment.of(processId(1), Set.of(
new KafkaStreamsAssignment.AssignedTask(
new TaskId(1, 1), KafkaStreamsAssignment.AssignedTask.Type.ACTIVE
)
)),
KafkaStreamsAssignment.of(processId(2), Set.of(
new KafkaStreamsAssignment.AssignedTask(
new TaskId(13, 13), KafkaStreamsAssignment.AssignedTask.Type.ACTIVE
)
))
)
);
error = TaskAssignmentUtils.validateTaskAssignment(applicationState, unknownTaskId);
assertThat(error, equalTo(TaskAssignor.AssignmentError.UNKNOWN_TASK_ID));
// ****
final org.apache.kafka.streams.processor.assignment.TaskAssignor.TaskAssignment activeTaskDuplicated = new org.apache.kafka.streams.processor.assignment.TaskAssignor.TaskAssignment(
Set.of(
KafkaStreamsAssignment.of(processId(1), Set.of(
new KafkaStreamsAssignment.AssignedTask(
new TaskId(1, 1), KafkaStreamsAssignment.AssignedTask.Type.ACTIVE
)
)),
KafkaStreamsAssignment.of(processId(2), Set.of(
new KafkaStreamsAssignment.AssignedTask(
new TaskId(1, 1), KafkaStreamsAssignment.AssignedTask.Type.ACTIVE
)
))
)
);
error = TaskAssignmentUtils.validateTaskAssignment(applicationState, activeTaskDuplicated);
assertThat(error, equalTo(TaskAssignor.AssignmentError.ACTIVE_TASK_ASSIGNED_MULTIPLE_TIMES));
}
public static | TaskAssignmentUtilsTest |
java | apache__logging-log4j2 | log4j-1.2-api/src/main/java/org/apache/log4j/config/PropertiesConfiguration.java | {
"start": 24695,
"end": 27798
} | class ____ a list of pre-parsed
// name-value pairs associated to that filter
final int fIdx = filterPrefix.length();
final SortedMap<String, List<NameValue>> filters = new TreeMap<>();
final Enumeration<?> e = props.keys();
String name = "";
while (e.hasMoreElements()) {
final String key = (String) e.nextElement();
if (key.startsWith(filterPrefix)) {
final int dotIdx = key.indexOf('.', fIdx);
String filterKey = key;
if (dotIdx != -1) {
filterKey = key.substring(0, dotIdx);
name = key.substring(dotIdx + 1);
}
final List<NameValue> filterOpts = filters.computeIfAbsent(filterKey, k -> new ArrayList<>());
if (dotIdx != -1) {
final String value = OptionConverter.findAndSubst(key, props);
filterOpts.add(new NameValue(name, value));
}
}
}
Filter head = null;
for (final Map.Entry<String, List<NameValue>> entry : filters.entrySet()) {
final String clazz = props.getProperty(entry.getKey());
Filter filter = null;
if (clazz != null) {
filter = manager.parse(clazz, entry.getKey(), props, this, BuilderManager.INVALID_FILTER);
if (filter == null) {
LOGGER.debug("Filter key: [{}] class: [{}] props: {}", entry.getKey(), clazz, entry.getValue());
filter = buildFilter(clazz, appenderName, entry.getValue());
}
}
head = FilterAdapter.addFilter(head, filter);
}
return head;
}
private Filter buildFilter(final String className, final String appenderName, final List<NameValue> props) {
final Filter filter = newInstanceOf(className, "Filter");
if (filter != null) {
final PropertySetter propSetter = new PropertySetter(filter);
for (final NameValue property : props) {
propSetter.setProperty(property.key, property.value);
}
propSetter.activate();
}
return filter;
}
public TriggeringPolicy parseTriggeringPolicy(final Properties props, final String policyPrefix) {
final String policyClass = OptionConverter.findAndSubst(policyPrefix, props);
if (policyClass == null) {
return null;
}
return manager.parse(policyClass, policyPrefix, props, this, null);
}
private static <T> T newInstanceOf(final String className, final String type) {
try {
return LoaderUtil.newInstanceOf(className);
} catch (ReflectiveOperationException ex) {
LOGGER.error(
"Unable to create {} {} due to {}:{}",
type,
className,
ex.getClass().getSimpleName(),
ex.getMessage(),
ex);
return null;
}
}
private static | to |
java | quarkusio__quarkus | independent-projects/arc/tests/src/test/java/io/quarkus/arc/test/interceptors/bindings/repeatable/InheritedRepeatableInterceptorBindingTest.java | {
"start": 3632,
"end": 3783
} | class ____ extends SuperclassWithMethodLevelBindings {
}
@Interceptor
@MyBinding("foo")
@MyBinding("bar")
static | MethodInterceptedBean |
java | quarkusio__quarkus | core/deployment/src/main/java/io/quarkus/deployment/pkg/builditem/CompiledJavaVersionBuildItem.java | {
"start": 362,
"end": 1002
} | class ____ extends SimpleBuildItem {
private final JavaVersion javaVersion;
private CompiledJavaVersionBuildItem(JavaVersion javaVersion) {
this.javaVersion = javaVersion;
}
public static CompiledJavaVersionBuildItem unknown() {
return new CompiledJavaVersionBuildItem(new JavaVersion.Unknown());
}
public static CompiledJavaVersionBuildItem fromMajorJavaVersion(int majorJavaVersion) {
return new CompiledJavaVersionBuildItem(new JavaVersion.Known(majorJavaVersion));
}
public JavaVersion getJavaVersion() {
return javaVersion;
}
public | CompiledJavaVersionBuildItem |
java | redisson__redisson | redisson/src/main/java/org/redisson/eviction/JCacheEvictionTask.java | {
"start": 908,
"end": 2410
} | class ____ extends EvictionTask {
private final String name;
private final String timeoutSetName;
private final String expiredChannelName;
public JCacheEvictionTask(String name, String timeoutSetName, String expiredChannelName, CommandAsyncExecutor executor) {
super(executor);
this.name = name;
this.timeoutSetName = timeoutSetName;
this.expiredChannelName = expiredChannelName;
}
@Override
String getName() {
return name;
}
@Override
CompletionStage<Integer> execute() {
return executor.evalWriteAsync(name, LongCodec.INSTANCE, RedisCommands.EVAL_INTEGER,
"local expiredKeys = redis.call('zrangebyscore', KEYS[2], 0, ARGV[1], 'limit', 0, ARGV[2]); "
+ "for i, k in ipairs(expiredKeys) do "
+ "local v = redis.call('hget', KEYS[1], k);"
+ "local msg = struct.pack('Lc0Lc0', string.len(tostring(k)), tostring(k), string.len(tostring(v)), tostring(v));"
+ "redis.call('publish', KEYS[3], msg);"
+ "end; "
+ "if #expiredKeys > 0 then "
+ "redis.call('zrem', KEYS[2], unpack(expiredKeys)); "
+ "redis.call('hdel', KEYS[1], unpack(expiredKeys)); "
+ "end; "
+ "return #expiredKeys;",
Arrays.<Object>asList(name, timeoutSetName, expiredChannelName), System.currentTimeMillis(), keysLimit);
}
}
| JCacheEvictionTask |
java | grpc__grpc-java | okhttp/src/test/java/io/grpc/okhttp/OkHttpChannelBuilderTest.java | {
"start": 2404,
"end": 20494
} | class ____ {
@Rule public final GrpcCleanupRule grpcCleanupRule = new GrpcCleanupRule();
@Test
public void authorityIsReadable() {
OkHttpChannelBuilder builder = OkHttpChannelBuilder.forAddress("original", 1234);
ManagedChannel channel = grpcCleanupRule.register(builder.build());
assertEquals("original:1234", channel.authority());
}
@Test
public void overrideAuthorityIsReadableForAddress() {
OkHttpChannelBuilder builder = OkHttpChannelBuilder.forAddress("original", 1234);
overrideAuthorityIsReadableHelper(builder, "override:5678");
}
@Test
public void overrideAuthorityIsReadableForTarget() {
OkHttpChannelBuilder builder = OkHttpChannelBuilder.forTarget("original:1234");
overrideAuthorityIsReadableHelper(builder, "override:5678");
}
private void overrideAuthorityIsReadableHelper(OkHttpChannelBuilder builder,
String overrideAuthority) {
builder.overrideAuthority(overrideAuthority);
ManagedChannel channel = grpcCleanupRule.register(builder.build());
assertEquals(overrideAuthority, channel.authority());
}
@Test
public void failOverrideInvalidAuthority() {
OkHttpChannelBuilder builder = OkHttpChannelBuilder.forAddress("good", 1234);
IllegalArgumentException e = assertThrows(IllegalArgumentException.class,
() -> builder.overrideAuthority("[invalidauthority"));
assertThat(e).hasMessageThat().isEqualTo("Invalid authority: [invalidauthority");
}
@Test
public void disableCheckAuthorityAllowsInvalidAuthority() {
OkHttpChannelBuilder builder = OkHttpChannelBuilder.forAddress("good", 1234)
.disableCheckAuthority();
builder.overrideAuthority("[invalidauthority").usePlaintext().buildTransportFactory();
}
@Test
public void enableCheckAuthorityFailOverrideInvalidAuthority() {
OkHttpChannelBuilder builder = OkHttpChannelBuilder.forAddress("good", 1234)
.disableCheckAuthority()
.enableCheckAuthority();
IllegalArgumentException e = assertThrows(IllegalArgumentException.class,
() -> builder.overrideAuthority("[invalidauthority"));
assertThat(e).hasMessageThat().isEqualTo("Invalid authority: [invalidauthority");
}
@Test
public void failInvalidAuthority() {
IllegalArgumentException e = assertThrows(IllegalArgumentException.class,
() -> OkHttpChannelBuilder.forAddress("invalid_authority", 1234));
assertThat(e.getMessage()).isEqualTo("Invalid host or port: invalid_authority 1234");
}
@Test
public void sslSocketFactoryFrom_unknown() {
OkHttpChannelBuilder.SslSocketFactoryResult result =
OkHttpChannelBuilder.sslSocketFactoryFrom(new ChannelCredentials() {
@Override
public ChannelCredentials withoutBearerTokens() {
throw new UnsupportedOperationException();
}
});
assertThat(result.error).isNotNull();
assertThat(result.callCredentials).isNull();
assertThat(result.factory).isNull();
}
@Test
public void sslSocketFactoryFrom_tls() {
OkHttpChannelBuilder.SslSocketFactoryResult result =
OkHttpChannelBuilder.sslSocketFactoryFrom(TlsChannelCredentials.create());
assertThat(result.error).isNull();
assertThat(result.callCredentials).isNull();
assertThat(result.factory).isNotNull();
}
@Test
public void sslSocketFactoryFrom_unsupportedTls() {
OkHttpChannelBuilder.SslSocketFactoryResult result = OkHttpChannelBuilder.sslSocketFactoryFrom(
TlsChannelCredentials.newBuilder().requireFakeFeature().build());
assertThat(result.error).contains("FAKE");
assertThat(result.callCredentials).isNull();
assertThat(result.factory).isNull();
}
@Test
public void sslSocketFactoryFrom_tls_customRoots() throws Exception {
SSLContext serverContext = SSLContext.getInstance("TLS");
try (InputStream server1Chain = TlsTesting.loadCert("server1.pem");
InputStream server1Key = TlsTesting.loadCert("server1.key")) {
serverContext.init(
OkHttpChannelBuilder.createKeyManager(server1Chain, server1Key), null, null);
}
final SSLServerSocket serverListenSocket =
(SSLServerSocket) serverContext.getServerSocketFactory().createServerSocket(0);
final SettableFuture<SSLSocket> serverSocket = SettableFuture.create();
new Thread(new Runnable() {
@Override public void run() {
try {
SSLSocket socket = (SSLSocket) serverListenSocket.accept();
socket.getSession(); // Force handshake
serverSocket.set(socket);
serverListenSocket.close();
} catch (Throwable t) {
serverSocket.setException(t);
}
}
}).start();
ChannelCredentials creds;
try (InputStream ca = TlsTesting.loadCert("ca.pem")) {
creds = TlsChannelCredentials.newBuilder()
.trustManager(ca)
.build();
}
OkHttpChannelBuilder.SslSocketFactoryResult result =
OkHttpChannelBuilder.sslSocketFactoryFrom(creds);
SSLSocket socket =
(SSLSocket) result.factory.createSocket("localhost", serverListenSocket.getLocalPort());
socket.getSession(); // Force handshake
socket.close();
serverSocket.get().close();
}
@Test
public void sslSocketFactoryFrom_tls_mtls() throws Exception {
KeyManager[] keyManagers;
try (InputStream server1Chain = TlsTesting.loadCert("server1.pem");
InputStream server1Key = TlsTesting.loadCert("server1.key")) {
keyManagers = OkHttpChannelBuilder.createKeyManager(server1Chain, server1Key);
}
TrustManager[] trustManagers;
try (InputStream ca = TlsTesting.loadCert("ca.pem")) {
trustManagers = CertificateUtils.createTrustManager(ca);
}
SSLContext serverContext = SSLContext.getInstance("TLS");
serverContext.init(keyManagers, trustManagers, null);
final SSLServerSocket serverListenSocket =
(SSLServerSocket) serverContext.getServerSocketFactory().createServerSocket(0);
serverListenSocket.setNeedClientAuth(true);
final SettableFuture<SSLSocket> serverSocket = SettableFuture.create();
new Thread(new Runnable() {
@Override public void run() {
try {
SSLSocket socket = (SSLSocket) serverListenSocket.accept();
socket.getSession(); // Force handshake
serverSocket.set(socket);
serverListenSocket.close();
} catch (Throwable t) {
serverSocket.setException(t);
}
}
}).start();
ChannelCredentials creds = TlsChannelCredentials.newBuilder()
.keyManager(keyManagers)
.trustManager(trustManagers)
.build();
OkHttpChannelBuilder.SslSocketFactoryResult result =
OkHttpChannelBuilder.sslSocketFactoryFrom(creds);
SSLSocket socket =
(SSLSocket) result.factory.createSocket("localhost", serverListenSocket.getLocalPort());
socket.getSession(); // Force handshake
assertThat(((X500Principal) serverSocket.get().getSession().getPeerPrincipal()).getName())
.isEqualTo("CN=*.test.google.com,O=Example\\, Co.,L=Chicago,ST=Illinois,C=US");
socket.close();
serverSocket.get().close();
}
@Test
public void sslSocketFactoryFrom_tls_mtls_keyFile() throws Exception {
SSLContext serverContext = SSLContext.getInstance("TLS");
try (InputStream server1Chain = TlsTesting.loadCert("server1.pem");
InputStream server1Key = TlsTesting.loadCert("server1.key");
InputStream ca = TlsTesting.loadCert("ca.pem")) {
serverContext.init(
OkHttpChannelBuilder.createKeyManager(server1Chain, server1Key),
CertificateUtils.createTrustManager(ca),
null);
}
final SSLServerSocket serverListenSocket =
(SSLServerSocket) serverContext.getServerSocketFactory().createServerSocket(0);
serverListenSocket.setNeedClientAuth(true);
final SettableFuture<SSLSocket> serverSocket = SettableFuture.create();
new Thread(new Runnable() {
@Override public void run() {
try {
SSLSocket socket = (SSLSocket) serverListenSocket.accept();
socket.getSession(); // Force handshake
serverSocket.set(socket);
serverListenSocket.close();
} catch (Throwable t) {
serverSocket.setException(t);
}
}
}).start();
ChannelCredentials creds;
try (InputStream server1Chain = TlsTesting.loadCert("server1.pem");
InputStream server1Key = TlsTesting.loadCert("server1.key");
InputStream ca = TlsTesting.loadCert("ca.pem")) {
creds = TlsChannelCredentials.newBuilder()
.keyManager(server1Chain, server1Key)
.trustManager(ca)
.build();
}
OkHttpChannelBuilder.SslSocketFactoryResult result =
OkHttpChannelBuilder.sslSocketFactoryFrom(creds);
SSLSocket socket =
(SSLSocket) result.factory.createSocket("localhost", serverListenSocket.getLocalPort());
socket.getSession(); // Force handshake
assertThat(((X500Principal) serverSocket.get().getSession().getPeerPrincipal()).getName())
.isEqualTo("CN=*.test.google.com,O=Example\\, Co.,L=Chicago,ST=Illinois,C=US");
socket.close();
serverSocket.get().close();
}
@Test
public void sslSocketFactoryFrom_tls_mtls_passwordUnsupported() throws Exception {
ChannelCredentials creds = TlsChannelCredentials.newBuilder()
.keyManager(
TlsTesting.loadCert("server1.pem"), TlsTesting.loadCert("server1.key"), "password")
.build();
OkHttpChannelBuilder.SslSocketFactoryResult result =
OkHttpChannelBuilder.sslSocketFactoryFrom(creds);
assertThat(result.error).contains("unsupported");
assertThat(result.callCredentials).isNull();
assertThat(result.factory).isNull();
}
@Test
public void sslSocketFactoryFrom_insecure() {
OkHttpChannelBuilder.SslSocketFactoryResult result =
OkHttpChannelBuilder.sslSocketFactoryFrom(InsecureChannelCredentials.create());
assertThat(result.error).isNull();
assertThat(result.callCredentials).isNull();
assertThat(result.factory).isNull();
}
@Test
public void sslSocketFactoryFrom_composite() {
CallCredentials callCredentials = mock(CallCredentials.class);
OkHttpChannelBuilder.SslSocketFactoryResult result =
OkHttpChannelBuilder.sslSocketFactoryFrom(CompositeChannelCredentials.create(
TlsChannelCredentials.create(), callCredentials));
assertThat(result.error).isNull();
assertThat(result.callCredentials).isSameInstanceAs(callCredentials);
assertThat(result.factory).isNotNull();
result = OkHttpChannelBuilder.sslSocketFactoryFrom(CompositeChannelCredentials.create(
InsecureChannelCredentials.create(), callCredentials));
assertThat(result.error).isNull();
assertThat(result.callCredentials).isSameInstanceAs(callCredentials);
assertThat(result.factory).isNull();
}
@Test
public void sslSocketFactoryFrom_okHttp() throws Exception {
SSLContext sslContext = SSLContext.getInstance("TLS");
sslContext.init(null, null, null);
SSLSocketFactory sslSocketFactory = sslContext.getSocketFactory();
OkHttpChannelBuilder.SslSocketFactoryResult result = OkHttpChannelBuilder.sslSocketFactoryFrom(
SslSocketFactoryChannelCredentials.create(sslSocketFactory));
assertThat(result.error).isNull();
assertThat(result.callCredentials).isNull();
assertThat(result.factory).isSameInstanceAs(sslSocketFactory);
}
@Test
public void sslSocketFactoryFrom_choice() {
OkHttpChannelBuilder.SslSocketFactoryResult result =
OkHttpChannelBuilder.sslSocketFactoryFrom(ChoiceChannelCredentials.create(
new ChannelCredentials() {
@Override
public ChannelCredentials withoutBearerTokens() {
throw new UnsupportedOperationException();
}
},
TlsChannelCredentials.create(),
InsecureChannelCredentials.create()));
assertThat(result.error).isNull();
assertThat(result.callCredentials).isNull();
assertThat(result.factory).isNotNull();
result = OkHttpChannelBuilder.sslSocketFactoryFrom(ChoiceChannelCredentials.create(
InsecureChannelCredentials.create(),
new ChannelCredentials() {
@Override
public ChannelCredentials withoutBearerTokens() {
throw new UnsupportedOperationException();
}
},
TlsChannelCredentials.create()));
assertThat(result.error).isNull();
assertThat(result.callCredentials).isNull();
assertThat(result.factory).isNull();
}
@Test
public void sslSocketFactoryFrom_choice_unknown() {
OkHttpChannelBuilder.SslSocketFactoryResult result =
OkHttpChannelBuilder.sslSocketFactoryFrom(ChoiceChannelCredentials.create(
new ChannelCredentials() {
@Override
public ChannelCredentials withoutBearerTokens() {
throw new UnsupportedOperationException();
}
}));
assertThat(result.error).isNotNull();
assertThat(result.callCredentials).isNull();
assertThat(result.factory).isNull();
}
@Test
public void failForUsingClearTextSpecDirectly() {
OkHttpChannelBuilder builder = OkHttpChannelBuilder.forAddress("host", 1234);
IllegalArgumentException e = assertThrows(IllegalArgumentException.class,
() -> builder.connectionSpec(ConnectionSpec.CLEARTEXT));
assertThat(e).hasMessageThat().isEqualTo("plaintext ConnectionSpec is not accepted");
}
@Test
public void allowUsingTlsConnectionSpec() {
OkHttpChannelBuilder.forAddress("host", 1234).connectionSpec(ConnectionSpec.MODERN_TLS);
}
@Test
public void usePlaintext_newClientTransportAllowed() {
OkHttpChannelBuilder builder = OkHttpChannelBuilder.forAddress("host", 1234).usePlaintext();
builder.buildTransportFactory().newClientTransport(
new InetSocketAddress(5678),
new ClientTransportFactory.ClientTransportOptions(), new FakeChannelLogger());
}
@Test
public void usePlaintextDefaultPort() {
OkHttpChannelBuilder builder = OkHttpChannelBuilder.forAddress("host", 1234).usePlaintext();
assertEquals(GrpcUtil.DEFAULT_PORT_PLAINTEXT, builder.getDefaultPort());
}
@Test
public void usePlaintextCreatesNullSocketFactory() {
OkHttpChannelBuilder builder = OkHttpChannelBuilder.forAddress("host", 1234);
assertNotNull(builder.createSslSocketFactory());
builder.usePlaintext();
assertNull(builder.createSslSocketFactory());
}
@Test
public void scheduledExecutorService_default() {
OkHttpChannelBuilder builder = OkHttpChannelBuilder.forTarget("foo");
ClientTransportFactory clientTransportFactory = builder.buildTransportFactory();
assertSame(
SharedResourceHolder.get(TIMER_SERVICE),
clientTransportFactory.getScheduledExecutorService());
SharedResourceHolder.release(
TIMER_SERVICE, clientTransportFactory.getScheduledExecutorService());
clientTransportFactory.close();
}
@Test
public void scheduledExecutorService_custom() {
OkHttpChannelBuilder builder = OkHttpChannelBuilder.forTarget("foo");
ScheduledExecutorService scheduledExecutorService =
new FakeClock().getScheduledExecutorService();
OkHttpChannelBuilder builder1 = builder.scheduledExecutorService(scheduledExecutorService);
assertSame(builder, builder1);
ClientTransportFactory clientTransportFactory = builder1.buildTransportFactory();
assertSame(scheduledExecutorService, clientTransportFactory.getScheduledExecutorService());
clientTransportFactory.close();
}
@Test
public void socketFactory_default() {
OkHttpChannelBuilder builder = OkHttpChannelBuilder.forTarget("foo");
ClientTransportFactory transportFactory = builder.buildTransportFactory();
OkHttpClientTransport transport =
(OkHttpClientTransport)
transportFactory.newClientTransport(
new InetSocketAddress(5678),
new ClientTransportFactory.ClientTransportOptions(),
new FakeChannelLogger());
assertSame(SocketFactory.getDefault(), transport.getSocketFactory());
transportFactory.close();
}
@Test
public void socketFactory_custom() {
SocketFactory socketFactory =
new SocketFactory() {
@Override
public Socket createSocket(String s, int i) {
return null;
}
@Override
public Socket createSocket(String s, int i, InetAddress inetAddress, int i1) {
return null;
}
@Override
public Socket createSocket(InetAddress inetAddress, int i) {
return null;
}
@Override
public Socket createSocket(
InetAddress inetAddress, int i, InetAddress inetAddress1, int i1) {
return null;
}
};
OkHttpChannelBuilder builder =
OkHttpChannelBuilder.forTarget("foo").socketFactory(socketFactory);
ClientTransportFactory transportFactory = builder.buildTransportFactory();
OkHttpClientTransport transport =
(OkHttpClientTransport)
transportFactory.newClientTransport(
new InetSocketAddress(5678),
new ClientTransportFactory.ClientTransportOptions(),
new FakeChannelLogger());
assertSame(socketFactory, transport.getSocketFactory());
transportFactory.close();
}
@Test
public void transportFactorySupportsOkHttpChannelCreds() {
OkHttpChannelBuilder builder = OkHttpChannelBuilder.forTarget("foo");
ClientTransportFactory transportFactory = builder.buildTransportFactory();
SwapChannelCredentialsResult result = transportFactory.swapChannelCredentials(
mock(ChannelCredentials.class));
assertThat(result).isNull();
result = transportFactory.swapChannelCredentials(
SslSocketFactoryChannelCredentials.create(mock(SSLSocketFactory.class)));
assertThat(result).isNotNull();
}
private static final | OkHttpChannelBuilderTest |
java | FasterXML__jackson-databind | src/test/java/tools/jackson/databind/deser/AnySetterTest.java | {
"start": 2915,
"end": 3134
} | class ____
{
@JsonAnyGetter
public Map<String, JsonNode> getUndefinedProperties() {
throw new IllegalStateException("Should not call parent version!");
}
}
static | Bean797Base |
java | google__guice | core/test/com/google/inject/NullableInjectionPointTest.java | {
"start": 8814,
"end": 8983
} | class ____ {
Foo foo;
@Inject
TypeUseNullableFooConstructor(@TypeUse.Nullable Foo foo) {
this.foo = foo;
}
}
static | TypeUseNullableFooConstructor |
java | apache__dubbo | dubbo-rpc/dubbo-rpc-api/src/main/java/org/apache/dubbo/rpc/protocol/ReferenceCountInvokerWrapper.java | {
"start": 1470,
"end": 4190
} | class ____<T> implements Invoker<T> {
private final ErrorTypeAwareLogger logger =
LoggerFactory.getErrorTypeAwareLogger(ReferenceCountInvokerWrapper.class);
private final Invoker<T> invoker;
private final ReadWriteLock lock = new ReentrantReadWriteLock();
private final AtomicBoolean destroyed = new AtomicBoolean(false);
public ReferenceCountInvokerWrapper(Invoker<T> invoker) {
this.invoker = invoker;
}
@Override
public URL getUrl() {
return invoker.getUrl();
}
@Override
public boolean isAvailable() {
return !destroyed.get() && invoker.isAvailable();
}
@Override
public void destroy() {
try {
int timeout =
ConfigurationUtils.getServerShutdownTimeout(invoker.getUrl().getScopeModel());
boolean locked = lock.writeLock().tryLock(timeout, TimeUnit.MILLISECONDS);
if (!locked) {
logger.warn(
LoggerCodeConstants.PROTOCOL_CLOSED_SERVER,
"",
"",
"Failed to wait for invocation end in " + timeout + "ms.");
}
destroyed.set(true);
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
} finally {
try {
lock.writeLock().unlock();
} catch (IllegalMonitorStateException ignore) {
// ignore if lock failed, maybe in a long invoke
} catch (Throwable t) {
logger.warn(
LoggerCodeConstants.PROTOCOL_CLOSED_SERVER,
"",
"",
"Unexpected error occurred when releasing write lock, cause: " + t.getMessage(),
t);
}
}
invoker.destroy();
}
@Override
public Class<T> getInterface() {
return invoker.getInterface();
}
@Override
public Result invoke(Invocation invocation) throws RpcException {
try {
lock.readLock().lock();
if (destroyed.get()) {
logger.warn(
LoggerCodeConstants.PROTOCOL_CLOSED_SERVER,
"",
"",
"Remote invoker has been destroyed, and unable to invoke anymore.");
throw new RpcException("This invoker has been destroyed!");
}
return invoker.invoke(invocation);
} finally {
lock.readLock().unlock();
}
}
public Invoker<T> getInvoker() {
return invoker;
}
}
| ReferenceCountInvokerWrapper |
java | micronaut-projects__micronaut-core | inject/src/main/java/io/micronaut/context/DefaultCustomScopeRegistry.java | {
"start": 1573,
"end": 4722
} | class ____ implements CustomScopeRegistry {
/**
* Constant to refer to inject scope.
*/
static final CustomScope<InjectScope> INJECT_SCOPE = new InjectScopeImpl();
private final BeanLocator beanLocator;
private final Map<String, Optional<CustomScope<?>>> scopes = new ConcurrentHashMap<>(2);
/**
* @param beanLocator The bean locator
*/
protected DefaultCustomScopeRegistry(BeanLocator beanLocator) {
this.beanLocator = beanLocator;
this.scopes.put(InjectScope.class.getName(), Optional.of(INJECT_SCOPE));
}
@Override
public <T> Optional<BeanRegistration<T>> findBeanRegistration(T bean) {
for (Optional<CustomScope<?>> value : scopes.values()) {
if (value.isPresent()) {
final CustomScope<?> customScope = value.get();
final Optional<BeanRegistration<T>> beanRegistration = customScope.findBeanRegistration(bean);
if (beanRegistration.isPresent()) {
return beanRegistration;
}
}
}
return Optional.empty();
}
@Override
public Optional<CustomScope<?>> findDeclaredScope(@NonNull Argument<?> argument) {
final AnnotationMetadata annotationMetadata = argument.getAnnotationMetadata();
if (annotationMetadata.hasStereotype(AnnotationUtil.SCOPE)) {
return annotationMetadata.getAnnotationNameByStereotype(AnnotationUtil.SCOPE).flatMap(this::findScope);
}
return Optional.empty();
}
@Override
public Optional<CustomScope<?>> findDeclaredScope(@NonNull BeanType<?> beanType) {
if (beanType.getAnnotationMetadata().hasStereotype(AnnotationUtil.SCOPE)) {
final List<String> scopeHierarchy = beanType.getAnnotationMetadata().getAnnotationNamesByStereotype(AnnotationUtil.SCOPE);
if (CollectionUtils.isNotEmpty(scopeHierarchy)) {
Optional<CustomScope<?>> registeredScope = Optional.empty();
for (String scope : scopeHierarchy) {
registeredScope = findScope(scope);
if (registeredScope.isPresent()) {
break;
}
}
return registeredScope;
}
}
return Optional.empty();
}
@SuppressWarnings("unchecked")
@Override
public Optional<CustomScope<?>> findScope(Class<? extends Annotation> scopeAnnotation) {
return scopes.computeIfAbsent(scopeAnnotation.getName(), s -> {
final Qualifier qualifier = Qualifiers.byTypeArguments(scopeAnnotation);
return beanLocator.findBean(CustomScope.class, qualifier);
});
}
@SuppressWarnings("unchecked")
@Override
public Optional<CustomScope<?>> findScope(String scopeAnnotation) {
return scopes.computeIfAbsent(scopeAnnotation, type -> {
final Qualifier qualifier = Qualifiers.byExactTypeArgumentName(scopeAnnotation);
return beanLocator.findBean(CustomScope.class, qualifier);
});
}
private static final | DefaultCustomScopeRegistry |
java | hibernate__hibernate-orm | hibernate-envers/src/test/java/org/hibernate/orm/test/envers/entities/onetomany/detached/ListJoinColumnBidirectionalInheritanceRefIngEntity.java | {
"start": 827,
"end": 3025
} | class ____ {
@Id
@GeneratedValue
private Integer id;
private String data;
@OneToMany
@JoinColumn(name = "some_join_column")
@AuditMappedBy(mappedBy = "owner")
private List<ListJoinColumnBidirectionalInheritanceRefEdParentEntity> references;
public ListJoinColumnBidirectionalInheritanceRefIngEntity() {
}
public ListJoinColumnBidirectionalInheritanceRefIngEntity(
Integer id,
String data,
ListJoinColumnBidirectionalInheritanceRefEdParentEntity... references) {
this.id = id;
this.data = data;
this.references = new ArrayList<ListJoinColumnBidirectionalInheritanceRefEdParentEntity>();
this.references.addAll( Arrays.asList( references ) );
}
public ListJoinColumnBidirectionalInheritanceRefIngEntity(
String data,
ListJoinColumnBidirectionalInheritanceRefEdParentEntity... references) {
this( null, data, references );
}
public Integer getId() {
return id;
}
public void setId(Integer id) {
this.id = id;
}
public String getData() {
return data;
}
public void setData(String data) {
this.data = data;
}
public List<ListJoinColumnBidirectionalInheritanceRefEdParentEntity> getReferences() {
return references;
}
public void setReferences(List<ListJoinColumnBidirectionalInheritanceRefEdParentEntity> references) {
this.references = references;
}
public boolean equals(Object o) {
if ( this == o ) {
return true;
}
if ( !(o instanceof ListJoinColumnBidirectionalInheritanceRefIngEntity) ) {
return false;
}
ListJoinColumnBidirectionalInheritanceRefIngEntity that = (ListJoinColumnBidirectionalInheritanceRefIngEntity) o;
if ( data != null ? !data.equals( that.data ) : that.data != null ) {
return false;
}
//noinspection RedundantIfStatement
if ( id != null ? !id.equals( that.id ) : that.id != null ) {
return false;
}
return true;
}
public int hashCode() {
int result;
result = (id != null ? id.hashCode() : 0);
result = 31 * result + (data != null ? data.hashCode() : 0);
return result;
}
public String toString() {
return "ListJoinColumnBidirectionalInheritanceRefIngEntity(id = " + id + ", data = " + data + ")";
}
}
| ListJoinColumnBidirectionalInheritanceRefIngEntity |
java | quarkusio__quarkus | core/processor/src/test/java/io/quarkus/annotation/processor/documentation/config/formatter/JavadocToAsciidocTransformerConfigItemTest.java | {
"start": 581,
"end": 9654
} | class ____ {
@Test
public void removeParagraphIndentation() {
ParsedJavadoc parsed = JavadocUtil.parseConfigItemJavadoc("First paragraph<br><br> Second Paragraph");
assertEquals("First paragraph +\n +\nSecond Paragraph",
JavadocToAsciidocTransformer.toAsciidoc(parsed.description(), parsed.format()));
}
@Test
public void parseUntrimmedJavaDoc() {
ParsedJavadoc parsed = JavadocUtil.parseConfigItemJavadoc(" ");
assertNull(parsed.description());
parsed = JavadocUtil.parseConfigItemJavadoc(" <br> </br> ");
String description = JavadocToAsciidocTransformer.toAsciidoc(parsed.description(), parsed.format());
assertNull(description);
}
@Test
public void parseJavaDocWithParagraph() {
String javaDoc = "hello<p>world</p>";
String expectedOutput = "hello\n\nworld";
ParsedJavadoc parsed = JavadocUtil.parseConfigItemJavadoc(javaDoc);
String description = JavadocToAsciidocTransformer.toAsciidoc(parsed.description(), parsed.format());
assertEquals(expectedOutput, description);
javaDoc = "hello world<p>bonjour </p><p>le monde</p>";
expectedOutput = "hello world\n\nbonjour\n\nle monde";
parsed = JavadocUtil.parseConfigItemJavadoc(javaDoc);
description = JavadocToAsciidocTransformer.toAsciidoc(parsed.description(), parsed.format());
assertEquals(expectedOutput, description);
}
@Test
public void parseJavaDocWithStyles() {
// Bold
String javaDoc = "hello <b>world</b>";
String expectedOutput = "hello *world*";
ParsedJavadoc parsed = JavadocUtil.parseConfigItemJavadoc(javaDoc);
String description = JavadocToAsciidocTransformer.toAsciidoc(parsed.description(), parsed.format());
assertEquals(expectedOutput, description);
javaDoc = "hello <strong>world</strong>";
expectedOutput = "hello *world*";
parsed = JavadocUtil.parseConfigItemJavadoc(javaDoc);
description = JavadocToAsciidocTransformer.toAsciidoc(parsed.description(), parsed.format());
assertEquals(expectedOutput, description);
// Emphasized
javaDoc = "<em>hello world</em>";
expectedOutput = "_hello world_";
parsed = JavadocUtil.parseConfigItemJavadoc(javaDoc);
description = JavadocToAsciidocTransformer.toAsciidoc(parsed.description(), parsed.format());
assertEquals(expectedOutput, description);
// Italics
javaDoc = "<i>hello world</i>";
expectedOutput = "_hello world_";
parsed = JavadocUtil.parseConfigItemJavadoc(javaDoc);
description = JavadocToAsciidocTransformer.toAsciidoc(parsed.description(), parsed.format());
assertEquals(expectedOutput, description);
// Underline
javaDoc = "<u>hello world</u>";
expectedOutput = "[.underline]#hello world#";
parsed = JavadocUtil.parseConfigItemJavadoc(javaDoc);
description = JavadocToAsciidocTransformer.toAsciidoc(parsed.description(), parsed.format());
assertEquals(expectedOutput, description);
// small
javaDoc = "<small>quarkus subatomic</small>";
expectedOutput = "[.small]#quarkus subatomic#";
parsed = JavadocUtil.parseConfigItemJavadoc(javaDoc);
description = JavadocToAsciidocTransformer.toAsciidoc(parsed.description(), parsed.format());
assertEquals(expectedOutput, description);
// big
javaDoc = "<big>hello world</big>";
expectedOutput = "[.big]#hello world#";
parsed = JavadocUtil.parseConfigItemJavadoc(javaDoc);
description = JavadocToAsciidocTransformer.toAsciidoc(parsed.description(), parsed.format());
assertEquals(expectedOutput, description);
// line through
javaDoc = "<del>hello </del><strike>monolith </strike><s>world</s>";
expectedOutput = "[.line-through]#hello #[.line-through]#monolith #[.line-through]#world#";
parsed = JavadocUtil.parseConfigItemJavadoc(javaDoc);
description = JavadocToAsciidocTransformer.toAsciidoc(parsed.description(), parsed.format());
assertEquals(expectedOutput, description);
// superscript and subscript
javaDoc = "<sup>cloud </sup><sub>in-premise</sub>";
expectedOutput = "^cloud ^~in-premise~";
parsed = JavadocUtil.parseConfigItemJavadoc(javaDoc);
description = JavadocToAsciidocTransformer.toAsciidoc(parsed.description(), parsed.format());
assertEquals(expectedOutput, description);
}
@Test
public void parseJavaDocWithLiTagsInsideUlTag() {
String javaDoc = "List:" +
"<ul>\n" +
"<li>1</li>\n" +
"<li>2</li>\n" +
"</ul>" +
"";
String expectedOutput = "List:\n\n - 1\n - 2";
ParsedJavadoc parsed = JavadocUtil.parseConfigItemJavadoc(javaDoc);
String description = JavadocToAsciidocTransformer.toAsciidoc(parsed.description(), parsed.format());
assertEquals(expectedOutput, description);
}
@Test
public void parseJavaDocWithLiTagsInsideOlTag() {
String javaDoc = "List:" +
"<ol>\n" +
"<li>1</li>\n" +
"<li>2</li>\n" +
"</ol>" +
"";
String expectedOutput = "List:\n\n . 1\n . 2";
ParsedJavadoc parsed = JavadocUtil.parseConfigItemJavadoc(javaDoc);
String description = JavadocToAsciidocTransformer.toAsciidoc(parsed.description(), parsed.format());
assertEquals(expectedOutput, description);
}
@Test
public void parseJavaDocWithLinkInlineSnippet() {
String javaDoc = "{@link firstlink} {@link #secondlink} \n {@linkplain #third.link}";
String expectedOutput = "`firstlink` `secondlink` `third.link`";
ParsedJavadoc parsed = JavadocUtil.parseConfigItemJavadoc(javaDoc);
String description = JavadocToAsciidocTransformer.toAsciidoc(parsed.description(), parsed.format());
assertEquals(expectedOutput, description);
}
@Test
public void parseJavaDocWithLinkTag() {
String javaDoc = "this is a <a href='http://link.com'>hello</a> link";
String expectedOutput = "this is a link:http://link.com[hello] link";
ParsedJavadoc parsed = JavadocUtil.parseConfigItemJavadoc(javaDoc);
String description = JavadocToAsciidocTransformer.toAsciidoc(parsed.description(), parsed.format());
assertEquals(expectedOutput, description);
}
@Test
public void parseJavaDocWithCodeInlineSnippet() {
String javaDoc = "{@code true} {@code false}";
String expectedOutput = "`true` `false`";
ParsedJavadoc parsed = JavadocUtil.parseConfigItemJavadoc(javaDoc);
String description = JavadocToAsciidocTransformer.toAsciidoc(parsed.description(), parsed.format());
assertEquals(expectedOutput, description);
}
@Test
public void parseJavaDocWithLiteralInlineSnippet() {
String javaDoc = "{@literal java.util.Boolean}";
String expectedOutput = "`java.util.Boolean`";
ParsedJavadoc parsed = JavadocUtil.parseConfigItemJavadoc(javaDoc);
String description = JavadocToAsciidocTransformer.toAsciidoc(parsed.description(), parsed.format());
assertEquals(expectedOutput, description);
}
@Test
public void parseJavaDocWithValueInlineSnippet() {
String javaDoc = "{@value 10s}";
String expectedOutput = "`10s`";
ParsedJavadoc parsed = JavadocUtil.parseConfigItemJavadoc(javaDoc);
String description = JavadocToAsciidocTransformer.toAsciidoc(parsed.description(), parsed.format());
assertEquals(expectedOutput, description);
}
@Test
public void parseJavaDocWithUnknownInlineSnippet() {
String javaDoc = "{@see java.util.Boolean}";
String expectedOutput = "java.util.Boolean";
ParsedJavadoc parsed = JavadocUtil.parseConfigItemJavadoc(javaDoc);
String description = JavadocToAsciidocTransformer.toAsciidoc(parsed.description(), parsed.format());
assertEquals(expectedOutput, description);
}
@Test
public void parseJavaDocWithUnknownNode() {
String javaDoc = "<unknown>hello</unknown>";
String expectedOutput = "hello";
ParsedJavadoc parsed = JavadocUtil.parseConfigItemJavadoc(javaDoc);
String description = JavadocToAsciidocTransformer.toAsciidoc(parsed.description(), parsed.format());
assertEquals(expectedOutput, description);
}
@Test
public void parseJavaDocWithBlockquoteBlock() {
ParsedJavadoc parsed = JavadocUtil
.parseConfigItemJavadoc("See Section 4.5.5 of the JSR 380 specification, specifically\n"
+ "\n"
+ "<blockquote>\n"
+ "In sub types (be it sub classes/interfaces or | JavadocToAsciidocTransformerConfigItemTest |
java | junit-team__junit5 | jupiter-tests/src/test/java/org/junit/jupiter/engine/extension/TempDirectoryTests.java | {
"start": 41575,
"end": 41710
} | class ____ {
@Test
void nested() {
assertNotNull(tempDir);
assertTrue(tempDir.exists());
}
@Nested
| NestedTestClass |
java | spring-projects__spring-boot | core/spring-boot/src/main/java/org/springframework/boot/DefaultApplicationArguments.java | {
"start": 1058,
"end": 2031
} | class ____ implements ApplicationArguments {
private final Source source;
private final String[] args;
public DefaultApplicationArguments(String... args) {
Assert.notNull(args, "'args' must not be null");
this.source = new Source(args);
this.args = args;
}
@Override
public String[] getSourceArgs() {
return this.args;
}
@Override
public Set<String> getOptionNames() {
String[] names = this.source.getPropertyNames();
return Collections.unmodifiableSet(new HashSet<>(Arrays.asList(names)));
}
@Override
public boolean containsOption(String name) {
return this.source.containsProperty(name);
}
@Override
public @Nullable List<String> getOptionValues(String name) {
List<String> values = this.source.getOptionValues(name);
return (values != null) ? Collections.unmodifiableList(values) : null;
}
@Override
public List<String> getNonOptionArgs() {
return this.source.getNonOptionArgs();
}
private static | DefaultApplicationArguments |
java | hibernate__hibernate-orm | tooling/metamodel-generator/src/test/java/org/hibernate/processor/test/constructor/EntityWithInstanceGetEntityManager.java | {
"start": 307,
"end": 845
} | class ____ {
@Transient
public EntityManager getEntityManager() {
// In a real-world scenario, this would contain some framework-specific code
throw new IllegalStateException( "This method shouldn't be called in tests" );
}
@Id
private Long id;
private String name;
private String entityManager;
public Long getId() {
return id;
}
public void setId(Long id) {
this.id = id;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
}
| EntityWithInstanceGetEntityManager |
java | alibaba__druid | core/src/test/java/com/alibaba/druid/bvt/filter/wall/mysql/MySqlWallTest9.java | {
"start": 837,
"end": 1236
} | class ____ extends TestCase {
public void test_true() throws Exception {
assertTrue(WallUtils.isValidateMySql(//
"REPLACE INTO lhwonline_guest (`ip`, `created_time`, `modify_time`, `fid`, `tid`, `request`)" +
"\nVALUES ('1', '1364366764', '1364368276', '0', '3436'" +
"\n , 'bbs/read/run')")); // 前置永真
}
}
| MySqlWallTest9 |
java | spring-projects__spring-framework | spring-beans/src/test/java/org/springframework/beans/factory/BeanFactoryUtilsTests.java | {
"start": 27807,
"end": 28445
} | class ____ implements SmartFactoryBean<String> {
private final TestBean testBean = new TestBean("enigma", 42);
@Override
public String getObject() {
return "testBean";
}
@Override
public Class<String> getObjectType() {
return String.class;
}
@Override
public <S> @Nullable S getObject(Class<S> type) throws Exception {
return (type.isInstance(testBean) ? type.cast(testBean) : SmartFactoryBean.super.getObject(type));
}
@Override
public boolean supportsType(Class<?> type) {
return (type.isInstance(testBean) || SmartFactoryBean.super.supportsType(type));
}
}
static | SupportsTypeSmartFactoryBean |
java | spring-cloud__spring-cloud-gateway | spring-cloud-gateway-sample/src/test/java/org/springframework/cloud/gateway/sample/GatewaySampleApplicationTests.java | {
"start": 7670,
"end": 7839
} | class ____ {
@Bean
public HttpBinCompatibleController httpBinCompatibleController() {
return new HttpBinCompatibleController();
}
}
protected static | TestConfig |
java | spring-projects__spring-data-jpa | spring-data-jpa/src/test/java/org/springframework/data/jpa/repository/query/HqlParserUnitTests.java | {
"start": 866,
"end": 1055
} | class ____ {
@Test // GH-3282
void shouldConsiderVisibility() {
assertThat(HqlParser.class).isPackagePrivate();
assertThat(HqlListener.class).isPackagePrivate();
}
}
| HqlParserUnitTests |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/injection/guice/SingleMethodInjector.java | {
"start": 1021,
"end": 2280
} | class ____ {
final Method method;
final SingleParameterInjector<?>[] parameterInjectors;
final InjectionPoint injectionPoint;
SingleMethodInjector(InjectorImpl injector, InjectionPoint injectionPoint, Errors errors) throws ErrorsException {
this.injectionPoint = injectionPoint;
method = (Method) injectionPoint.getMember();
parameterInjectors = injector.getParametersInjectors(injectionPoint.getDependencies(), errors);
}
public void inject(Errors errors, InternalContext context, Object o) {
Object[] parameters;
try {
parameters = SingleParameterInjector.getAll(errors, context, parameterInjectors);
} catch (ErrorsException e) {
errors.merge(e.getErrors());
return;
}
try {
method.invoke(o, parameters);
} catch (IllegalAccessException e) {
throw new AssertionError(e); // a security manager is blocking us, we're hosed
} catch (InvocationTargetException userException) {
Throwable cause = userException.getCause() != null ? userException.getCause() : userException;
errors.withSource(injectionPoint).errorInjectingMethod(cause);
}
}
}
| SingleMethodInjector |
java | apache__flink | flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/plan/reuse/ScanReuser.java | {
"start": 3659,
"end": 4868
} | class ____ the following:
*
* <ul>
* <li>First, find the same source, regardless of their projection and metadata push down.
* <li>Union projections for different instances of the same source and create a new instance.
* <li>Generate different Calc nodes for different instances.
* <li>Replace instances.
* </ul>
*
* <p>For example, plan:
*
* <pre>{@code
* Calc(select=[a, b, c])
* +- Join(joinType=[InnerJoin], where=[(a = a0)], select=[a, b, a0, c])
* :- Exchange(distribution=[hash[a]])
* : +- TableSourceScan(table=[[MyTable, project=[a, b]]], fields=[a, b])
* +- Exchange(distribution=[hash[a]])
* : +- TableSourceScan(table=[[MyTable, project=[a, c]]], fields=[a, c])
* }</pre>
*
* <p>Unified to:
*
* <pre>{@code
* Calc(select=[a, b, c])
* +- Join(joinType=[InnerJoin], where=[(a = a0)], select=[a, b, a0, c])
* :- Exchange(distribution=[hash[a]])
* : +- Calc(select=[a, b])
* : +- TableSourceScan(table=[[MyTable, project=[a, b, c]]], fields=[a, b, c])
* +- Exchange(distribution=[hash[a]])
* +- Calc(select=[a, c])
* : +- TableSourceScan(table=[[MyTable, project=[a, b, c]]], fields=[a, b, c])
* }</pre>
*
* <p>This | does |
java | spring-projects__spring-security | access/src/test/java/org/springframework/security/access/vote/RoleHierarchyVoterTests.java | {
"start": 1062,
"end": 1598
} | class ____ {
@Test
public void hierarchicalRoleIsIncludedInDecision() {
RoleHierarchyImpl roleHierarchyImpl = RoleHierarchyImpl.fromHierarchy("ROLE_A > ROLE_B");
// User has role A, role B is required
TestingAuthenticationToken auth = new TestingAuthenticationToken("user", "password", "ROLE_A");
RoleHierarchyVoter voter = new RoleHierarchyVoter(roleHierarchyImpl);
assertThat(voter.vote(auth, new Object(), SecurityConfig.createList("ROLE_B")))
.isEqualTo(AccessDecisionVoter.ACCESS_GRANTED);
}
}
| RoleHierarchyVoterTests |
java | spring-projects__spring-framework | spring-context/src/main/java/org/springframework/context/annotation/ClassPathScanningCandidateComponentProvider.java | {
"start": 20268,
"end": 20440
} | class ____ not match any exclude filter
* and does match at least one include filter.
* @param metadataReader the ASM ClassReader for the class
* @return whether the | does |
java | elastic__elasticsearch | x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/transform/TransformIntegrationTests.java | {
"start": 3919,
"end": 11986
} | class ____ extends WatcherMockScriptPlugin {
@Override
protected Map<String, Function<Map<String, Object>, Object>> pluginScripts() {
Map<String, Function<Map<String, Object>, Object>> scripts = new HashMap<>();
scripts.put("['key3' : ctx.payload.key1 + ctx.payload.key2]", vars -> {
int key1 = (int) XContentMapValues.extractValue("ctx.payload.key1", vars);
int key2 = (int) XContentMapValues.extractValue("ctx.payload.key2", vars);
return singletonMap("key3", key1 + key2);
});
scripts.put("['key4' : ctx.payload.key3 + 10]", vars -> {
int key3 = (int) XContentMapValues.extractValue("ctx.payload.key3", vars);
return singletonMap("key4", key3 + 10);
});
return scripts;
}
}
public void testScriptTransform() throws Exception {
final Script script;
if (randomBoolean()) {
logger.info("testing script transform with an inline script");
script = mockScript("['key3' : ctx.payload.key1 + ctx.payload.key2]");
} else {
logger.info("testing script transform with an indexed script");
putJsonStoredScript("my-script", Strings.format("""
{
"script": {
"lang": "%s",
"source": "['key3' : ctx.payload.key1 + ctx.payload.key2]"
}
}""", MockScriptPlugin.NAME));
script = new Script(ScriptType.STORED, null, "my-script", Collections.emptyMap());
}
// put a watch that has watch level transform:
PutWatchResponse putWatchResponse = new PutWatchRequestBuilder(client(), "_id1").setSource(
watchBuilder().trigger(schedule(interval("5s")))
.input(simpleInput(Map.of("key1", 10, "key2", 10)))
.transform(scriptTransform(script))
.addAction("_id", indexAction("output1"))
).get();
assertThat(putWatchResponse.isCreated(), is(true));
// put a watch that has a action level transform:
putWatchResponse = new PutWatchRequestBuilder(client(), "_id2").setSource(
watchBuilder().trigger(schedule(interval("5s")))
.input(simpleInput(Map.of("key1", 10, "key2", 10)))
.addAction("_id", scriptTransform(script), indexAction("output2"))
).get();
assertThat(putWatchResponse.isCreated(), is(true));
executeWatch("_id1");
executeWatch("_id2");
refresh();
assertWatchWithMinimumPerformedActionsCount("_id1", 1, false);
assertWatchWithMinimumPerformedActionsCount("_id2", 1, false);
refresh();
assertNoFailuresAndResponse(prepareSearch("output1"), response -> {
assertThat(response.getHits().getTotalHits().value(), greaterThanOrEqualTo(1L));
Map<String, Object> source = response.getHits().getAt(0).getSourceAsMap();
assertThat(source.size(), equalTo(1));
assertThat(source.get("key3").toString(), equalTo("20"));
});
assertNoFailuresAndResponse(prepareSearch("output2"), response -> {
assertThat(response.getHits().getTotalHits().value(), greaterThanOrEqualTo(1L));
Map<String, Object> source = response.getHits().getAt(0).getSourceAsMap();
assertThat(source.size(), equalTo(1));
assertThat(source.get("key3").toString(), equalTo("20"));
});
}
public void testSearchTransform() throws Exception {
createIndex("my-condition-index", "my-payload-index");
ensureGreen("my-condition-index", "my-payload-index");
indexDoc("my-payload-index", "mytestresult");
refresh();
WatcherSearchTemplateRequest inputRequest = templateRequest(searchSource().query(matchAllQuery()), "my-condition-index");
WatcherSearchTemplateRequest transformRequest = templateRequest(searchSource().query(matchAllQuery()), "my-payload-index");
PutWatchResponse putWatchResponse = new PutWatchRequestBuilder(client(), "_id1").setSource(
watchBuilder().trigger(schedule(interval("5s")))
.input(searchInput(inputRequest))
.transform(searchTransform(transformRequest))
.addAction("_id", indexAction("output1"))
).get();
assertThat(putWatchResponse.isCreated(), is(true));
putWatchResponse = new PutWatchRequestBuilder(client(), "_id2").setSource(
watchBuilder().trigger(schedule(interval("5s")))
.input(searchInput(inputRequest))
.addAction("_id", searchTransform(transformRequest), indexAction("output2"))
).get();
assertThat(putWatchResponse.isCreated(), is(true));
executeWatch("_id1");
executeWatch("_id2");
refresh();
assertWatchWithMinimumPerformedActionsCount("_id1", 1, false);
assertWatchWithMinimumPerformedActionsCount("_id2", 1, false);
refresh();
assertNoFailuresAndResponse(prepareSearch("output1"), response -> {
assertThat(response.getHits().getTotalHits().value(), greaterThanOrEqualTo(1L));
assertThat(response.getHits().getAt(0).getSourceAsString(), containsString("mytestresult"));
});
assertNoFailuresAndResponse(prepareSearch("output2"), response -> {
assertThat(response.getHits().getTotalHits().value(), greaterThanOrEqualTo(1L));
assertThat(response.getHits().getAt(0).getSourceAsString(), containsString("mytestresult"));
});
}
public void testChainTransform() throws Exception {
Script script1 = mockScript("['key3' : ctx.payload.key1 + ctx.payload.key2]");
Script script2 = mockScript("['key4' : ctx.payload.key3 + 10]");
// put a watch that has watch level transform:
PutWatchResponse putWatchResponse = new PutWatchRequestBuilder(client(), "_id1").setSource(
watchBuilder().trigger(schedule(interval("5s")))
.input(simpleInput(Map.of("key1", 10, "key2", 10)))
.transform(chainTransform(scriptTransform(script1), scriptTransform(script2)))
.addAction("_id", indexAction("output1"))
).get();
assertThat(putWatchResponse.isCreated(), is(true));
// put a watch that has a action level transform:
putWatchResponse = new PutWatchRequestBuilder(client(), "_id2").setSource(
watchBuilder().trigger(schedule(interval("5s")))
.input(simpleInput(Map.of("key1", 10, "key2", 10)))
.addAction("_id", chainTransform(scriptTransform(script1), scriptTransform(script2)), indexAction("output2"))
).get();
assertThat(putWatchResponse.isCreated(), is(true));
executeWatch("_id1");
executeWatch("_id2");
refresh();
assertWatchWithMinimumPerformedActionsCount("_id1", 1, false);
assertWatchWithMinimumPerformedActionsCount("_id2", 1, false);
refresh();
assertNoFailuresAndResponse(prepareSearch("output1"), response -> {
assertThat(response.getHits().getTotalHits().value(), greaterThanOrEqualTo(1L));
Map<String, Object> source = response.getHits().getAt(0).getSourceAsMap();
assertThat(source.size(), equalTo(1));
assertThat(source.get("key4").toString(), equalTo("30"));
});
assertNoFailuresAndResponse(prepareSearch("output2"), response -> {
assertThat(response.getHits().getTotalHits().value(), greaterThanOrEqualTo(1L));
Map<String, Object> source = response.getHits().getAt(0).getSourceAsMap();
assertThat(source.size(), equalTo(1));
assertThat(source.get("key4").toString(), equalTo("30"));
});
}
private void executeWatch(String watchId) {
new ExecuteWatchRequestBuilder(client(), watchId).setRecordExecution(true).get();
}
}
| CustomScriptPlugin |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.