language stringclasses 1 value | repo stringclasses 60 values | path stringlengths 22 294 | class_span dict | source stringlengths 13 1.16M | target stringlengths 1 113 |
|---|---|---|---|---|---|
java | elastic__elasticsearch | x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundToLong8Evaluator.java | {
"start": 4589,
"end": 5800
} | class ____ implements EvalOperator.ExpressionEvaluator.Factory {
private final Source source;
private final EvalOperator.ExpressionEvaluator.Factory field;
private final long p0;
private final long p1;
private final long p2;
private final long p3;
private final long p4;
private final long p5;
private final long p6;
private final long p7;
public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory field, long p0, long p1,
long p2, long p3, long p4, long p5, long p6, long p7) {
this.source = source;
this.field = field;
this.p0 = p0;
this.p1 = p1;
this.p2 = p2;
this.p3 = p3;
this.p4 = p4;
this.p5 = p5;
this.p6 = p6;
this.p7 = p7;
}
@Override
public RoundToLong8Evaluator get(DriverContext context) {
return new RoundToLong8Evaluator(source, field.get(context), p0, p1, p2, p3, p4, p5, p6, p7, context);
}
@Override
public String toString() {
return "RoundToLong8Evaluator[" + "field=" + field + ", p0=" + p0 + ", p1=" + p1 + ", p2=" + p2 + ", p3=" + p3 + ", p4=" + p4 + ", p5=" + p5 + ", p6=" + p6 + ", p7=" + p7 + "]";
}
}
}
| Factory |
java | google__error-prone | core/src/main/java/com/google/errorprone/bugpatterns/NonApiType.java | {
"start": 2213,
"end": 10326
} | class ____ extends BugChecker implements MethodTreeMatcher {
// TODO(kak): consider creating an annotation (e.g., `@NonApiType` or `@NotForPublicApi`) that
// users could apply to their own types.
private static final String FLOGGER_LINK = "";
private static final String TYPE_GENERALITY_LINK = "";
private static final String INTERFACES_NOT_IMPLS_LINK = "";
private static final String PRIMITIVE_ARRAYS_LINK = "";
private static final String PROTO_TIME_SERIALIZATION_LINK = "";
private static final String ITERATOR_LINK = "";
private static final String STREAM_LINK = "";
private static final String OPTIONAL_AS_PARAM_LINK = "";
private static final String PREFER_JDK_OPTIONAL_LINK = "";
private static final TypePredicate NON_GRAPH_WRAPPER =
not(isDescendantOf("com.google.apps.framework.producers.GraphWrapper"));
private static final ImmutableSet<TypeToCheck> NON_API_TYPES =
ImmutableSet.of(
// primitive arrays
withPublicVisibility(
anyOf(
(t, s) -> isSameType(t, s.getTypes().makeArrayType(s.getSymtab().intType), s),
(t, s) -> isSameType(t, makeArrayType("java.lang.Integer", s), s)),
"Prefer an ImmutableIntArray instead. " + PRIMITIVE_ARRAYS_LINK,
ApiElementType.ANY),
withPublicVisibility(
anyOf(
(t, s) -> isSameType(t, s.getTypes().makeArrayType(s.getSymtab().doubleType), s),
(t, s) -> isSameType(t, makeArrayType("java.lang.Double", s), s)),
"Prefer an ImmutableDoubleArray instead. " + PRIMITIVE_ARRAYS_LINK,
ApiElementType.ANY),
withPublicVisibility(
anyOf(
(t, s) -> isSameType(t, s.getTypes().makeArrayType(s.getSymtab().longType), s),
(t, s) -> isSameType(t, makeArrayType("java.lang.Long", s), s)),
"Prefer an ImmutableLongArray instead. " + PRIMITIVE_ARRAYS_LINK,
ApiElementType.ANY),
// Optionals
withPublicVisibility(
isExactType("java.util.Optional"),
NON_GRAPH_WRAPPER,
"Avoid Optional parameters. " + OPTIONAL_AS_PARAM_LINK,
ApiElementType.PARAMETER),
withPublicVisibility(
isExactType("com.google.common.base.Optional"),
NON_GRAPH_WRAPPER,
"Prefer a java.util.Optional instead. " + PREFER_JDK_OPTIONAL_LINK,
ApiElementType.ANY),
// ImmutableFoo as params
withPublicVisibility(
isExactType("com.google.common.collect.ImmutableCollection"),
NON_GRAPH_WRAPPER,
"Consider accepting a java.util.Collection or Iterable instead. "
+ TYPE_GENERALITY_LINK,
ApiElementType.PARAMETER),
withPublicVisibility(
isExactType("com.google.common.collect.ImmutableList"),
NON_GRAPH_WRAPPER,
"Consider accepting a java.util.List or Iterable instead. " + TYPE_GENERALITY_LINK,
ApiElementType.PARAMETER),
withPublicVisibility(
isExactType("com.google.common.collect.ImmutableSet"),
NON_GRAPH_WRAPPER,
"Consider accepting a java.util.Set or Iterable instead. " + TYPE_GENERALITY_LINK,
ApiElementType.PARAMETER),
withPublicVisibility(
isExactType("com.google.common.collect.ImmutableMap"),
NON_GRAPH_WRAPPER,
"Consider accepting a java.util.Map instead. " + TYPE_GENERALITY_LINK,
ApiElementType.PARAMETER),
// collection implementation classes
withAnyVisibility(
anyOf(isExactType("java.util.ArrayList"), isExactType("java.util.LinkedList")),
"Prefer a java.util.List instead. " + INTERFACES_NOT_IMPLS_LINK,
ApiElementType.ANY),
withAnyVisibility(
anyOf(
isExactType("java.util.HashSet"),
isExactType("java.util.LinkedHashSet"),
isExactType("java.util.TreeSet")),
"Prefer a java.util.Set instead. " + INTERFACES_NOT_IMPLS_LINK,
ApiElementType.ANY),
withAnyVisibility(
anyOf(
isExactType("java.util.HashMap"),
isExactType("java.util.LinkedHashMap"),
isExactType("java.util.TreeMap")),
"Prefer a java.util.Map instead. " + INTERFACES_NOT_IMPLS_LINK,
ApiElementType.ANY),
// Iterators
withPublicVisibility(
isDescendantOf("java.util.Iterator"),
"Prefer returning a Stream (or collecting to an ImmutableList/ImmutableSet) instead. "
+ ITERATOR_LINK,
ApiElementType.RETURN_TYPE),
// TODO(b/279464660): consider also warning on an Iterator as a ApiElementType.PARAMETER
// Streams
withPublicVisibility(
isDescendantOf("java.util.stream.Stream"),
"Prefer accepting an Iterable or Collection instead. " + STREAM_LINK,
ApiElementType.PARAMETER),
// Guice
withAnyVisibility(
isExactType("com.google.inject.AbstractModule"),
"Prefer using Module instead.",
ApiElementType.ANY),
// ProtoTime
withPublicVisibility(
isExactType("com.google.protobuf.Duration"),
"Prefer a java.time.Duration instead. " + PROTO_TIME_SERIALIZATION_LINK,
ApiElementType.ANY),
withPublicVisibility(
isExactType("com.google.protobuf.Timestamp"),
"Prefer a java.time.Instant instead. " + PROTO_TIME_SERIALIZATION_LINK,
ApiElementType.ANY),
withPublicVisibility(
isExactType("com.google.type.Date"),
"Prefer a java.time.LocalDate instead. " + PROTO_TIME_SERIALIZATION_LINK,
ApiElementType.ANY),
withPublicVisibility(
isExactType("com.google.type.DateTime"),
"Prefer a java.time.LocalDateTime instead. " + PROTO_TIME_SERIALIZATION_LINK,
ApiElementType.ANY),
withPublicVisibility(
isExactType("com.google.type.DayOfWeek"),
"Prefer a java.time.DayOfWeek instead. " + PROTO_TIME_SERIALIZATION_LINK,
ApiElementType.ANY),
withPublicVisibility(
isExactType("com.google.type.Month"),
"Prefer a java.time.Month instead. " + PROTO_TIME_SERIALIZATION_LINK,
ApiElementType.ANY),
withPublicVisibility(
isExactType("com.google.type.TimeOfDay"),
"Prefer a java.time.LocalTime instead. " + PROTO_TIME_SERIALIZATION_LINK,
ApiElementType.ANY),
withPublicVisibility(
isExactType("com.google.type.TimeZone"),
"Prefer a java.time.ZoneId instead. " + PROTO_TIME_SERIALIZATION_LINK,
ApiElementType.ANY),
// TODO(kak): consider com.google.type.Interval -> Range<Instant>
// ProtocolStringList subtypes (see b/408025632)
// Ideally, we also would flag local variables, but NonApiType is a method-level check.
withAnyVisibility(
anyOf(isDescendantOf("com.google.protobuf.ProtocolStringList")),
"Unless you need methods declared on the subtypes, prefer a java.util.List<String>"
+ " instead. "
+ INTERFACES_NOT_IMPLS_LINK,
ApiElementType.ANY),
// Flogger
withAnyVisibility(
anyOf(
isDescendantOf("com.google.common.flogger.FluentLogger"),
isDescendantOf("com.google.common.flogger.GoogleLogger"),
isDescendantOf("com.google.common.flogger.android.AndroidFluentLogger")),
"There is no advantage to passing around a logger rather than declaring one in the"
+ " | NonApiType |
java | apache__flink | flink-libraries/flink-state-processing-api/src/main/java/org/apache/flink/state/api/runtime/SavepointEnvironment.java | {
"start": 15145,
"end": 15629
} | class ____ implements TaskOperatorEventGateway {
@Override
public void sendOperatorEventToCoordinator(
OperatorID operator, SerializedValue<OperatorEvent> event) {}
@Override
public CompletableFuture<CoordinationResponse> sendRequestToCoordinator(
OperatorID operator, SerializedValue<CoordinationRequest> request) {
return CompletableFuture.completedFuture(null);
}
}
}
| NoOpTaskOperatorEventGateway |
java | apache__hadoop | hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/contract/ITestAbfsFileSystemContractEtag.java | {
"start": 1566,
"end": 2129
} | class ____-uses the test folder
// This leads to failures when the test is re-run as same ABFS test
// containers are re-used for test run and creation of source and
// destination test paths fail, as they are already present.
binding.getFileSystem().delete(binding.getTestPath(), true);
}
@Override
protected Configuration createConfiguration() {
return binding.getRawConfiguration();
}
@Override
protected AbstractFSContract createContract(final Configuration conf) {
return new AbfsFileSystemContract(conf, isSecure);
}
}
| re |
java | apache__dubbo | dubbo-common/src/test/java/org/apache/dubbo/rpc/model/ReflectionServiceDescriptorTest.java | {
"start": 1203,
"end": 3657
} | class ____ {
private final ReflectionServiceDescriptor service = new ReflectionServiceDescriptor(DemoService.class);
@Test
void addMethod() {
ReflectionServiceDescriptor service2 = new ReflectionServiceDescriptor(DemoService.class);
MethodDescriptor method = Mockito.mock(MethodDescriptor.class);
when(method.getMethodName()).thenReturn("sayHello2");
service2.addMethod(method);
Assertions.assertEquals(1, service2.getMethods("sayHello2").size());
}
@Test
void testStreamRpcTypeException() {
try {
new ReflectionServiceDescriptor(DemoService1.class);
} catch (IllegalStateException e) {
Assertions.assertTrue(e.getMessage().contains("Stream method could not be overloaded."));
}
}
@Test
void getFullServiceDefinition() {
TypeDefinitionBuilder.initBuilders(new FrameworkModel());
Assertions.assertNotNull(service.getFullServiceDefinition("demoService"));
}
@Test
void getInterfaceName() {
Assertions.assertEquals(DemoService.class.getName(), service.getInterfaceName());
}
@Test
void getServiceInterfaceClass() {
Assertions.assertEquals(DemoService.class, service.getServiceInterfaceClass());
}
@Test
void getAllMethods() {
Assertions.assertFalse(service.getAllMethods().isEmpty());
}
@Test
void getMethod() {
String desc = ReflectUtils.getDesc(String.class);
Assertions.assertNotNull(service.getMethod("sayHello", desc));
}
@Test
void testGetMethod() {
Assertions.assertNotNull(service.getMethod("sayHello", new Class[] {String.class}));
}
@Test
void getMethods() {
Assertions.assertEquals(1, service.getMethods("sayHello").size());
}
@Test
void testEquals() {
ReflectionServiceDescriptor service2 = new ReflectionServiceDescriptor(DemoService.class);
ReflectionServiceDescriptor service3 = new ReflectionServiceDescriptor(DemoService.class);
Assertions.assertEquals(service2, service3);
}
@Test
void testHashCode() {
ReflectionServiceDescriptor service2 = new ReflectionServiceDescriptor(DemoService.class);
ReflectionServiceDescriptor service3 = new ReflectionServiceDescriptor(DemoService.class);
Assertions.assertEquals(service2.hashCode(), service3.hashCode());
}
}
| ReflectionServiceDescriptorTest |
java | alibaba__druid | core/src/test/java/com/alibaba/druid/bvt/sql/postgresql/issues/Issue5474.java | {
"start": 510,
"end": 1177
} | class ____ {
@Test
public void test_create_triger_execute() throws Exception {
for (DbType dbType : new DbType[]{DbType.postgresql}) {
String sql = "CREATE TRIGGER \"update_time\" BEFORE UPDATE ON \"poit_cloud\".\"ent_i_checking_analyze\" FOR EACH ROW EXECUTE PROCEDURE poit_cloud.modify_timestamp();";
SQLStatementParser parser = SQLParserUtils.createSQLStatementParser(sql, dbType);
SQLStatement statement = parser.parseStatement();
System.out.println("原始的sql===" + sql);
String newSql = statement.toString();
System.out.println("生成的sql===" + newSql);
}
}
}
| Issue5474 |
java | apache__kafka | clients/src/main/java/org/apache/kafka/clients/consumer/internals/OffsetsForLeaderEpochClient.java | {
"start": 1227,
"end": 1310
} | class ____ making asynchronous requests to the OffsetsForLeaderEpoch API
*/
public | for |
java | spring-projects__spring-security | saml2/saml2-service-provider/src/opensaml5Main/java/org/springframework/security/saml2/provider/service/authentication/OpenSaml5AuthenticationProvider.java | {
"start": 35005,
"end": 36328
} | class ____ extends SAML20AssertionValidator {
private ValidSignatureAssertionValidator(@Nullable Collection<ConditionValidator> newConditionValidators,
@Nullable Collection<SubjectConfirmationValidator> newConfirmationValidators,
@Nullable Collection<StatementValidator> newStatementValidators,
@Nullable org.opensaml.saml.saml2.assertion.AssertionValidator newAssertionValidator,
@Nullable SignatureTrustEngine newTrustEngine,
@Nullable SignaturePrevalidator newSignaturePrevalidator) {
super(newConditionValidators, newConfirmationValidators, newStatementValidators, newAssertionValidator,
newTrustEngine, newSignaturePrevalidator);
}
@NonNull
@Override
protected ValidationResult validateSignature(@NonNull Assertion token, @NonNull ValidationContext context)
throws AssertionValidationException {
return ValidationResult.VALID;
}
}
}
/**
* A default implementation of {@link OpenSaml5AuthenticationProvider}'s response
* authentication converter. It will take the principal name from the
* {@link org.opensaml.saml.saml2.core.NameID} element. It will also extract the
* assertion attributes and session indexes. You can either configure the principal
* name converter and granted authorities converter in this | ValidSignatureAssertionValidator |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/cluster/coordination/Coordinator.java | {
"start": 97052,
"end": 114609
} | class ____ extends Publication {
private final ClusterStatePublicationEvent clusterStatePublicationEvent;
private final PublishRequest publishRequest;
private final SubscribableListener<Void> localNodeAckEvent;
private final AckListener ackListener;
private final ActionListener<Void> publishListener;
private final PublicationTransportHandler.PublicationContext publicationContext;
@Nullable // if using single-node discovery
private final Scheduler.ScheduledCancellable timeoutHandler;
private final Scheduler.Cancellable infoTimeoutHandler;
// We may not have accepted our own state before receiving a join from another node, causing its join to be rejected (we cannot
// safely accept a join whose last-accepted term/version is ahead of ours), so store them up and process them at the end.
private final List<Join> receivedJoins = new ArrayList<>();
private boolean receivedJoinsProcessed;
CoordinatorPublication(
ClusterStatePublicationEvent clusterStatePublicationEvent,
PublishRequest publishRequest,
PublicationTransportHandler.PublicationContext publicationContext,
SubscribableListener<Void> localNodeAckEvent,
AckListener ackListener,
ActionListener<Void> publishListener
) {
super(publishRequest, new AckListener() {
@Override
public void onCommit(TimeValue commitTime) {
clusterStatePublicationEvent.setPublicationCommitElapsedMillis(commitTime.millis());
ackListener.onCommit(commitTime);
}
@Override
public void onNodeAck(DiscoveryNode node, Exception e) {
// acking and cluster state application for local node is handled specially
if (node.equals(getLocalNode())) {
synchronized (mutex) {
if (e == null) {
localNodeAckEvent.onResponse(null);
} else {
localNodeAckEvent.onFailure(e);
}
}
} else {
ackListener.onNodeAck(node, e);
if (e == null) {
lagDetector.setAppliedVersion(node, publishRequest.getAcceptedState().version());
}
}
}
}, transportService.getThreadPool()::rawRelativeTimeInMillis);
this.clusterStatePublicationEvent = clusterStatePublicationEvent;
this.publishRequest = publishRequest;
this.publicationContext = publicationContext;
this.localNodeAckEvent = localNodeAckEvent;
this.ackListener = ackListener;
this.publishListener = publishListener;
this.timeoutHandler = singleNodeDiscovery ? null : transportService.getThreadPool().schedule(new Runnable() {
@Override
public void run() {
synchronized (mutex) {
cancel("timed out after " + publishTimeout);
}
}
@Override
public String toString() {
return "scheduled timeout for " + CoordinatorPublication.this;
}
}, publishTimeout, clusterCoordinationExecutor);
this.infoTimeoutHandler = transportService.getThreadPool().schedule(new Runnable() {
@Override
public void run() {
synchronized (mutex) {
logIncompleteNodes(Level.INFO);
}
}
@Override
public String toString() {
return "scheduled timeout for reporting on " + CoordinatorPublication.this;
}
}, publishInfoTimeout, clusterCoordinationExecutor);
}
private void removePublicationAndPossiblyBecomeCandidate(String reason) {
assert Thread.holdsLock(mutex) : "Coordinator mutex not held";
assert currentPublication.get() == this;
currentPublication = Optional.empty();
logger.debug("publication ended unsuccessfully: {}", this);
// check if node has not already switched modes (by bumping term)
if (isActiveForCurrentLeader()) {
becomeCandidate(reason);
}
}
boolean isActiveForCurrentLeader() {
// checks if this publication can still influence the mode of the current publication
return mode == Mode.LEADER && publishRequest.getAcceptedState().term() == getCurrentTerm();
}
@Override
protected void onCompletion(boolean committed) {
assert Thread.holdsLock(mutex) : "Coordinator mutex not held";
final long completionTimeMillis = transportService.getThreadPool().rawRelativeTimeInMillis();
clusterStatePublicationEvent.setPublicationCompletionElapsedMillis(completionTimeMillis - getStartTime());
localNodeAckEvent.addListener(new ActionListener<>() {
@Override
public void onResponse(Void ignore) {
assert Thread.holdsLock(mutex) : "Coordinator mutex not held";
assert committed;
receivedJoins.forEach(CoordinatorPublication.this::handleAssociatedJoin);
assert receivedJoinsProcessed == false;
receivedJoinsProcessed = true;
clusterApplier.onNewClusterState(CoordinatorPublication.this.toString(), () -> applierState, new ActionListener<>() {
@Override
public void onFailure(Exception e) {
synchronized (mutex) {
removePublicationAndPossiblyBecomeCandidate("clusterApplier#onNewClusterState");
}
cancelTimeoutHandlers();
ackListener.onNodeAck(getLocalNode(), e);
publishListener.onFailure(e);
}
@Override
public void onResponse(Void ignored) {
onClusterStateApplied();
clusterStatePublicationEvent.setMasterApplyElapsedMillis(
transportService.getThreadPool().rawRelativeTimeInMillis() - completionTimeMillis
);
synchronized (mutex) {
assert currentPublication.get() == CoordinatorPublication.this;
currentPublication = Optional.empty();
logger.debug("publication ended successfully: {}", CoordinatorPublication.this);
// trigger term bump if new term was found during publication
updateMaxTermSeen(getCurrentTerm());
if (mode == Mode.LEADER) {
// if necessary, abdicate to another node or improve the voting configuration
boolean attemptReconfiguration = true;
final ClusterState state = getLastAcceptedState(); // committed state
if (localNodeMayWinElection(state, electionStrategy).mayWin() == false) {
final List<DiscoveryNode> masterCandidates = completedNodes().stream()
.filter(DiscoveryNode::isMasterNode)
.filter(node -> electionStrategy.nodeMayWinElection(state, node).mayWin())
.filter(node -> {
// check if master candidate would be able to get an election quorum if we were to
// abdicate to it. Assume that every node that completed the publication can provide
// a vote in that next election and has the latest state.
final long futureElectionTerm = state.term() + 1;
final VoteCollection futureVoteCollection = new VoteCollection();
completedNodes().forEach(
completedNode -> futureVoteCollection.addJoinVote(
new Join(completedNode, node, futureElectionTerm, state.term(), state.version())
)
);
return electionStrategy.isElectionQuorum(
node,
futureElectionTerm,
state.term(),
state.version(),
state.getLastCommittedConfiguration(),
state.getLastAcceptedConfiguration(),
futureVoteCollection
);
})
.toList();
if (masterCandidates.isEmpty() == false) {
abdicateTo(masterCandidates.get(random.nextInt(masterCandidates.size())));
attemptReconfiguration = false;
}
}
if (attemptReconfiguration) {
scheduleReconfigurationIfNeeded();
}
}
lagDetector.startLagDetector(publishRequest.getAcceptedState().version());
logIncompleteNodes(Level.WARN);
}
cancelTimeoutHandlers();
ackListener.onNodeAck(getLocalNode(), null);
publishListener.onResponse(null);
}
});
}
@Override
public void onFailure(Exception e) {
assert Thread.holdsLock(mutex) : "Coordinator mutex not held";
removePublicationAndPossiblyBecomeCandidate("Publication.onCompletion(false)");
cancelTimeoutHandlers();
final FailedToCommitClusterStateException exception = new FailedToCommitClusterStateException(
Strings.format(
"publication of cluster state version [%d] in term [%d] failed [committed=%s]",
publishRequest.getAcceptedState().version(),
publishRequest.getAcceptedState().term(),
committed
),
e
);
ackListener.onNodeAck(getLocalNode(), exception); // other nodes have acked, but not the master.
publishListener.onFailure(exception);
}
}, EsExecutors.DIRECT_EXECUTOR_SERVICE, transportService.getThreadPool().getThreadContext());
}
private void cancelTimeoutHandlers() {
if (timeoutHandler != null) {
timeoutHandler.cancel();
}
infoTimeoutHandler.cancel();
}
private void handleAssociatedJoin(Join join) {
if (join.term() == getCurrentTerm() && missingJoinVoteFrom(join.votingNode())) {
logger.trace("handling {}", join);
handleJoin(join);
}
}
@Override
protected boolean isPublishQuorum(VoteCollection votes) {
assert Thread.holdsLock(mutex) : "Coordinator mutex not held";
return coordinationState.get().isPublishQuorum(votes);
}
@Override
protected Optional<SubscribableListener<ApplyCommitRequest>> handlePublishResponse(
DiscoveryNode sourceNode,
PublishResponse publishResponse
) {
assert Thread.holdsLock(mutex) : "Coordinator mutex not held";
assert getCurrentTerm() >= publishResponse.getTerm();
return coordinationState.get().handlePublishResponse(sourceNode, publishResponse).map(applyCommitRequest -> {
final var future = new SubscribableListener<ApplyCommitRequest>();
beforeCommit(applyCommitRequest.getTerm(), applyCommitRequest.getVersion(), future.map(ignored -> applyCommitRequest));
future.addListener(new ActionListener<>() {
@Override
public void onResponse(ApplyCommitRequest applyCommitRequest) {}
@Override
public void onFailure(Exception e) {
logger.log(
e instanceof CoordinationStateRejectedException ? Level.DEBUG : Level.WARN,
Strings.format(
"publication of cluster state version [%d] in term [%d] failed to commit after reaching quorum",
publishRequest.getAcceptedState().version(),
publishRequest.getAcceptedState().term()
),
e
);
}
});
return future;
});
}
@Override
protected void onJoin(Join join) {
assert Thread.holdsLock(mutex) : "Coordinator mutex not held";
if (receivedJoinsProcessed) {
// a late response may arrive after the state has been locally applied, meaning that receivedJoins has already been
// processed, so we have to handle this late response here.
handleAssociatedJoin(join);
} else {
receivedJoins.add(join);
}
}
@Override
protected void onMissingJoin(DiscoveryNode discoveryNode) {
assert Thread.holdsLock(mutex) : "Coordinator mutex not held";
// The remote node did not include a join vote in its publish response. We do not persist joins, so it could be that the remote
// node voted for us and then rebooted, or it could be that it voted for a different node in this term. If we don't have a copy
// of a join from this node then we assume the latter and bump our term to obtain a vote from this node.
if (missingJoinVoteFrom(discoveryNode)) {
final long term = publishRequest.getAcceptedState().term();
logger.debug("onMissingJoin: no join vote from {}, bumping term to exceed {}", discoveryNode, term);
updateMaxTermSeen(term + 1);
}
}
@Override
protected void sendPublishRequest(
DiscoveryNode destination,
PublishRequest publishRequest,
ActionListener<PublishWithJoinResponse> responseActionListener
) {
publicationContext.sendPublishRequest(destination, publishRequest, wrapWithMutex(responseActionListener));
}
private static final TransportRequestOptions COMMIT_STATE_REQUEST_OPTIONS = TransportRequestOptions.of(
null,
TransportRequestOptions.Type.STATE
);
@Override
protected void sendApplyCommit(
DiscoveryNode destination,
ApplyCommitRequest applyCommit,
ActionListener<Void> responseActionListener
) {
assert transportService.getThreadPool().getThreadContext().isSystemContext();
assert Thread.holdsLock(mutex) : "Coordinator mutex not held";
try {
transportService.sendRequest(
destination,
COMMIT_STATE_ACTION_NAME,
applyCommit,
COMMIT_STATE_REQUEST_OPTIONS,
TransportResponseHandler.empty(clusterCoordinationExecutor, wrapWithMutex(responseActionListener))
);
} catch (Exception e) {
responseActionListener.onFailure(e);
}
}
@Override
protected <T> ActionListener<T> wrapListener(ActionListener<T> listener) {
return wrapWithMutex(listener);
}
@Override
boolean publicationCompletedIffAllTargetsInactiveOrCancelled() {
assert Thread.holdsLock(mutex) : "Coordinator mutex not held";
return super.publicationCompletedIffAllTargetsInactiveOrCancelled();
}
}
public | CoordinatorPublication |
java | apache__hadoop | hadoop-mapreduce-project/hadoop-mapreduce-examples/src/test/java/org/apache/hadoop/examples/TestWordStats.java | {
"start": 1389,
"end": 1912
} | class ____ {
private final static String INPUT = "src/test/java/org/apache/hadoop/examples/pi/math";
private final static String BASEDIR = System.getProperty("test.build.data",
"target/test-dir");
private final static String MEAN_OUTPUT = BASEDIR + "/mean_output";
private final static String MEDIAN_OUTPUT = BASEDIR + "/median_output";
private final static String STDDEV_OUTPUT = BASEDIR + "/stddev_output";
/**
* Modified internal test | TestWordStats |
java | quarkusio__quarkus | extensions/funqy/funqy-knative-events/runtime/src/main/java/io/quarkus/funqy/knative/events/EventAttribute.java | {
"start": 50,
"end": 463
} | interface ____ {
/**
* Defines the cloud event attribute name that will be used for additional filtering
* of incoming events
*
* @return
*/
String name();
/**
* Defines the cloud event attribute's (one defined by <code>name</code>) value that
* will be used for additional filtering of incoming events
*
* @return
*/
String value();
}
| EventAttribute |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs-nfs/src/main/java/org/apache/hadoop/hdfs/nfs/nfs3/DFSClientCache.java | {
"start": 4033,
"end": 6715
} | class ____ {
private final String userName;
private final int namenodeId;
private DfsClientKey(String userName, int namenodeId) {
this.userName = userName;
this.namenodeId = namenodeId;
}
@Override
public boolean equals(Object obj) {
if (obj instanceof DfsClientKey) {
DfsClientKey k = (DfsClientKey) obj;
return userName.equals(k.userName) &&
(namenodeId == k.namenodeId);
}
return false;
}
@Override
public int hashCode() {
return Objects.hashCode(userName, namenodeId);
}
}
DFSClientCache(NfsConfiguration config) throws IOException {
this(config, DEFAULT_DFS_CLIENT_CACHE_SIZE);
}
DFSClientCache(NfsConfiguration config, int clientCache) throws IOException {
this.config = config;
namenodeUriMap = new HashMap<>();
prepareAddressMap();
this.clientCache = CacheBuilder.newBuilder()
.maximumSize(clientCache)
.removalListener(clientRemovalListener())
.build(clientLoader());
this.inputstreamCache = CacheBuilder.newBuilder()
.maximumSize(DEFAULT_DFS_INPUTSTREAM_CACHE_SIZE)
.expireAfterAccess(DEFAULT_DFS_INPUTSTREAM_CACHE_TTL, TimeUnit.SECONDS)
.removalListener(inputStreamRemovalListener())
.build(inputStreamLoader());
ShutdownHookManager.get().addShutdownHook(new CacheFinalizer(),
SHUTDOWN_HOOK_PRIORITY);
}
private void prepareAddressMap() throws IOException {
FileSystem fs = FileSystem.get(config);
String[] exportsPath =
config.getStrings(NfsConfigKeys.DFS_NFS_EXPORT_POINT_KEY,
NfsConfigKeys.DFS_NFS_EXPORT_POINT_DEFAULT);
for (String exportPath : exportsPath) {
URI exportURI = Nfs3Utils.getResolvedURI(fs, exportPath);
int namenodeId = Nfs3Utils.getNamenodeId(config, exportURI);
URI value = namenodeUriMap.get(namenodeId);
// if a unique nnid, add it to the map
if (value == null) {
LOG.info("Added export: {} FileSystem URI: {} with namenodeId: {}",
exportPath, exportPath, namenodeId);
namenodeUriMap.put(namenodeId, exportURI);
} else {
// if the nnid already exists, it better be the for the same namenode
String msg = String.format("FS:%s, Namenode ID collision for path:%s "
+ "nnid:%s uri being added:%s existing uri:%s", fs.getScheme(),
exportPath, namenodeId, exportURI, value);
LOG.error(msg);
throw new FileSystemException(msg);
}
}
}
/**
* Priority of the FileSystem shutdown hook.
*/
public static final int SHUTDOWN_HOOK_PRIORITY = 10;
private | DfsClientKey |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/ha/TestRetryCacheWithHA.java | {
"start": 16743,
"end": 17854
} | class ____ extends AtMostOnceOp {
private final String oldName;
private final String newName;
private boolean renamed;
RenameOp(DFSClient client, String oldName, String newName) {
super("rename", client);
this.oldName = oldName;
this.newName = newName;
}
@Override
void prepare() throws Exception {
final Path filePath = new Path(oldName);
if (!dfs.exists(filePath)) {
DFSTestUtil.createFile(dfs, filePath, BlockSize, DataNodes, 0);
}
}
@SuppressWarnings("deprecation")
@Override
void invoke() throws Exception {
this.renamed = client.rename(oldName, newName);
}
@Override
boolean checkNamenodeBeforeReturn() throws Exception {
Path targetPath = new Path(newName);
boolean renamed = dfs.exists(targetPath);
for (int i = 0; i < CHECKTIMES && !renamed; i++) {
Thread.sleep(1000);
renamed = dfs.exists(targetPath);
}
return renamed;
}
@Override
Object getResult() {
return new Boolean(renamed);
}
}
/** rename2 */
| RenameOp |
java | apache__camel | dsl/camel-componentdsl/src/generated/java/org/apache/camel/builder/component/dsl/JteComponentBuilderFactory.java | {
"start": 7816,
"end": 9247
} | class ____
extends AbstractComponentBuilder<JteComponent>
implements JteComponentBuilder {
@Override
protected JteComponent buildConcreteComponent() {
return new JteComponent();
}
@Override
protected boolean setPropertyOnComponent(
Component component,
String name,
Object value) {
switch (name) {
case "allowContextMapAll": ((JteComponent) component).setAllowContextMapAll((boolean) value); return true;
case "allowTemplateFromHeader": ((JteComponent) component).setAllowTemplateFromHeader((boolean) value); return true;
case "contentCache": ((JteComponent) component).setContentCache((boolean) value); return true;
case "contentType": ((JteComponent) component).setContentType((gg.jte.ContentType) value); return true;
case "lazyStartProducer": ((JteComponent) component).setLazyStartProducer((boolean) value); return true;
case "preCompile": ((JteComponent) component).setPreCompile((boolean) value); return true;
case "workDir": ((JteComponent) component).setWorkDir((java.lang.String) value); return true;
case "autowiredEnabled": ((JteComponent) component).setAutowiredEnabled((boolean) value); return true;
default: return false;
}
}
}
} | JteComponentBuilderImpl |
java | quarkusio__quarkus | integration-tests/smallrye-stork-registration/src/test/java/org/acme/ClientCallingResourceTest.java | {
"start": 229,
"end": 400
} | class ____ {
@Inject
Registration registration;
@Test
public void test() {
Assertions.assertNotNull(registration);
}
}
| ClientCallingResourceTest |
java | apache__hadoop | hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/statistics/CountersAndGauges.java | {
"start": 1066,
"end": 2321
} | interface ____ extends DurationTrackerFactory {
/**
* Increment a specific counter.
* No-op if not defined.
* @param op operation
* @param count increment value
*/
void incrementCounter(Statistic op, long count);
/**
* Increment a specific gauge.
* No-op if not defined.
* @param op operation
* @param count increment value
* @throws ClassCastException if the metric is of the wrong type
*/
void incrementGauge(Statistic op, long count);
/**
* Decrement a specific gauge.
* No-op if not defined.
* @param op operation
* @param count increment value
* @throws ClassCastException if the metric is of the wrong type
*/
void decrementGauge(Statistic op, long count);
/**
* Add a value to a quantiles statistic. No-op if the quantile
* isn't found.
* @param op operation to look up.
* @param value value to add.
* @throws ClassCastException if the metric is not a Quantiles.
*/
void addValueToQuantiles(Statistic op, long value);
/**
* Record a duration.
* @param op operation
* @param success was the operation a success?
* @param duration how long did it take
*/
void recordDuration(Statistic op, boolean success, Duration duration);
}
| CountersAndGauges |
java | apache__flink | flink-table/flink-table-planner/src/main/java/org/apache/calcite/sql/type/SqlTypeFactoryImpl.java | {
"start": 24305,
"end": 24665
} | class ____ extends BasicSqlType {
UnknownSqlType(RelDataTypeFactory typeFactory) {
super(typeFactory.getTypeSystem(), SqlTypeName.NULL);
}
@Override
protected void generateTypeString(StringBuilder sb, boolean withDetail) {
sb.append("UNKNOWN");
}
}
// FLINK MODIFICATION END
}
| UnknownSqlType |
java | elastic__elasticsearch | x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/settings/UpdateSecuritySettingsAction.java | {
"start": 3735,
"end": 3994
} | class ____ extends AcknowledgedRequest<Request> {
private final Map<String, Object> mainIndexSettings;
private final Map<String, Object> tokensIndexSettings;
private final Map<String, Object> profilesIndexSettings;
public | Request |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/AbstractHighlighterBuilder.java | {
"start": 2064,
"end": 29146
} | class ____<HB extends AbstractHighlighterBuilder<?>>
implements
Writeable,
Rewriteable<HB>,
ToXContentObject {
public static final ParseField PRE_TAGS_FIELD = new ParseField("pre_tags");
public static final ParseField POST_TAGS_FIELD = new ParseField("post_tags");
public static final ParseField FIELDS_FIELD = new ParseField("fields");
public static final ParseField ORDER_FIELD = new ParseField("order");
public static final ParseField HIGHLIGHT_FILTER_FIELD = new ParseField("highlight_filter");
public static final ParseField FRAGMENT_SIZE_FIELD = new ParseField("fragment_size");
public static final ParseField FRAGMENT_OFFSET_FIELD = new ParseField("fragment_offset");
public static final ParseField NUMBER_OF_FRAGMENTS_FIELD = new ParseField("number_of_fragments");
public static final ParseField ENCODER_FIELD = new ParseField("encoder");
public static final ParseField TAGS_SCHEMA_FIELD = new ParseField("tags_schema");
public static final ParseField REQUIRE_FIELD_MATCH_FIELD = new ParseField("require_field_match");
public static final ParseField BOUNDARY_SCANNER_FIELD = new ParseField("boundary_scanner");
public static final ParseField BOUNDARY_MAX_SCAN_FIELD = new ParseField("boundary_max_scan");
public static final ParseField BOUNDARY_CHARS_FIELD = new ParseField("boundary_chars");
public static final ParseField BOUNDARY_SCANNER_LOCALE_FIELD = new ParseField("boundary_scanner_locale");
public static final ParseField TYPE_FIELD = new ParseField("type");
public static final ParseField FRAGMENTER_FIELD = new ParseField("fragmenter");
public static final ParseField NO_MATCH_SIZE_FIELD = new ParseField("no_match_size");
public static final ParseField FORCE_SOURCE_FIELD = new ParseField("force_source").withAllDeprecated()
.forRestApiVersion(restApiVersion -> restApiVersion == RestApiVersion.V_8);
public static final ParseField PHRASE_LIMIT_FIELD = new ParseField("phrase_limit");
public static final ParseField OPTIONS_FIELD = new ParseField("options");
public static final ParseField HIGHLIGHT_QUERY_FIELD = new ParseField("highlight_query");
public static final ParseField MATCHED_FIELDS_FIELD = new ParseField("matched_fields");
public static final ParseField MAX_ANALYZED_OFFSET_FIELD = new ParseField("max_analyzed_offset");
protected String encoder;
protected String[] preTags;
protected String[] postTags;
protected Integer fragmentSize;
protected Integer numOfFragments;
protected String highlighterType;
protected String fragmenter;
protected QueryBuilder highlightQuery;
protected Order order;
protected Boolean highlightFilter;
protected BoundaryScannerType boundaryScannerType;
protected Integer boundaryMaxScan;
protected char[] boundaryChars;
protected Locale boundaryScannerLocale;
protected Integer noMatchSize;
protected Integer phraseLimit;
protected Map<String, Object> options;
protected Boolean requireFieldMatch;
protected Integer maxAnalyzedOffset;
public AbstractHighlighterBuilder() {}
protected AbstractHighlighterBuilder(AbstractHighlighterBuilder<?> template, QueryBuilder queryBuilder) {
preTags = template.preTags;
postTags = template.postTags;
fragmentSize = template.fragmentSize;
numOfFragments = template.numOfFragments;
encoder = template.encoder;
highlighterType = template.highlighterType;
fragmenter = template.fragmenter;
highlightQuery = queryBuilder;
order = template.order;
highlightFilter = template.highlightFilter;
boundaryScannerType = template.boundaryScannerType;
boundaryMaxScan = template.boundaryMaxScan;
boundaryChars = template.boundaryChars;
boundaryScannerLocale = template.boundaryScannerLocale;
noMatchSize = template.noMatchSize;
phraseLimit = template.phraseLimit;
options = template.options;
requireFieldMatch = template.requireFieldMatch;
this.maxAnalyzedOffset = template.maxAnalyzedOffset;
}
/**
* Read from a stream.
*/
@SuppressWarnings("this-escape")
protected AbstractHighlighterBuilder(StreamInput in) throws IOException {
preTags(in.readOptionalStringArray());
postTags(in.readOptionalStringArray());
fragmentSize(in.readOptionalVInt());
numOfFragments(in.readOptionalVInt());
if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_14_0)) {
encoder(in.readOptionalString());
}
highlighterType(in.readOptionalString());
fragmenter(in.readOptionalString());
if (in.readBoolean()) {
highlightQuery(in.readNamedWriteable(QueryBuilder.class));
}
order(in.readOptionalWriteable(Order::readFromStream));
highlightFilter(in.readOptionalBoolean());
boundaryScannerType(in.readOptionalWriteable(BoundaryScannerType::readFromStream));
boundaryMaxScan(in.readOptionalVInt());
if (in.readBoolean()) {
boundaryChars(in.readString().toCharArray());
}
if (in.readBoolean()) {
boundaryScannerLocale(in.readString());
}
noMatchSize(in.readOptionalVInt());
phraseLimit(in.readOptionalVInt());
if (in.readBoolean()) {
options(in.readGenericMap());
}
requireFieldMatch(in.readOptionalBoolean());
maxAnalyzedOffset(in.readOptionalInt());
}
/**
* write common parameters to {@link StreamOutput}
*/
@Override
public final void writeTo(StreamOutput out) throws IOException {
out.writeOptionalStringArray(preTags);
out.writeOptionalStringArray(postTags);
out.writeOptionalVInt(fragmentSize);
out.writeOptionalVInt(numOfFragments);
if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_14_0)) {
out.writeOptionalString(encoder);
}
out.writeOptionalString(highlighterType);
out.writeOptionalString(fragmenter);
boolean hasQuery = highlightQuery != null;
out.writeBoolean(hasQuery);
if (hasQuery) {
out.writeNamedWriteable(highlightQuery);
}
out.writeOptionalWriteable(order);
out.writeOptionalBoolean(highlightFilter);
out.writeOptionalWriteable(boundaryScannerType);
out.writeOptionalVInt(boundaryMaxScan);
boolean hasBounaryChars = boundaryChars != null;
out.writeBoolean(hasBounaryChars);
if (hasBounaryChars) {
out.writeString(String.valueOf(boundaryChars));
}
boolean hasBoundaryScannerLocale = boundaryScannerLocale != null;
out.writeBoolean(hasBoundaryScannerLocale);
if (hasBoundaryScannerLocale) {
out.writeString(boundaryScannerLocale.toLanguageTag());
}
out.writeOptionalVInt(noMatchSize);
out.writeOptionalVInt(phraseLimit);
boolean hasOptions = options != null;
out.writeBoolean(hasOptions);
if (hasOptions) {
out.writeGenericMap(options);
}
out.writeOptionalBoolean(requireFieldMatch);
out.writeOptionalInt(maxAnalyzedOffset);
doWriteTo(out);
}
protected abstract void doWriteTo(StreamOutput out) throws IOException;
/**
* Set the pre tags that will be used for highlighting.
*/
@SuppressWarnings("unchecked")
public HB preTags(String... preTags) {
this.preTags = preTags;
return (HB) this;
}
/**
* @return the value set by {@link #preTags(String...)}
*/
public String[] preTags() {
return this.preTags;
}
/**
* Set the post tags that will be used for highlighting.
*/
@SuppressWarnings("unchecked")
public HB postTags(String... postTags) {
this.postTags = postTags;
return (HB) this;
}
/**
* @return the value set by {@link #postTags(String...)}
*/
public String[] postTags() {
return this.postTags;
}
/**
* Set the fragment size in characters, defaults to {@link HighlightBuilder#DEFAULT_FRAGMENT_CHAR_SIZE}
*/
@SuppressWarnings("unchecked")
public HB fragmentSize(Integer fragmentSize) {
this.fragmentSize = fragmentSize;
return (HB) this;
}
/**
* @return the value set by {@link #fragmentSize(Integer)}
*/
public Integer fragmentSize() {
return this.fragmentSize;
}
/**
* Set the number of fragments, defaults to {@link HighlightBuilder#DEFAULT_NUMBER_OF_FRAGMENTS}
*/
@SuppressWarnings("unchecked")
public HB numOfFragments(Integer numOfFragments) {
this.numOfFragments = numOfFragments;
return (HB) this;
}
/**
* @return the value set by {@link #numOfFragments(Integer)}
*/
public Integer numOfFragments() {
return this.numOfFragments;
}
/**
* Set the encoder, defaults to {@link HighlightBuilder#DEFAULT_ENCODER}
*/
@SuppressWarnings("unchecked")
public HB encoder(String encoder) {
this.encoder = encoder;
return (HB) this;
}
/**
* @return the value set by {@link #encoder(String)}
*/
public String encoder() {
return this.encoder;
}
/**
* Set a tag scheme that encapsulates a built in pre and post tags. The allowed schemes
* are {@code styled} and {@code default}.
*
* @param schemaName The tag scheme name
*/
@SuppressWarnings("unchecked")
public HB tagsSchema(String schemaName) {
switch (schemaName) {
case "default" -> {
preTags(HighlightBuilder.DEFAULT_PRE_TAGS);
postTags(HighlightBuilder.DEFAULT_POST_TAGS);
}
case "styled" -> {
preTags(HighlightBuilder.DEFAULT_STYLED_PRE_TAG);
postTags(HighlightBuilder.DEFAULT_STYLED_POST_TAGS);
}
default -> throw new IllegalArgumentException("Unknown tag schema [" + schemaName + "]");
}
return (HB) this;
}
/**
* Set type of highlighter to use. Out of the box supported types
* are {@code unified}, {@code plain} and {@code fvh}.
* Defaults to {@code unified}.
* Details of the different highlighter types are covered in the reference guide.
*/
@SuppressWarnings("unchecked")
public HB highlighterType(String highlighterType) {
this.highlighterType = highlighterType;
return (HB) this;
}
/**
* @return the value set by {@link #highlighterType(String)}
*/
public String highlighterType() {
return this.highlighterType;
}
/**
* Sets what fragmenter to use to break up text that is eligible for highlighting.
* This option is only applicable when using the plain highlighterType {@code highlighter}.
* Permitted values are "simple" or "span" relating to {@link SimpleFragmenter} and
* {@link SimpleSpanFragmenter} implementations respectively with the default being "span"
*/
@SuppressWarnings("unchecked")
public HB fragmenter(String fragmenter) {
this.fragmenter = fragmenter;
return (HB) this;
}
/**
* @return the value set by {@link #fragmenter(String)}
*/
public String fragmenter() {
return this.fragmenter;
}
/**
* Sets a query to be used for highlighting instead of the search query.
*/
@SuppressWarnings("unchecked")
public HB highlightQuery(QueryBuilder highlightQuery) {
this.highlightQuery = highlightQuery;
return (HB) this;
}
/**
* @return the value set by {@link #highlightQuery(QueryBuilder)}
*/
public QueryBuilder highlightQuery() {
return this.highlightQuery;
}
/**
* The order of fragments per field. By default, ordered by the order in the
* highlighted text. Can be {@code score}, which then it will be ordered
* by score of the fragments, or {@code none}.
*/
public HB order(String order) {
return order(Order.fromString(order));
}
/**
* By default, fragments of a field are ordered by the order in the highlighted text.
* If set to {@link Order#SCORE}, this changes order to score of the fragments.
*/
@SuppressWarnings("unchecked")
public HB order(Order scoreOrdered) {
this.order = scoreOrdered;
return (HB) this;
}
/**
* @return the value set by {@link #order(Order)}
*/
public Order order() {
return this.order;
}
/**
* Set this to true when using the highlighterType {@code fvh}
* and you want to provide highlighting on filter clauses in your
* query. Default is {@code false}.
*/
@SuppressWarnings("unchecked")
public HB highlightFilter(Boolean highlightFilter) {
this.highlightFilter = highlightFilter;
return (HB) this;
}
/**
* @return the value set by {@link #highlightFilter(Boolean)}
*/
public Boolean highlightFilter() {
return this.highlightFilter;
}
/**
* When using the highlighterType {@code fvh} this setting
* controls which scanner to use for fragment boundaries, and defaults to "simple".
*/
@SuppressWarnings("unchecked")
public HB boundaryScannerType(String boundaryScannerType) {
this.boundaryScannerType = BoundaryScannerType.fromString(boundaryScannerType);
return (HB) this;
}
/**
* When using the highlighterType {@code fvh} this setting
* controls which scanner to use for fragment boundaries, and defaults to "simple".
*/
@SuppressWarnings("unchecked")
public HB boundaryScannerType(BoundaryScannerType boundaryScannerType) {
this.boundaryScannerType = boundaryScannerType;
return (HB) this;
}
/**
* @return the value set by {@link #boundaryScannerType(String)}
*/
public BoundaryScannerType boundaryScannerType() {
return this.boundaryScannerType;
}
/**
* When using the highlighterType {@code fvh} this setting
* controls how far to look for boundary characters, and defaults to 20.
*/
@SuppressWarnings("unchecked")
public HB boundaryMaxScan(Integer boundaryMaxScan) {
this.boundaryMaxScan = boundaryMaxScan;
return (HB) this;
}
/**
* @return the value set by {@link #boundaryMaxScan(Integer)}
*/
public Integer boundaryMaxScan() {
return this.boundaryMaxScan;
}
/**
* When using the highlighterType {@code fvh} this setting
* defines what constitutes a boundary for highlighting. It’s a single string with
* each boundary character defined in it. It defaults to .,!? \t\n
*/
@SuppressWarnings("unchecked")
public HB boundaryChars(char[] boundaryChars) {
this.boundaryChars = boundaryChars;
return (HB) this;
}
/**
* @return the value set by {@link #boundaryChars(char[])}
*/
public char[] boundaryChars() {
return this.boundaryChars;
}
/**
* When using the highlighterType {@code fvh} and boundaryScannerType {@code break_iterator}, this setting
* controls the locale to use by the BreakIterator, defaults to "root".
*/
@SuppressWarnings("unchecked")
public HB boundaryScannerLocale(String boundaryScannerLocale) {
if (boundaryScannerLocale != null) {
this.boundaryScannerLocale = Locale.forLanguageTag(boundaryScannerLocale);
}
return (HB) this;
}
/**
* Allows to set custom options for custom highlighters.
*/
@SuppressWarnings("unchecked")
public HB options(Map<String, Object> options) {
this.options = options;
return (HB) this;
}
/**
* @return the value set by {@link #options(Map)}
*/
public Map<String, Object> options() {
return this.options;
}
/**
* Set to true to cause a field to be highlighted only if a query matches that field.
* Default is false meaning that terms are highlighted on all requested fields regardless
* if the query matches specifically on them.
*/
@SuppressWarnings("unchecked")
public HB requireFieldMatch(Boolean requireFieldMatch) {
this.requireFieldMatch = requireFieldMatch;
return (HB) this;
}
/**
* @return the value set by {@link #requireFieldMatch(Boolean)}
*/
public Boolean requireFieldMatch() {
return this.requireFieldMatch;
}
/**
* Sets the size of the fragment to return from the beginning of the field if there are no matches to
* highlight and the field doesn't also define noMatchSize.
* @param noMatchSize integer to set or null to leave out of request. default is null.
* @return this for chaining
*/
@SuppressWarnings("unchecked")
public HB noMatchSize(Integer noMatchSize) {
this.noMatchSize = noMatchSize;
return (HB) this;
}
/**
* @return the value set by {@link #noMatchSize(Integer)}
*/
public Integer noMatchSize() {
return this.noMatchSize;
}
/**
* Sets the maximum number of phrases the fvh will consider if the field doesn't also define phraseLimit.
* @param phraseLimit maximum number of phrases the fvh will consider
* @return this for chaining
*/
@SuppressWarnings("unchecked")
public HB phraseLimit(Integer phraseLimit) {
this.phraseLimit = phraseLimit;
return (HB) this;
}
/**
* @return the value set by {@link #phraseLimit(Integer)}
*/
public Integer phraseLimit() {
return this.phraseLimit;
}
/**
* "maxAnalyzedOffset" might be non-negative int, null (unknown), or a negative int (defaulting to index analyzed offset).
*/
@SuppressWarnings("unchecked")
public HB maxAnalyzedOffset(Integer maxAnalyzedOffset) {
if (maxAnalyzedOffset != null && (maxAnalyzedOffset < -1 || maxAnalyzedOffset == 0)) {
throw new IllegalArgumentException("[" + MAX_ANALYZED_OFFSET_FIELD + "] must be a positive integer, or -1");
}
this.maxAnalyzedOffset = maxAnalyzedOffset;
return (HB) this;
}
/**
* @return the value set by {@link #maxAnalyzedOffset(Integer)}
*/
public Integer maxAnalyzedOffset() {
return this.maxAnalyzedOffset;
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
innerXContent(builder);
builder.endObject();
return builder;
}
protected abstract void innerXContent(XContentBuilder builder) throws IOException;
void commonOptionsToXContent(XContentBuilder builder) throws IOException {
if (preTags != null) {
builder.array(PRE_TAGS_FIELD.getPreferredName(), preTags);
}
if (postTags != null) {
builder.array(POST_TAGS_FIELD.getPreferredName(), postTags);
}
if (fragmentSize != null) {
builder.field(FRAGMENT_SIZE_FIELD.getPreferredName(), fragmentSize);
}
if (numOfFragments != null) {
builder.field(NUMBER_OF_FRAGMENTS_FIELD.getPreferredName(), numOfFragments);
}
if (encoder != null) {
builder.field(ENCODER_FIELD.getPreferredName(), encoder);
}
if (highlighterType != null) {
builder.field(TYPE_FIELD.getPreferredName(), highlighterType);
}
if (fragmenter != null) {
builder.field(FRAGMENTER_FIELD.getPreferredName(), fragmenter);
}
if (highlightQuery != null) {
builder.field(HIGHLIGHT_QUERY_FIELD.getPreferredName(), highlightQuery);
}
if (order != null) {
builder.field(ORDER_FIELD.getPreferredName(), order.toString());
}
if (highlightFilter != null) {
builder.field(HIGHLIGHT_FILTER_FIELD.getPreferredName(), highlightFilter);
}
if (boundaryScannerType != null) {
builder.field(BOUNDARY_SCANNER_FIELD.getPreferredName(), boundaryScannerType.name());
}
if (boundaryMaxScan != null) {
builder.field(BOUNDARY_MAX_SCAN_FIELD.getPreferredName(), boundaryMaxScan);
}
if (boundaryChars != null) {
builder.field(BOUNDARY_CHARS_FIELD.getPreferredName(), new String(boundaryChars));
}
if (boundaryScannerLocale != null) {
builder.field(BOUNDARY_SCANNER_LOCALE_FIELD.getPreferredName(), boundaryScannerLocale.toLanguageTag());
}
if (options != null && options.size() > 0) {
builder.field(OPTIONS_FIELD.getPreferredName(), options);
}
if (requireFieldMatch != null) {
builder.field(REQUIRE_FIELD_MATCH_FIELD.getPreferredName(), requireFieldMatch);
}
if (noMatchSize != null) {
builder.field(NO_MATCH_SIZE_FIELD.getPreferredName(), noMatchSize);
}
if (phraseLimit != null) {
builder.field(PHRASE_LIMIT_FIELD.getPreferredName(), phraseLimit);
}
if (maxAnalyzedOffset != null) {
builder.field(MAX_ANALYZED_OFFSET_FIELD.getPreferredName(), maxAnalyzedOffset);
}
}
static <HB extends AbstractHighlighterBuilder<HB>> BiFunction<XContentParser, HB, HB> setupParser(ObjectParser<HB, Void> parser) {
parser.declareStringArray(fromList(String.class, HB::preTags), PRE_TAGS_FIELD);
parser.declareStringArray(fromList(String.class, HB::postTags), POST_TAGS_FIELD);
parser.declareString(HB::order, ORDER_FIELD);
parser.declareBoolean(HB::highlightFilter, HIGHLIGHT_FILTER_FIELD);
parser.declareInt(HB::fragmentSize, FRAGMENT_SIZE_FIELD);
parser.declareInt(HB::numOfFragments, NUMBER_OF_FRAGMENTS_FIELD);
parser.declareString(HB::encoder, ENCODER_FIELD);
parser.declareString(HB::tagsSchema, TAGS_SCHEMA_FIELD);
parser.declareBoolean(HB::requireFieldMatch, REQUIRE_FIELD_MATCH_FIELD);
parser.declareString(HB::boundaryScannerType, BOUNDARY_SCANNER_FIELD);
parser.declareInt(HB::boundaryMaxScan, BOUNDARY_MAX_SCAN_FIELD);
parser.declareString((HB hb, String bc) -> hb.boundaryChars(bc.toCharArray()), BOUNDARY_CHARS_FIELD);
parser.declareString(HB::boundaryScannerLocale, BOUNDARY_SCANNER_LOCALE_FIELD);
parser.declareString(HB::highlighterType, TYPE_FIELD);
parser.declareString(HB::fragmenter, FRAGMENTER_FIELD);
parser.declareInt(HB::noMatchSize, NO_MATCH_SIZE_FIELD);
parser.declareBoolean((builder, value) -> {}, FORCE_SOURCE_FIELD); // force_source is ignored
parser.declareInt(HB::phraseLimit, PHRASE_LIMIT_FIELD);
parser.declareInt(HB::maxAnalyzedOffset, MAX_ANALYZED_OFFSET_FIELD);
parser.declareObject(HB::options, (XContentParser p, Void c) -> {
try {
return p.map();
} catch (IOException e) {
throw new RuntimeException("Error parsing options", e);
}
}, OPTIONS_FIELD);
parser.declareObject(HB::highlightQuery, (XContentParser p, Void c) -> {
try {
return parseTopLevelQuery(p);
} catch (IOException e) {
throw new RuntimeException("Error parsing query", e);
}
}, HIGHLIGHT_QUERY_FIELD);
return (XContentParser p, HB hb) -> {
try {
parser.parse(p, hb, null);
if (hb.preTags() != null && hb.postTags() == null) {
throw new ParsingException(p.getTokenLocation(), "pre_tags are set but post_tags are not set");
}
if (hb.preTags() != null && hb.postTags() != null && (hb.preTags().length == 0 || hb.postTags().length == 0)) {
throw new ParsingException(p.getTokenLocation(), "pre_tags or post_tags must not be empty");
}
} catch (IOException e) {
throw new RuntimeException(e);
}
return hb;
};
}
@Override
public final int hashCode() {
return Objects.hash(
getClass(),
Arrays.hashCode(preTags),
Arrays.hashCode(postTags),
fragmentSize,
numOfFragments,
encoder,
highlighterType,
fragmenter,
highlightQuery,
order,
highlightFilter,
boundaryScannerType,
boundaryMaxScan,
Arrays.hashCode(boundaryChars),
boundaryScannerLocale,
noMatchSize,
phraseLimit,
options,
requireFieldMatch,
maxAnalyzedOffset,
doHashCode()
);
}
/**
* fields only present in subclass should contribute to hashCode in the implementation
*/
protected abstract int doHashCode();
@Override
public final boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj == null || getClass() != obj.getClass()) {
return false;
}
@SuppressWarnings("unchecked")
HB other = (HB) obj;
return Arrays.equals(preTags, other.preTags)
&& Arrays.equals(postTags, other.postTags)
&& Objects.equals(fragmentSize, other.fragmentSize)
&& Objects.equals(numOfFragments, other.numOfFragments)
&& Objects.equals(encoder, other.encoder)
&& Objects.equals(highlighterType, other.highlighterType)
&& Objects.equals(fragmenter, other.fragmenter)
&& Objects.equals(highlightQuery, other.highlightQuery)
&& Objects.equals(order, other.order)
&& Objects.equals(highlightFilter, other.highlightFilter)
&& Objects.equals(boundaryScannerType, other.boundaryScannerType)
&& Objects.equals(boundaryMaxScan, other.boundaryMaxScan)
&& Arrays.equals(boundaryChars, other.boundaryChars)
&& Objects.equals(boundaryScannerLocale, other.boundaryScannerLocale)
&& Objects.equals(noMatchSize, other.noMatchSize)
&& Objects.equals(phraseLimit, other.phraseLimit)
&& Objects.equals(options, other.options)
&& Objects.equals(requireFieldMatch, other.requireFieldMatch)
&& Objects.equals(maxAnalyzedOffset, other.maxAnalyzedOffset)
&& doEquals(other);
}
/**
* fields only present in subclass should be checked for equality in the implementation
*/
protected abstract boolean doEquals(HB other);
@Override
public String toString() {
return Strings.toString(this, true, true);
}
}
| AbstractHighlighterBuilder |
java | apache__camel | components/camel-fhir/camel-fhir-api/src/main/java/org/apache/camel/component/fhir/api/FhirHistory.java | {
"start": 1250,
"end": 7376
} | class ____ {
private final IGenericClient client;
public FhirHistory(IGenericClient client) {
this.client = client;
}
/**
* Perform the operation across all versions of all resources of all types on the server
*
* @param returnType Request that the method return a Bundle resource (such as
* <code>ca.uhn.fhir.model.dstu2.resource.Bundle</code>). Use this method if you are
* accessing a DSTU2+ server.
* @param count Request that the server return only up to <code>theCount</code> number of resources, may
* be NULL
* @param cutoff Request that the server return only resource versions that were created at or after the
* given time (inclusive), may be NULL
* @param iCutoff Request that the server return only resource versions that were created at or after the
* given time (inclusive), may be NULL
* @param <T> extends {@link IBaseBundle}
* @param extraParameters see {@link ExtraParameters} for a full list of parameters that can be passed, may be NULL
* @return the {@link IBaseBundle}
*/
public <T extends IBaseBundle> T onServer(
Class<T> returnType, Integer count, Date cutoff, IPrimitiveType<Date> iCutoff,
Map<ExtraParameters, Object> extraParameters) {
IHistoryTyped<T> tiHistoryTyped = client.history().onServer().returnBundle(returnType);
processOptionalParams(count, cutoff, iCutoff, tiHistoryTyped);
ExtraParameters.process(extraParameters, tiHistoryTyped);
return tiHistoryTyped.execute();
}
/**
* Perform the operation across all versions of all resources of the given type on the server
*
* @param resourceType The resource type to search for
* @param returnType Request that the method return a Bundle resource (such as
* <code>ca.uhn.fhir.model.dstu2.resource.Bundle</code>). Use this method if you are
* accessing a DSTU2+ server.
* @param count Request that the server return only up to <code>theCount</code> number of resources, may
* be NULL
* @param cutoff Request that the server return only resource versions that were created at or after the
* given time (inclusive), may be NULL
* @param iCutoff Request that the server return only resource versions that were created at or after the
* given time (inclusive), may be NULL
* @param <T> extends {@link IBaseBundle}
* @param extraParameters see {@link ExtraParameters} for a full list of parameters that can be passed, may be NULL
* @return the {@link IBaseBundle}
*/
public <T extends IBaseBundle> T onType(
Class<IBaseResource> resourceType, Class<T> returnType, Integer count, Date cutoff,
IPrimitiveType<Date> iCutoff, Map<ExtraParameters, Object> extraParameters) {
IHistoryTyped<T> tiHistoryTyped = client.history().onType(resourceType).andReturnBundle(returnType);
processOptionalParams(count, cutoff, iCutoff, tiHistoryTyped);
ExtraParameters.process(extraParameters, tiHistoryTyped);
return tiHistoryTyped.execute();
}
/**
* Perform the operation across all versions of a specific resource (by ID and type) on the server. Note that
* <code>theId</code> must be populated with both a resource type and a resource ID at a minimum.
*
* @param id the {@link IIdType} which must be populated with both a resource type and a
* resource ID at
* @param returnType Request that the method return a Bundle resource (such as
* <code>ca.uhn.fhir.model.dstu2.resource.Bundle</code>). Use this method if you
* are accessing a DSTU2+ server.
* @param count Request that the server return only up to <code>theCount</code> number of
* resources, may be NULL
* @param cutoff Request that the server return only resource versions that were created at or
* after the given time (inclusive), may be NULL
* @param iCutoff Request that the server return only resource versions that were created at or
* after the given time (inclusive), may be NULL
* @param <T> extends {@link IBaseBundle}
* @param extraParameters see {@link ExtraParameters} for a full list of parameters that can be passed,
* may be NULL
* @throws IllegalArgumentException If <code>id</code> does not contain at least a resource type and ID
* @return the {@link IBaseBundle}
*/
public <T extends IBaseBundle> T onInstance(
IIdType id, Class<T> returnType, Integer count, Date cutoff, IPrimitiveType<Date> iCutoff,
Map<ExtraParameters, Object> extraParameters) {
IHistoryTyped<T> tiHistoryTyped = client.history().onInstance(id).andReturnBundle(returnType);
processOptionalParams(count, cutoff, iCutoff, tiHistoryTyped);
ExtraParameters.process(extraParameters, tiHistoryTyped);
return tiHistoryTyped.execute();
}
private <T extends IBaseBundle> void processOptionalParams(
Integer count, Date theCutoff, IPrimitiveType<Date> cutoff, IHistoryTyped<T> tiHistoryTyped) {
if (count != null) {
tiHistoryTyped.count(count);
}
if (theCutoff != null) {
tiHistoryTyped.since(theCutoff);
}
if (cutoff != null) {
tiHistoryTyped.since(cutoff);
}
}
}
| FhirHistory |
java | junit-team__junit5 | junit-platform-engine/src/main/java/org/junit/platform/engine/support/hierarchical/Node.java | {
"start": 9246,
"end": 10753
} | interface ____ {
/**
* Submit a dynamic test descriptor for immediate execution.
*
* @param testDescriptor the test descriptor to be executed; never
* {@code null}
*/
void execute(TestDescriptor testDescriptor);
/**
* Submit a dynamic test descriptor for immediate execution with a
* custom, potentially no-op, execution listener.
*
* @param testDescriptor the test descriptor to be executed; never
* {@code null}
* @param executionListener the executionListener to be notified; never
* {@code null}
* @return a future to cancel or wait for the execution
* @since 1.7
* @see EngineExecutionListener#NOOP
*/
@API(status = STABLE, since = "1.10")
Future<?> execute(TestDescriptor testDescriptor, EngineExecutionListener executionListener);
/**
* Block until all dynamic test descriptors submitted to this executor
* are finished.
*
* <p>This method is useful if the node needs to perform actions in its
* {@link #execute(EngineExecutionContext, DynamicTestExecutor)} method
* after all its dynamic children have finished.
*
* @throws InterruptedException if interrupted while waiting
*/
void awaitFinished() throws InterruptedException;
}
/**
* Supported execution modes for parallel execution.
*
* @since 1.3
* @see #SAME_THREAD
* @see #CONCURRENT
* @see Node#getExecutionMode()
*/
@API(status = STABLE, since = "1.10", consumers = "org.junit.platform.engine.support.hierarchical")
| DynamicTestExecutor |
java | micronaut-projects__micronaut-core | inject/src/main/java/io/micronaut/context/MessageSourceUtils.java | {
"start": 854,
"end": 1440
} | class ____ {
/**
* Returns a Map whose keys are the index of the varargs.
* E.g. for "Sergio", "John" the map ["0" => "Sergio", "1" => "John"] is returned
* @param args variables
* @return The variables map.
*/
@NonNull
public static Map<String, Object> variables(@NonNull Object... args) {
Map<String, Object> variables = new HashMap<>();
int count = 0;
for (Object value : args) {
variables.put(String.valueOf(count), value);
count++;
}
return variables;
}
}
| MessageSourceUtils |
java | spring-projects__spring-framework | spring-orm/src/test/java/org/springframework/orm/jpa/support/PersistenceContextTransactionTests.java | {
"start": 6862,
"end": 7361
} | class ____ {
@PersistenceContext
EntityManager sharedEntityManager;
@PersistenceContext(type = PersistenceContextType.EXTENDED)
EntityManager extendedEntityManager;
@PersistenceContext(synchronization = SynchronizationType.UNSYNCHRONIZED)
EntityManager sharedEntityManagerUnsynchronized;
@PersistenceContext(type = PersistenceContextType.EXTENDED, synchronization = SynchronizationType.UNSYNCHRONIZED)
EntityManager extendedEntityManagerUnsynchronized;
}
}
| EntityManagerHoldingBean |
java | micronaut-projects__micronaut-core | core-processor/src/main/java/io/micronaut/inject/writer/AbstractBeanDefinitionBuilder.java | {
"start": 3244,
"end": 34553
} | class ____ implements BeanElementBuilder {
private static final Map<String, AtomicInteger> BEAN_COUNTER = new HashMap<>(15);
private static final Predicate<Set<ElementModifier>> PUBLIC_FILTER = (
elementModifiers -> elementModifiers.contains(ElementModifier.PUBLIC));
private static final Predicate<Set<ElementModifier>> NON_PUBLIC_FILTER = (
elementModifiers -> !elementModifiers.contains(ElementModifier.PUBLIC));
private static final Comparator<MemberElement> SORTER = (o1, o2) -> {
final ClassElement d1 = o1.getDeclaringType();
final ClassElement d2 = o2.getDeclaringType();
final String o1Type = d1.getName();
final String o2Type = d2.getName();
if (o1Type.equals(o2Type)) {
return 0;
} else {
if (d1.isAssignable(d2)) {
return 1;
} else {
return -1;
}
}
};
protected final VisitorContext visitorContext;
protected final ElementAnnotationMetadataFactory elementAnnotationMetadataFactory;
private final Element originatingElement;
private final ClassElement originatingType;
private final ClassElement beanType;
private final int identifier;
private final MutableAnnotationMetadata annotationMetadata;
private final List<BeanMethodElement> executableMethods = new ArrayList<>(5);
private final List<BeanMethodElement> interceptedMethods = new ArrayList<>(5);
private final List<AbstractBeanDefinitionBuilder> childBeans = new ArrayList<>(5);
private final List<BeanMethodElement> injectedMethods = new ArrayList<>(5);
private final List<BeanMethodElement> preDestroyMethods = new ArrayList<>(5);
private final List<BeanMethodElement> postConstructMethods = new ArrayList<>(5);
private final List<BeanFieldElement> injectedFields = new ArrayList<>(5);
private BeanConstructorElement constructorElement;
private Map<String, Map<String, ClassElement>> typeArguments;
private ClassElement[] exposedTypes;
private boolean intercepted;
/**
* Default constructor.
*
* @param originatingElement The originating element
* @param beanType The bean type
* @param visitorContext the visitor context
* @param elementAnnotationMetadataFactory The element annotation metadata factory
*/
protected AbstractBeanDefinitionBuilder(
Element originatingElement,
ClassElement beanType,
VisitorContext visitorContext,
ElementAnnotationMetadataFactory elementAnnotationMetadataFactory) {
this.originatingElement = originatingElement;
this.elementAnnotationMetadataFactory = elementAnnotationMetadataFactory;
if (originatingElement instanceof MethodElement element) {
this.originatingType = element.getDeclaringType();
} else if (originatingElement instanceof ClassElement element) {
this.originatingType = element;
} else {
throw new IllegalArgumentException("Invalid originating element: " + originatingElement);
}
this.beanType = beanType;
this.visitorContext = visitorContext;
this.identifier = BEAN_COUNTER.computeIfAbsent(beanType.getName(), (s) -> new AtomicInteger(0))
.getAndIncrement();
this.annotationMetadata = MutableAnnotationMetadata.of(beanType.getAnnotationMetadata());
this.annotationMetadata.addDeclaredAnnotation(Bean.class.getName(), Collections.emptyMap());
this.constructorElement = initConstructor(beanType);
}
@Override
public BeanElementBuilder intercept(AnnotationValue<?>... annotationValue) {
for (AnnotationValue<?> value : annotationValue) {
annotate(value);
}
this.intercepted = true;
return this;
}
@Internal
public static void writeBeanDefinitionBuilders(ClassWriterOutputVisitor classWriterOutputVisitor,
List<AbstractBeanDefinitionBuilder> beanDefinitionBuilders)
throws IOException {
for (AbstractBeanDefinitionBuilder beanDefinitionBuilder : beanDefinitionBuilders) {
writeBeanDefinition(classWriterOutputVisitor, beanDefinitionBuilder);
final List<AbstractBeanDefinitionBuilder> childBeans = beanDefinitionBuilder.getChildBeans();
for (AbstractBeanDefinitionBuilder childBean : childBeans) {
writeBeanDefinition(classWriterOutputVisitor, childBean);
}
}
}
private static void writeBeanDefinition(ClassWriterOutputVisitor classWriterOutputVisitor, AbstractBeanDefinitionBuilder beanDefinitionBuilder)
throws IOException {
final ClassOutputWriter beanDefinitionWriter = beanDefinitionBuilder.build();
if (beanDefinitionWriter != null) {
beanDefinitionWriter.accept(classWriterOutputVisitor);
}
}
private InternalBeanConstructorElement initConstructor(ClassElement beanType) {
return beanType.getPrimaryConstructor().map(m -> new InternalBeanConstructorElement(
m,
!m.isPublic(),
initBeanParameters(m.getParameters())
)).orElse(null);
}
/**
* Is the bean to be built intercepted?
*
* @return True if it is
*/
protected boolean isIntercepted() {
return this.intercepted || !this.interceptedMethods.isEmpty();
}
@Override
public BeanElementBuilder inject() {
processInjectedMethods();
processInjectedFields();
return this;
}
/**
* Any child bean definitions.
*
* @return The child beans
*/
public List<AbstractBeanDefinitionBuilder> getChildBeans() {
return childBeans;
}
private void processInjectedFields() {
final ElementQuery<FieldElement> baseQuery = ElementQuery.ALL_FIELDS
.onlyInstance()
.onlyInjected();
Set<FieldElement> accessibleFields = new HashSet<>();
this.beanType.getEnclosedElements(baseQuery.modifiers(PUBLIC_FILTER))
.forEach(fieldElement -> {
accessibleFields.add(fieldElement);
new InternalBeanElementField(fieldElement, false).inject();
});
this.beanType.getEnclosedElements(baseQuery.modifiers(NON_PUBLIC_FILTER))
.forEach(fieldElement -> {
if (!accessibleFields.contains(fieldElement)) {
new InternalBeanElementField(fieldElement, true).inject();
}
});
}
private void processInjectedMethods() {
final ElementQuery<MethodElement> baseQuery = ElementQuery.ALL_METHODS
.onlyInstance()
.onlyConcrete()
.onlyInjected();
Set<MethodElement> accessibleMethods = new HashSet<>();
this.beanType.getEnclosedElements(baseQuery.modifiers(PUBLIC_FILTER))
.forEach(methodElement -> {
accessibleMethods.add(methodElement);
handleMethod(methodElement, false);
});
this.beanType.getEnclosedElements(baseQuery.modifiers(NON_PUBLIC_FILTER))
.forEach(methodElement -> {
if (!accessibleMethods.contains(methodElement)) {
handleMethod(methodElement, true);
}
});
}
private void handleMethod(MethodElement methodElement, boolean requiresReflection) {
boolean lifecycleMethod = false;
if (methodElement.getAnnotationMetadata().hasDeclaredAnnotation(AnnotationUtil.PRE_DESTROY)) {
new InternalBeanElementMethod(methodElement, requiresReflection)
.preDestroy();
lifecycleMethod = true;
}
if (methodElement.getAnnotationMetadata().hasDeclaredAnnotation(AnnotationUtil.POST_CONSTRUCT)) {
new InternalBeanElementMethod(methodElement, requiresReflection)
.postConstruct();
lifecycleMethod = true;
}
if (!lifecycleMethod) {
new InternalBeanElementMethod(methodElement, requiresReflection)
.inject();
}
}
@NonNull
@Override
public Element getOriginatingElement() {
return originatingElement;
}
@NonNull
@Override
public ClassElement getBeanType() {
return beanType;
}
/**
* Initialize the bean parameters.
*
* @param constructorParameters The parameters to use.
* @return The initialized parameters
*/
protected final BeanParameterElement[] initBeanParameters(@NonNull ParameterElement[] constructorParameters) {
if (ArrayUtils.isNotEmpty(constructorParameters)) {
return Arrays.stream(constructorParameters)
.map(InternalBeanParameter::new)
.toArray(BeanParameterElement[]::new);
} else {
return ZERO_BEAN_PARAMETER_ELEMENTS;
}
}
@NonNull
@Override
public AnnotationMetadata getAnnotationMetadata() {
return this.annotationMetadata;
}
@NonNull
@Override
public BeanElementBuilder createWith(@NonNull MethodElement element) {
// TODO: handle factories, static methods etc.
//noinspection ConstantConditions
if (element != null) {
constructorElement = new InternalBeanConstructorElement(
element,
!element.isPublic(),
initBeanParameters(element.getParameters())
);
}
return this;
}
@NonNull
@Override
public BeanElementBuilder typed(ClassElement... types) {
if (ArrayUtils.isNotEmpty(types)) {
this.exposedTypes = types;
}
return this;
}
@NonNull
@Override
public BeanElementBuilder typeArguments(@NonNull ClassElement... types) {
final Map<String, ClassElement> typeArguments = this.beanType.getTypeArguments();
Map<String, ClassElement> resolvedTypes = resolveTypeArguments(typeArguments, types);
if (resolvedTypes != null) {
if (this.typeArguments == null) {
this.typeArguments = new LinkedHashMap<>();
}
this.typeArguments.put(beanType.getName(), typeArguments);
}
return this;
}
@NonNull
@Override
public BeanElementBuilder typeArgumentsForType(ClassElement type, @NonNull ClassElement... types) {
if (type != null) {
final Map<String, ClassElement> typeArguments = type.getTypeArguments();
Map<String, ClassElement> resolvedTypes = resolveTypeArguments(typeArguments, types);
if (resolvedTypes != null) {
if (this.typeArguments == null) {
this.typeArguments = new LinkedHashMap<>();
}
this.typeArguments.put(type.getName(), resolvedTypes);
}
}
return this;
}
@Nullable
private Map<String, ClassElement> resolveTypeArguments(Map<String, ClassElement> typeArguments, ClassElement... types) {
Map<String, ClassElement> resolvedTypes = null;
if (typeArguments.size() == types.length) {
resolvedTypes = CollectionUtils.newLinkedHashMap(typeArguments.size());
final Iterator<String> i = typeArguments.keySet().iterator();
for (ClassElement type : types) {
final String variable = i.next();
resolvedTypes.put(variable, type);
}
}
return resolvedTypes;
}
@Override
public BeanElementBuilder withConstructor(Consumer<BeanConstructorElement> constructorElement) {
if (constructorElement != null && this.constructorElement != null) {
constructorElement.accept(this.constructorElement);
}
return this;
}
@NonNull
@Override
public BeanElementBuilder withMethods(
@NonNull ElementQuery<MethodElement> methods,
@NonNull Consumer<BeanMethodElement> beanMethods) {
//noinspection ConstantConditions
if (methods != null && beanMethods != null) {
final ElementQuery<MethodElement> baseQuery = methods.onlyInstance();
this.beanType.getEnclosedElements(baseQuery.modifiers(m -> m.contains(ElementModifier.PUBLIC)))
.forEach(methodElement ->
beanMethods.accept(new InternalBeanElementMethod(methodElement, false))
);
this.beanType.getEnclosedElements(baseQuery.modifiers(m -> !m.contains(ElementModifier.PUBLIC)))
.forEach(methodElement ->
beanMethods.accept(new InternalBeanElementMethod(methodElement, true))
);
}
return this;
}
@NonNull
@Override
public BeanElementBuilder withFields(@NonNull ElementQuery<FieldElement> fields, @NonNull Consumer<BeanFieldElement> beanFields) {
//noinspection ConstantConditions
if (fields != null && beanFields != null) {
this.beanType.getEnclosedElements(fields.onlyInstance().onlyAccessible(originatingType))
.forEach((fieldElement) ->
beanFields.accept(new InternalBeanElementField(fieldElement, false))
);
}
return this;
}
@NonNull
@Override
public BeanElementBuilder withParameters(Consumer<BeanParameterElement[]> parameters) {
if (parameters != null && this.constructorElement != null) {
parameters.accept(getParameters());
}
return this;
}
/**
* @return The bean creation parameters.
*/
@NonNull
protected BeanParameterElement[] getParameters() {
return constructorElement.getParameters();
}
@NonNull
@Override
public String getName() {
return beanType.getName();
}
@Override
public boolean isProtected() {
return beanType.isProtected();
}
@Override
public boolean isPublic() {
return beanType.isPublic();
}
@NonNull
@Override
public Object getNativeType() {
return beanType;
}
@NonNull
@Override
public <T extends Annotation> BeanElementBuilder annotate(@NonNull String annotationType, @NonNull Consumer<AnnotationValueBuilder<T>> consumer) {
annotate(this.annotationMetadata, annotationType, consumer);
return this;
}
@Override
public <T extends Annotation> Element annotate(AnnotationValue<T> annotationValue) {
annotate(this.annotationMetadata, annotationValue);
return this;
}
@Override
public BeanElementBuilder removeAnnotation(@NonNull String annotationType) {
removeAnnotation(this.annotationMetadata, annotationType);
return this;
}
@Override
public <T extends Annotation> BeanElementBuilder removeAnnotationIf(@NonNull Predicate<AnnotationValue<T>> predicate) {
removeAnnotationIf(this.annotationMetadata, predicate);
return this;
}
@Override
public BeanElementBuilder removeStereotype(@NonNull String annotationType) {
removeStereotype(this.annotationMetadata, annotationType);
return this;
}
private BeanElementBuilder addChildBean(@NonNull MethodElement producerMethod, Consumer<BeanElementBuilder> childBeanBuilder) {
final AbstractBeanDefinitionBuilder childBuilder = createChildBean(producerMethod);
this.childBeans.add(childBuilder);
if (childBeanBuilder != null) {
childBeanBuilder.accept(childBuilder);
}
return this;
}
private BeanElementBuilder addChildBean(@NonNull FieldElement producerMethod, Consumer<BeanElementBuilder> childBeanBuilder) {
final AbstractBeanDefinitionBuilder childBuilder = createChildBean(producerMethod);
this.childBeans.add(childBuilder);
if (childBeanBuilder != null) {
childBeanBuilder.accept(childBuilder);
}
return this;
}
@Override
public <E extends MemberElement> BeanElementBuilder produceBeans(ElementQuery<E> methodsOrFields,
Consumer<BeanElementBuilder> childBeanBuilder) {
methodsOrFields = methodsOrFields
.onlyConcrete()
.modifiers(modifiers -> modifiers.contains(ElementModifier.PUBLIC));
final List<E> enclosedElements = this.beanType.getEnclosedElements(methodsOrFields);
for (E enclosedElement : enclosedElements) {
if (enclosedElement instanceof FieldElement fe) {
final ClassElement type = fe.getGenericField().getType();
if (type.isPublic() && !type.isPrimitive()) {
addChildBean(fe, childBeanBuilder);
}
}
if (enclosedElement instanceof MethodElement me && !(enclosedElement instanceof ConstructorElement)) {
final ClassElement type = me.getGenericReturnType().getType();
if (type.isPublic() && !type.isPrimitive()) {
addChildBean(me, childBeanBuilder);
}
}
}
return this;
}
/**
* Creates a child bean for the given producer field.
*
* @param producerField The producer field
* @return The child bean builder
*/
protected abstract @NonNull AbstractBeanDefinitionBuilder createChildBean(FieldElement producerField);
/**
* Visit the intercepted methods of this type.
*
* @param consumer A consumer to handle the method
*/
protected void visitInterceptedMethods(BiConsumer<TypedElement, MethodElement> consumer) {
if (consumer != null) {
ClassElement beanClass = getBeanType();
if (CollectionUtils.isNotEmpty(interceptedMethods)) {
for (BeanMethodElement interceptedMethod : interceptedMethods) {
handleMethod(beanClass, interceptedMethod, consumer);
}
}
if (this.intercepted) {
beanClass.getEnclosedElements(
ElementQuery.ALL_METHODS
.onlyInstance()
.modifiers(mods -> !mods.contains(ElementModifier.FINAL) && mods.contains(ElementModifier.PUBLIC))
).forEach(method -> {
InternalBeanElementMethod ibem = new InternalBeanElementMethod(
method,
true
);
if (!interceptedMethods.contains(ibem)) {
handleMethod(beanClass, ibem, consumer);
}
});
}
}
}
@SuppressWarnings({"rawtypes", "unchecked"})
private void handleMethod(ClassElement beanClass, MethodElement method, BiConsumer<TypedElement, MethodElement> consumer) {
consumer.accept(
beanClass,
method.withAnnotationMetadata(new AnnotationMetadataHierarchy(getAnnotationMetadata(), method.getAnnotationMetadata()))
);
}
/**
* Creates a child bean for the given producer method.
*
* @param producerMethod The producer method
* @return The child bean builder
*/
protected abstract @NonNull AbstractBeanDefinitionBuilder createChildBean(MethodElement producerMethod);
/**
* Build the bean definition writer.
*
* @return The writer, possibly null if it wasn't possible to build it
*/
@SuppressWarnings({"ConstantConditions", "java:S2583"})
@Nullable
public BeanClassWriter build() {
BeanClassWriter beanWriter = buildBeanClassWriter();
if (beanWriter == null) {
return null;
} else {
BeanDefinitionVisitor parentVisitor = beanWriter.getBeanDefinitionVisitor();
AnnotationMetadata thisAnnotationMetadata = getAnnotationMetadata();
if (isIntercepted() && parentVisitor instanceof BeanDefinitionWriter beanDefinitionWriter) {
return new BeanClassWriter() {
@Override
public BeanDefinitionVisitor getBeanDefinitionVisitor() {
return parentVisitor;
}
@Override
public void accept(ClassWriterOutputVisitor classWriterOutputVisitor) throws IOException {
BeanDefinitionVisitor aopProxyWriter = AbstractBeanDefinitionBuilder.this.createAopWriter(beanDefinitionWriter, thisAnnotationMetadata);
if (configureBeanVisitor(aopProxyWriter)) {
return;
}
configureInjectionPoints(aopProxyWriter);
visitInterceptedMethods(
createAroundMethodVisitor(aopProxyWriter)
);
finalizeAndWriteBean(classWriterOutputVisitor, aopProxyWriter);
beanWriter.accept(classWriterOutputVisitor);
}
};
} else {
return beanWriter;
}
}
}
/**
* Creates the around method visitor.
*
* @param aopProxyWriter The AOP writer
* @return The visitor
*/
@NonNull
protected abstract BiConsumer<TypedElement, MethodElement> createAroundMethodVisitor(BeanDefinitionVisitor aopProxyWriter);
/**
* Creates the AOP writer.
*
* @param beanDefinitionWriter The bean definition writer
* @param annotationMetadata The annotation metadata
* @return The AOP writer
*/
@NonNull
protected abstract BeanDefinitionVisitor createAopWriter(BeanDefinitionWriter beanDefinitionWriter, AnnotationMetadata annotationMetadata);
@NonNull
private BeanClassWriter buildBeanClassWriter() {
final BeanDefinitionVisitor beanDefinitionWriter = createBeanDefinitionWriter();
return new BeanClassWriter() {
@Override
public BeanDefinitionVisitor getBeanDefinitionVisitor() {
return beanDefinitionWriter;
}
@Override
public void accept(ClassWriterOutputVisitor classWriterOutputVisitor) throws IOException {
if (configureBeanVisitor(beanDefinitionWriter)) {
return;
}
configureInjectionPoints(beanDefinitionWriter);
for (BeanMethodElement postConstructMethod : postConstructMethods) {
if (postConstructMethod.getDeclaringType().equals(beanType)) {
beanDefinitionWriter.visitPostConstructMethod(
beanType,
postConstructMethod,
postConstructMethod.isReflectionRequired(),
visitorContext
);
}
}
for (BeanMethodElement preDestroyMethod : preDestroyMethods) {
if (preDestroyMethod.getDeclaringType().equals(beanType)) {
beanDefinitionWriter.visitPreDestroyMethod(
beanType,
preDestroyMethod,
preDestroyMethod.isReflectionRequired(),
visitorContext
);
}
}
finalizeAndWriteBean(classWriterOutputVisitor, beanDefinitionWriter);
}
};
}
private void configureInjectionPoints(BeanDefinitionVisitor beanDefinitionWriter) {
Map<ClassElement, List<MemberElement>> sortedInjections = new LinkedHashMap<>();
List<MemberElement> allInjected = new ArrayList<>();
allInjected.addAll(injectedFields);
allInjected.addAll(injectedMethods);
allInjected.sort(SORTER);
for (MemberElement memberElement : allInjected) {
final List<MemberElement> list = sortedInjections
.computeIfAbsent(memberElement.getDeclaringType(),
classElement -> new ArrayList<>()
);
list.add(memberElement);
}
for (List<MemberElement> members : sortedInjections.values()) {
members.sort((o1, o2) -> {
if (o1 instanceof FieldElement && o2 instanceof MethodElement) {
return 1;
} else if (o1 instanceof MethodElement && o1 instanceof FieldElement) {
return -1;
}
return 0;
});
}
for (List<MemberElement> list : sortedInjections.values()) {
for (MemberElement memberElement : list) {
if (memberElement instanceof FieldElement) {
InternalBeanElementField ibf = (InternalBeanElementField) memberElement;
ibf.<InternalBeanElementField>with(element ->
visitField(beanDefinitionWriter, element, element)
);
} else {
InternalBeanElementMethod ibm = (InternalBeanElementMethod) memberElement;
ibm.<InternalBeanElementMethod>with(element ->
beanDefinitionWriter.visitMethodInjectionPoint(
ibm.getDeclaringType(),
ibm,
ibm.isReflectionRequired(),
visitorContext
)
);
}
}
}
for (BeanMethodElement executableMethod : executableMethods) {
beanDefinitionWriter.visitExecutableMethod(
beanType,
executableMethod,
visitorContext
);
if (executableMethod.getAnnotationMetadata().isTrue(Executable.class, Executable.MEMBER_PROCESS_ON_STARTUP)) {
beanDefinitionWriter.setRequiresMethodProcessing(true);
}
}
}
/**
* Finish the given bean and write it to the output.
*
* @param classWriterOutputVisitor The output
* @param beanDefinitionWriter The writer
* @throws IOException If an error occurred
*/
protected void finalizeAndWriteBean(ClassWriterOutputVisitor classWriterOutputVisitor, BeanDefinitionVisitor beanDefinitionWriter) throws IOException {
beanDefinitionWriter.visitBeanDefinitionEnd();
beanDefinitionWriter.accept(classWriterOutputVisitor);
}
/**
* Configure the bean visitor for this builder.
*
* @param beanDefinitionWriter The bean visitor
* @return True if an error occurred
*/
protected boolean configureBeanVisitor(BeanDefinitionVisitor beanDefinitionWriter) {
if (exposedTypes != null) {
final AnnotationClassValue<?>[] annotationClassValues =
Arrays.stream(exposedTypes).map(ce -> new AnnotationClassValue<>(ce.getName())).toArray(AnnotationClassValue[]::new);
annotate(Bean.class, builder -> builder.member("typed", annotationClassValues));
}
if (typeArguments != null) {
beanDefinitionWriter.visitTypeArguments(AbstractBeanDefinitionBuilder.this.typeArguments);
}
Element producingElement = getProducingElement();
if (producingElement instanceof ClassElement) {
if (constructorElement == null) {
constructorElement = initConstructor(beanType);
}
if (constructorElement == null) {
visitorContext.fail("Cannot create associated bean with no accessible primary constructor. Consider supply the constructor with createWith(..)", originatingElement);
return true;
} else {
beanDefinitionWriter.visitBeanDefinitionConstructor(
constructorElement,
!constructorElement.isPublic(),
visitorContext
);
}
}
return false;
}
/**
* @return Creates the bean definition writer.
*/
protected BeanDefinitionVisitor createBeanDefinitionWriter() {
return new BeanDefinitionWriter(
this,
OriginatingElements.of(originatingElement),
visitorContext,
identifier
);
}
private void visitField(BeanDefinitionVisitor beanDefinitionWriter,
BeanFieldElement injectedField,
InternalBeanElementField ibf) {
if (injectedField.hasAnnotation(Value.class) || injectedField.hasAnnotation(Property.class)) {
beanDefinitionWriter.visitFieldValue(
injectedField.getDeclaringType(),
injectedField,
ibf.isReflectionRequired(),
ibf.isDeclaredNullable() || !InjectionPoint.isInjectionRequired(injectedField)
);
} else {
beanDefinitionWriter.visitFieldInjectionPoint(
injectedField.getDeclaringType(),
ibf,
ibf.isReflectionRequired(),
visitorContext
);
}
}
/**
* Add an annotation to the given metadata.
*
* @param annotationMetadata The annotation metadata
* @param annotationType the annotation type
* @param consumer The builder
* @param <T> The annotation generic type
*/
protected abstract <T extends Annotation> void annotate(AnnotationMetadata annotationMetadata, String annotationType, Consumer<AnnotationValueBuilder<T>> consumer);
/**
* Add an annotation to the given metadata.
*
* @param annotationMetadata The annotation metadata
* @param annotationValue The value
* @param <T> The annotation generic type
* @since 3.3.0
*/
protected abstract <T extends Annotation> void annotate(@NonNull AnnotationMetadata annotationMetadata, @NonNull AnnotationValue<T> annotationValue);
/**
* Remove a stereotype from the given metadata.
*
* @param annotationMetadata The metadata
* @param annotationType The stereotype
*/
protected abstract void removeStereotype(AnnotationMetadata annotationMetadata, String annotationType);
/**
* Remove an annotation if it matches the given condition.
*
* @param annotationMetadata The metadata
* @param predicate The predicate
* @param <T> The annotation type
*/
protected abstract <T extends Annotation> void removeAnnotationIf(AnnotationMetadata annotationMetadata, Predicate<AnnotationValue<T>> predicate);
/**
* Remove an annotation for the given name.
*
* @param annotationMetadata The metadata
* @param annotationType The type
*/
protected abstract void removeAnnotation(AnnotationMetadata annotationMetadata, String annotationType);
/**
* Super | AbstractBeanDefinitionBuilder |
java | assertj__assertj-core | assertj-core/src/test/java/org/assertj/core/api/throwable/ThrowableAssert_hasCause_Test.java | {
"start": 1126,
"end": 2576
} | class ____ extends ThrowableAssertBaseTest {
private final Throwable npe = new NullPointerException();
@Override
protected ThrowableAssert<Throwable> invoke_api_method() {
return assertions.hasCause(npe);
}
@Override
protected void verify_internal_effects() {
verify(throwables).assertHasCause(getInfo(assertions), getActual(assertions), npe);
}
@Test
void should_fail_if_actual_and_expected_cause_have_different_types() {
// GIVEN
Throwable actual = new IllegalArgumentException(new IllegalStateException());
// WHEN/THEN
assertThatExceptionOfType(AssertionError.class).isThrownBy(() -> assertThat(actual).hasCause(new NullPointerException()))
.withMessage(format("%n" +
"Expecting a cause with type:%n" +
" \"java.lang.NullPointerException\"%n" +
"but type was:%n" +
" \"java.lang.IllegalStateException\".%n%n" +
"Throwable that failed the check:%n"
+ escapePercent(getStackTrace(actual))));
}
}
| ThrowableAssert_hasCause_Test |
java | spring-projects__spring-security | config/src/test/java/org/springframework/security/config/annotation/web/configurers/AuthorizeHttpRequestsConfigurerTests.java | {
"start": 54783,
"end": 55128
} | class ____ {
@Bean
SecurityFilterChain filterChain(HttpSecurity http) throws Exception {
// @formatter:off
return http
.httpBasic(withDefaults())
.authorizeHttpRequests((authorize) -> authorize
.anyRequest().denyAll()
)
.build();
// @formatter:on
}
}
@Configuration
@EnableWebSecurity
static | DenyAllConfig |
java | netty__netty | codec-http2/src/main/java/io/netty/handler/codec/http2/Http2Exception.java | {
"start": 11611,
"end": 12263
} | class ____ extends Http2Exception {
private static final long serialVersionUID = 602472544416984384L;
private final int streamId;
StreamException(int streamId, Http2Error error, String message) {
super(error, message, ShutdownHint.NO_SHUTDOWN);
this.streamId = streamId;
}
StreamException(int streamId, Http2Error error, String message, Throwable cause) {
super(error, message, cause, ShutdownHint.NO_SHUTDOWN);
this.streamId = streamId;
}
public int streamId() {
return streamId;
}
}
public static final | StreamException |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/CommitResponse.java | {
"start": 1191,
"end": 1416
} | class ____ {
/**
* Create a Commit Response.
* @return Commit Response.
*/
@Private
@Unstable
public static CommitResponse newInstance() {
return Records.newRecord(CommitResponse.class);
}
}
| CommitResponse |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/query/sqm/tree/domain/SqmCorrelatedEntityJoin.java | {
"start": 730,
"end": 4011
} | class ____<L,R> extends SqmEntityJoin<L,R> implements SqmCorrelatedSingularValuedJoin<L, R> {
private final SqmCorrelatedRootJoin<L> correlatedRootJoin;
private final SqmEntityJoin<L,R> correlationParent;
public SqmCorrelatedEntityJoin(SqmEntityJoin<L,R> correlationParent) {
super(
correlationParent.getNavigablePath(),
correlationParent.getReferencedPathSource(),
correlationParent.getExplicitAlias(),
SqmJoinType.INNER,
correlationParent.getRoot()
);
this.correlatedRootJoin = SqmCorrelatedRootJoin.create( correlationParent, this );
this.correlationParent = correlationParent;
}
public SqmCorrelatedEntityJoin(
EntityDomainType<R> joinedEntityDescriptor,
@Nullable String alias,
SqmJoinType joinType,
SqmRoot<L> sqmRoot,
SqmCorrelatedRootJoin<L> correlatedRootJoin,
SqmEntityJoin<L,R> correlationParent) {
super( correlationParent.getNavigablePath(), joinedEntityDescriptor, alias, joinType, sqmRoot );
this.correlatedRootJoin = correlatedRootJoin;
this.correlationParent = correlationParent;
}
@Override
public SqmCorrelatedEntityJoin<L,R> copy(SqmCopyContext context) {
final SqmCorrelatedEntityJoin<L,R> existing = context.getCopy( this );
if ( existing != null ) {
return existing;
}
final SqmCorrelatedEntityJoin<L,R> path = context.registerCopy(
this,
new SqmCorrelatedEntityJoin<>(
getReferencedPathSource(),
getExplicitAlias(),
getSqmJoinType(),
getRoot().copy( context ),
correlatedRootJoin.copy( context ),
correlationParent.copy( context )
)
);
copyTo( path, context );
return path;
}
@Override
public SqmRoot<?> findRoot() {
return getCorrelatedRoot();
}
@Override
public <X> X accept(SemanticQueryWalker<X> walker) {
return walker.visitCorrelatedEntityJoin(this);
}
@Override
public SqmEntityJoin<L,R> getCorrelationParent() {
return correlationParent;
}
@Override
public SqmPath<R> getWrappedPath() {
return correlationParent;
}
@Override
public boolean isCorrelated() {
return true;
}
@Override
public SqmRoot<L> getCorrelatedRoot() {
return correlatedRootJoin;
}
@Override
public SqmCorrelatedEntityJoin<L,R> createCorrelation() {
return new SqmCorrelatedEntityJoin<>( this );
}
@Override
public SqmCorrelatedEntityJoin<L,R> makeCopy(SqmCreationProcessingState creationProcessingState) {
final SqmPathRegistry pathRegistry = creationProcessingState.getPathRegistry();
return new SqmCorrelatedEntityJoin<>(
getReferencedPathSource(),
getExplicitAlias(),
getSqmJoinType(),
pathRegistry.resolveFromByPath( getRoot().getNavigablePath() ),
pathRegistry.resolveFromByPath( correlatedRootJoin.getNavigablePath() ),
pathRegistry.resolveFromByPath( correlationParent.getNavigablePath() )
);
}
@Override
public boolean deepEquals(SqmFrom<?, ?> other) {
return super.deepEquals( other )
&& other instanceof SqmCorrelatedEntityJoin<?, ?> that
&& correlationParent.equals( that.correlationParent );
}
@Override
public boolean isDeepCompatible(SqmFrom<?, ?> other) {
return super.isDeepCompatible( other )
&& other instanceof SqmCorrelatedEntityJoin<?, ?> that
&& correlationParent.isCompatible( that.correlationParent );
}
}
| SqmCorrelatedEntityJoin |
java | elastic__elasticsearch | test/framework/src/main/java/org/elasticsearch/cluster/routing/GlobalRoutingTableTestHelper.java | {
"start": 898,
"end": 4738
} | class ____ {
/**
* Construct a new {@link GlobalRoutingTable} based on all the projects and indices in {@code metadata}.
* Each index is passed to the {@code indexConsumer} along with a builder for that project's routing table
*/
public static GlobalRoutingTable buildRoutingTable(Metadata metadata, BiConsumer<RoutingTable.Builder, IndexMetadata> indexConsumer) {
ImmutableOpenMap.Builder<ProjectId, RoutingTable> projectRouting = ImmutableOpenMap.builder(metadata.projects().size());
metadata.projects().forEach((projectId, projectMetadata) -> {
final RoutingTable.Builder rtBuilder = RoutingTable.builder(TestShardRoutingRoleStrategies.DEFAULT_ROLE_ONLY);
projectMetadata.indices().values().forEach(indexMetadata -> indexConsumer.accept(rtBuilder, indexMetadata));
projectRouting.put(projectId, rtBuilder.build());
});
return new GlobalRoutingTable(projectRouting.build());
}
/**
* Update the existing {@link GlobalRoutingTable}
* @param newIndicesConsumer Called for indices that do not exist in the routing table
*/
public static GlobalRoutingTable updateRoutingTable(
ClusterState clusterState,
BiConsumer<RoutingTable.Builder, IndexMetadata> newIndicesConsumer
) {
return updateRoutingTable(clusterState, newIndicesConsumer, (ignoreBuilder, ignoreIndex) -> {
// no-op
});
}
/**
* Update the existing {@link GlobalRoutingTable}
* @param newIndicesConsumer Called for indices that do not exist in the routing table
* @param updateIndicesConsumer Called for indices that already exist in the routing table
*/
public static GlobalRoutingTable updateRoutingTable(
ClusterState clusterState,
BiConsumer<RoutingTable.Builder, IndexMetadata> newIndicesConsumer,
BiConsumer<RoutingTable.Builder, IndexMetadata> updateIndicesConsumer
) {
final GlobalRoutingTable.Builder globalBuilder = GlobalRoutingTable.builder(clusterState.globalRoutingTable());
clusterState.metadata().projects().forEach((projectId, projectMetadata) -> {
final RoutingTable existingRoutingTable = clusterState.routingTable(projectId);
final RoutingTable.Builder rtBuilder = existingRoutingTable == null
? RoutingTable.builder(TestShardRoutingRoleStrategies.DEFAULT_ROLE_ONLY)
: RoutingTable.builder(TestShardRoutingRoleStrategies.DEFAULT_ROLE_ONLY, existingRoutingTable);
projectMetadata.indices().values().forEach(indexMetadata -> {
if (existingRoutingTable != null && existingRoutingTable.hasIndex(indexMetadata.getIndex())) {
updateIndicesConsumer.accept(rtBuilder, indexMetadata);
} else {
newIndicesConsumer.accept(rtBuilder, indexMetadata);
}
});
globalBuilder.put(projectId, rtBuilder.build());
});
return globalBuilder.build();
}
private GlobalRoutingTableTestHelper() {
// Utility class
}
public static GlobalRoutingTable routingTable(ProjectId projectId, RoutingTable.Builder projectRouting) {
return routingTable(projectId, projectRouting.build());
}
public static GlobalRoutingTable routingTable(ProjectId projectId, RoutingTable projectRouting) {
return GlobalRoutingTable.builder().put(projectId, projectRouting).build();
}
public static GlobalRoutingTable routingTable(ProjectId projectId, IndexRoutingTable... indexRouting) {
final RoutingTable.Builder rt = RoutingTable.builder();
for (IndexRoutingTable irt : indexRouting) {
rt.add(irt);
}
return routingTable(projectId, rt);
}
}
| GlobalRoutingTableTestHelper |
java | alibaba__nacos | client/src/main/java/com/alibaba/nacos/client/config/common/ConfigConstants.java | {
"start": 729,
"end": 1168
} | class ____ {
public static final String TENANT = "tenant";
public static final String DATA_ID = "dataId";
public static final String GROUP = "group";
public static final String CONTENT = "content";
public static final String CONFIG_TYPE = "configType";
public static final String ENCRYPTED_DATA_KEY = "encryptedDataKey";
public static final String TYPE = "type";
}
| ConfigConstants |
java | apache__camel | core/camel-core/src/test/java/org/apache/camel/AutoCloseableTest.java | {
"start": 1089,
"end": 1926
} | class ____ {
@Test
public void servicesShouldBeAutoCloseable() throws Exception {
CamelContext usedContext = null;
ProducerTemplate usedProducer = null;
try (CamelContext context = new DefaultCamelContext();
ProducerTemplate producer = context.createProducerTemplate()) {
usedContext = context;
usedProducer = producer;
context.addRoutes(new RouteBuilder() {
public void configure() {
from("direct:start").log("hello ${body}");
}
});
context.start();
producer.sendBody("direct:start", "word");
}
assertThat(usedContext.isStopped()).isTrue();
assertThat(((DefaultProducerTemplate) usedProducer).isStopped()).isTrue();
}
}
| AutoCloseableTest |
java | mybatis__mybatis-3 | src/main/java/org/apache/ibatis/session/SqlSession.java | {
"start": 1062,
"end": 11422
} | interface ____ extends Closeable {
/**
* Retrieve a single row mapped from the statement key.
*
* @param <T>
* the returned object type
* @param statement
* the statement
*
* @return Mapped object
*/
<T> T selectOne(String statement);
/**
* Retrieve a single row mapped from the statement key and parameter.
*
* @param <T>
* the returned object type
* @param statement
* Unique identifier matching the statement to use.
* @param parameter
* A parameter object to pass to the statement.
*
* @return Mapped object
*/
<T> T selectOne(String statement, Object parameter);
/**
* Retrieve a list of mapped objects from the statement key.
*
* @param <E>
* the returned list element type
* @param statement
* Unique identifier matching the statement to use.
*
* @return List of mapped object
*/
<E> List<E> selectList(String statement);
/**
* Retrieve a list of mapped objects from the statement key and parameter.
*
* @param <E>
* the returned list element type
* @param statement
* Unique identifier matching the statement to use.
* @param parameter
* A parameter object to pass to the statement.
*
* @return List of mapped object
*/
<E> List<E> selectList(String statement, Object parameter);
/**
* Retrieve a list of mapped objects from the statement key and parameter, within the specified row bounds.
*
* @param <E>
* the returned list element type
* @param statement
* Unique identifier matching the statement to use.
* @param parameter
* A parameter object to pass to the statement.
* @param rowBounds
* Bounds to limit object retrieval
*
* @return List of mapped object
*/
<E> List<E> selectList(String statement, Object parameter, RowBounds rowBounds);
/**
* The selectMap is a special case in that it is designed to convert a list of results into a Map based on one of the
* properties in the resulting objects. Eg. Return a of Map[Integer,Author] for selectMap("selectAuthors","id")
*
* @param <K>
* the returned Map keys type
* @param <V>
* the returned Map values type
* @param statement
* Unique identifier matching the statement to use.
* @param mapKey
* The property to use as key for each value in the list.
*
* @return Map containing key pair data.
*/
<K, V> Map<K, V> selectMap(String statement, String mapKey);
/**
* The selectMap is a special case in that it is designed to convert a list of results into a Map based on one of the
* properties in the resulting objects.
*
* @param <K>
* the returned Map keys type
* @param <V>
* the returned Map values type
* @param statement
* Unique identifier matching the statement to use.
* @param parameter
* A parameter object to pass to the statement.
* @param mapKey
* The property to use as key for each value in the list.
*
* @return Map containing key pair data.
*/
<K, V> Map<K, V> selectMap(String statement, Object parameter, String mapKey);
/**
* The selectMap is a special case in that it is designed to convert a list of results into a Map based on one of the
* properties in the resulting objects.
*
* @param <K>
* the returned Map keys type
* @param <V>
* the returned Map values type
* @param statement
* Unique identifier matching the statement to use.
* @param parameter
* A parameter object to pass to the statement.
* @param mapKey
* The property to use as key for each value in the list.
* @param rowBounds
* Bounds to limit object retrieval
*
* @return Map containing key pair data.
*/
<K, V> Map<K, V> selectMap(String statement, Object parameter, String mapKey, RowBounds rowBounds);
/**
* A Cursor offers the same results as a List, except it fetches data lazily using an Iterator.
*
* @param <T>
* the returned cursor element type.
* @param statement
* Unique identifier matching the statement to use.
*
* @return Cursor of mapped objects
*/
<T> Cursor<T> selectCursor(String statement);
/**
* A Cursor offers the same results as a List, except it fetches data lazily using an Iterator.
*
* @param <T>
* the returned cursor element type.
* @param statement
* Unique identifier matching the statement to use.
* @param parameter
* A parameter object to pass to the statement.
*
* @return Cursor of mapped objects
*/
<T> Cursor<T> selectCursor(String statement, Object parameter);
/**
* A Cursor offers the same results as a List, except it fetches data lazily using an Iterator.
*
* @param <T>
* the returned cursor element type.
* @param statement
* Unique identifier matching the statement to use.
* @param parameter
* A parameter object to pass to the statement.
* @param rowBounds
* Bounds to limit object retrieval
*
* @return Cursor of mapped objects
*/
<T> Cursor<T> selectCursor(String statement, Object parameter, RowBounds rowBounds);
/**
* Retrieve a single row mapped from the statement key and parameter using a {@code ResultHandler}.
*
* @param statement
* Unique identifier matching the statement to use.
* @param parameter
* A parameter object to pass to the statement.
* @param handler
* ResultHandler that will handle each retrieved row
*/
void select(String statement, Object parameter, ResultHandler handler);
/**
* Retrieve a single row mapped from the statement using a {@code ResultHandler}.
*
* @param statement
* Unique identifier matching the statement to use.
* @param handler
* ResultHandler that will handle each retrieved row
*/
void select(String statement, ResultHandler handler);
/**
* Retrieve a single row mapped from the statement key and parameter using a {@code ResultHandler} and
* {@code RowBounds}.
*
* @param statement
* Unique identifier matching the statement to use.
* @param parameter
* the parameter
* @param rowBounds
* RowBound instance to limit the query results
* @param handler
* ResultHandler that will handle each retrieved row
*/
void select(String statement, Object parameter, RowBounds rowBounds, ResultHandler handler);
/**
* Execute an insert statement.
*
* @param statement
* Unique identifier matching the statement to execute.
*
* @return int The number of rows affected by the insert.
*/
int insert(String statement);
/**
* Execute an insert statement with the given parameter object. Any generated autoincrement values or selectKey
* entries will modify the given parameter object properties. Only the number of rows affected will be returned.
*
* @param statement
* Unique identifier matching the statement to execute.
* @param parameter
* A parameter object to pass to the statement.
*
* @return int The number of rows affected by the insert.
*/
int insert(String statement, Object parameter);
/**
* Execute an update statement. The number of rows affected will be returned.
*
* @param statement
* Unique identifier matching the statement to execute.
*
* @return int The number of rows affected by the update.
*/
int update(String statement);
/**
* Execute an update statement. The number of rows affected will be returned.
*
* @param statement
* Unique identifier matching the statement to execute.
* @param parameter
* A parameter object to pass to the statement.
*
* @return int The number of rows affected by the update.
*/
int update(String statement, Object parameter);
/**
* Execute a delete statement. The number of rows affected will be returned.
*
* @param statement
* Unique identifier matching the statement to execute.
*
* @return int The number of rows affected by the delete.
*/
int delete(String statement);
/**
* Execute a delete statement. The number of rows affected will be returned.
*
* @param statement
* Unique identifier matching the statement to execute.
* @param parameter
* A parameter object to pass to the statement.
*
* @return int The number of rows affected by the delete.
*/
int delete(String statement, Object parameter);
/**
* Flushes batch statements and commits database connection. Note that database connection will not be committed if no
* updates/deletes/inserts were called. To force the commit call {@link SqlSession#commit(boolean)}
*/
void commit();
/**
* Flushes batch statements and commits database connection.
*
* @param force
* forces connection commit
*/
void commit(boolean force);
/**
* Discards pending batch statements and rolls database connection back. Note that database connection will not be
* rolled back if no updates/deletes/inserts were called. To force the rollback call
* {@link SqlSession#rollback(boolean)}
*/
void rollback();
/**
* Discards pending batch statements and rolls database connection back. Note that database connection will not be
* rolled back if no updates/deletes/inserts were called.
*
* @param force
* forces connection rollback
*/
void rollback(boolean force);
/**
* Flushes batch statements.
*
* @return BatchResult list of updated records
*
* @since 3.0.6
*/
List<BatchResult> flushStatements();
/**
* Closes the session.
*/
@Override
void close();
/**
* Clears local session cache.
*/
void clearCache();
/**
* Retrieves current configuration.
*
* @return Configuration
*/
Configuration getConfiguration();
/**
* Retrieves a mapper.
*
* @param <T>
* the mapper type
* @param type
* Mapper | SqlSession |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java | {
"start": 71526,
"end": 74337
} | class ____ implements Mapper.TypeParser {
private final BiFunction<String, MappingParserContext, Builder> builderFunction;
private final BiConsumer<String, MappingParserContext> contextValidator;
private final IndexVersion minimumCompatibilityVersion; // see Mapper.TypeParser#supportsVersion()
/**
* Creates a new TypeParser
* @param builderFunction a function that produces a Builder from a name and parsercontext
*/
public TypeParser(BiFunction<String, MappingParserContext, Builder> builderFunction) {
this(builderFunction, (n, c) -> {}, IndexVersions.MINIMUM_READONLY_COMPATIBLE);
}
/**
* Variant of {@link #TypeParser(BiFunction)} that allows to define a minimumCompatibilityVersion to
* allow parsing mapping definitions of legacy indices (see {@link Mapper.TypeParser#supportsVersion(IndexVersion)}).
*/
private TypeParser(BiFunction<String, MappingParserContext, Builder> builderFunction, IndexVersion minimumCompatibilityVersion) {
this(builderFunction, (n, c) -> {}, minimumCompatibilityVersion);
}
public TypeParser(
BiFunction<String, MappingParserContext, Builder> builderFunction,
BiConsumer<String, MappingParserContext> contextValidator
) {
this(builderFunction, contextValidator, IndexVersions.MINIMUM_READONLY_COMPATIBLE);
}
public TypeParser(
BiFunction<String, MappingParserContext, Builder> builderFunction,
List<BiConsumer<String, MappingParserContext>> contextValidator
) {
this(builderFunction, (n, c) -> contextValidator.forEach(v -> v.accept(n, c)), IndexVersions.MINIMUM_READONLY_COMPATIBLE);
}
private TypeParser(
BiFunction<String, MappingParserContext, Builder> builderFunction,
BiConsumer<String, MappingParserContext> contextValidator,
IndexVersion minimumCompatibilityVersion
) {
this.builderFunction = builderFunction;
this.contextValidator = contextValidator;
this.minimumCompatibilityVersion = minimumCompatibilityVersion;
}
@Override
public Builder parse(String name, Map<String, Object> node, MappingParserContext parserContext) throws MapperParsingException {
contextValidator.accept(name, parserContext);
Builder builder = builderFunction.apply(name, parserContext);
builder.parse(name, parserContext, node);
return builder;
}
@Override
public boolean supportsVersion(IndexVersion indexCreatedVersion) {
return indexCreatedVersion.onOrAfter(minimumCompatibilityVersion);
}
}
}
| TypeParser |
java | assertj__assertj-core | assertj-core/src/main/java/org/assertj/core/error/ShouldNotMatchPattern.java | {
"start": 850,
"end": 1473
} | class ____ extends BasicErrorMessageFactory {
/**
* Creates a new <code>{@link ShouldStartWith}</code>.
* @param actual the actual value in the failed assertion.
* @param pattern a regular expression pattern.
* @return the created {@code ErrorMessageFactory}.
*/
public static ErrorMessageFactory shouldNotMatch(CharSequence actual, CharSequence pattern) {
return new ShouldNotMatchPattern(actual, pattern);
}
private ShouldNotMatchPattern(CharSequence actual, CharSequence pattern) {
super("%nExpecting actual:%n %s%nnot to match pattern:%n %s", actual, pattern);
}
}
| ShouldNotMatchPattern |
java | apache__camel | dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/GoogleMailEndpointBuilderFactory.java | {
"start": 47976,
"end": 48313
} | class ____ extends AbstractEndpointBuilder implements GoogleMailEndpointBuilder, AdvancedGoogleMailEndpointBuilder {
public GoogleMailEndpointBuilderImpl(String path) {
super(componentName, path);
}
}
return new GoogleMailEndpointBuilderImpl(path);
}
} | GoogleMailEndpointBuilderImpl |
java | spring-cloud__spring-cloud-gateway | spring-cloud-gateway-server-webflux/src/test/java/org/springframework/cloud/gateway/filter/ratelimit/RedisRateLimiterLuaScriptTests.java | {
"start": 1710,
"end": 5744
} | class ____ {
static final String KEY_PREFIX = "redis-rate-limiter-lua-script-tests";
static final Long REDIS_LUA_MAX_SAFE_INTEGER = 9007199254740991L;
@Container
public static GenericContainer redis = new GenericContainer<>("redis:5.0.14-alpine").withExposedPorts(6379);
@Autowired
ReactiveStringRedisTemplate redisTemplate;
@Autowired
RedisScript<List<Long>> redisScript;
@DynamicPropertySource
static void containerProperties(DynamicPropertyRegistry registry) {
registry.add("spring.data.redis.host", redis::getContainerIpAddress);
registry.add("spring.data.redis.port", redis::getFirstMappedPort);
}
static List<String> getKeys(String id) {
String prefix = KEY_PREFIX + ".{" + id;
String tokens = prefix + "}.tokens";
String timestamp = prefix + "}.timestamp";
return Arrays.asList(tokens, timestamp);
}
static List<String> getArgs(long rate, long capacity, long now, long requested) {
return Arrays.asList(rate + "", capacity + "", now + "", requested + "");
}
@Test
public void testNewAccess() {
long rate = 1;
long capacity = 10;
long now = System.currentTimeMillis();
long requested = 1;
List<String> keys = getKeys("new_access");
List<String> args = getArgs(rate, capacity, now, requested);
List<Long> result = redisTemplate.execute(redisScript, keys, args).blockFirst();
assertThat(result.get(0)).isEqualTo(1);
assertThat(result.get(1)).isEqualTo(9);
for (String key : keys) {
long ttl = redisTemplate.getExpire(key).map(duration -> duration.getSeconds()).block();
long fillTime = (capacity / rate);
assertThat(ttl).isGreaterThanOrEqualTo(fillTime);
}
}
@Test
public void testTokenFilled() {
long rate = 1;
long capacity = 10;
long now = System.currentTimeMillis();
long requested = 5;
List<String> keys = getKeys("token_filled");
List<String> args = getArgs(rate, capacity, now, requested);
redisTemplate.execute(redisScript, keys, args).blockFirst();
now = now + 3;
args = getArgs(rate, capacity, now, requested);
List<Long> result = redisTemplate.execute(redisScript, keys, args).blockFirst();
assertThat(result.get(0)).isEqualTo(1);
assertThat(result.get(1)).isEqualTo(3);
for (String key : keys) {
long ttl = redisTemplate.getExpire(key).map(duration -> duration.getSeconds()).block();
long fillTime = (capacity / rate);
assertThat(ttl).isGreaterThanOrEqualTo(fillTime);
}
}
@Test
public void testAfterTillTime() {
long rate = 1;
long capacity = 10;
long now = System.currentTimeMillis();
long requested = 1;
List<String> keys = getKeys("after_fill_time");
List<String> args = getArgs(rate, capacity, now, requested);
redisTemplate.execute(redisScript, keys, args).blockFirst();
long fillTime = capacity / rate;
now = now + fillTime;
args = getArgs(rate, capacity, now, requested);
List<Long> result = redisTemplate.execute(redisScript, keys, args).blockFirst();
assertThat(result.get(0)).isEqualTo(1);
assertThat(result.get(1)).isEqualTo(9);
}
@Test
public void testTokensNotEnough() {
long rate = 1;
long capacity = 10;
long now = System.currentTimeMillis();
long requested = 20;
List<String> keys = getKeys("tokens_not_enough");
List<String> args = getArgs(rate, capacity, now, requested);
List<Long> result = redisTemplate.execute(redisScript, keys, args).blockFirst();
assertThat(result.get(0)).isEqualTo(0);
assertThat(result.get(1)).isEqualTo(10);
}
@Test
public void testCapacityExceedsMaxInt() {
long rate = 1;
long capacity = REDIS_LUA_MAX_SAFE_INTEGER;
long now = System.currentTimeMillis();
long requested = 1;
List<String> keys = getKeys("capacity_exceeds_max_int");
List<String> args = getArgs(rate, capacity, now, requested);
List<Long> result = redisTemplate.execute(redisScript, keys, args).blockFirst();
assertThat(result.get(0)).isEqualTo(1);
assertThat(result.get(1)).isEqualTo(REDIS_LUA_MAX_SAFE_INTEGER - 1);
}
@EnableAutoConfiguration
@SpringBootConfiguration
public static | RedisRateLimiterLuaScriptTests |
java | spring-projects__spring-data-jpa | spring-data-jpa/src/main/java/org/springframework/data/jpa/repository/query/Jpa21Utils.java | {
"start": 1503,
"end": 7599
} | class ____ {
private Jpa21Utils() {
// prevent instantiation
}
public static QueryHints getFetchGraphHint(EntityManager em, JpaEntityGraph entityGraph, Class<?> entityType) {
MutableQueryHints result = new MutableQueryHints();
EntityGraph<?> graph = tryGetFetchGraph(em, entityGraph, entityType);
result.add(entityGraph.getType().getKey(), graph);
return result;
}
/**
* Adds a JPA 2.1 fetch-graph or load-graph hint to the given {@link Query} if running under JPA 2.1.
*
* @see <a href="https://jakarta.ee/specifications/persistence/3.1/jakarta-persistence-spec-3.1#a2814">Jakarta
* Persistence Specification - Use of Entity Graphs in find and query operations</a>
* @param em must not be {@literal null}.
* @param jpaEntityGraph must not be {@literal null}.
* @param entityType must not be {@literal null}.
* @return the {@link EntityGraph} described by the given {@code entityGraph}.
*/
private static EntityGraph<?> tryGetFetchGraph(EntityManager em, JpaEntityGraph jpaEntityGraph, Class<?> entityType) {
Assert.notNull(em, "EntityManager must not be null");
Assert.notNull(jpaEntityGraph, "EntityGraph must not be null");
Assert.notNull(entityType, "EntityType must not be null");
if (StringUtils.hasText(jpaEntityGraph.getName())) {
try {
// check whether an entityGraph with that name is already registered.
return em.getEntityGraph(jpaEntityGraph.getName());
} catch (Exception ignore) {}
}
return createDynamicEntityGraph(em, jpaEntityGraph, entityType);
}
/**
* Creates a dynamic {@link EntityGraph} from the given {@link JpaEntityGraph} information.
*
* @param em must not be {@literal null}.
* @param jpaEntityGraph must not be {@literal null}.
* @param entityType must not be {@literal null}.
* @return
* @since 1.9
*/
private static EntityGraph<?> createDynamicEntityGraph(EntityManager em, JpaEntityGraph jpaEntityGraph,
Class<?> entityType) {
Assert.notNull(em, "EntityManager must not be null");
Assert.notNull(jpaEntityGraph, "JpaEntityGraph must not be null");
Assert.notNull(entityType, "Entity type must not be null");
EntityGraph<?> entityGraph = em.createEntityGraph(entityType);
configureFetchGraphFrom(jpaEntityGraph, entityGraph);
return entityGraph;
}
/**
* Configures the given {@link EntityGraph} with the fetch graph information stored in {@link JpaEntityGraph}.
*
* @param jpaEntityGraph
* @param entityGraph
*/
static void configureFetchGraphFrom(JpaEntityGraph jpaEntityGraph, EntityGraph<?> entityGraph) {
List<String> attributePaths = new ArrayList<>(jpaEntityGraph.getAttributePaths());
// Sort to ensure that the intermediate entity subgraphs are created accordingly.
Collections.sort(attributePaths);
for (String path : attributePaths) {
String[] pathComponents = StringUtils.delimitedListToStringArray(path, ".");
createGraph(pathComponents, 0, entityGraph, null);
}
}
private static void createGraph(String[] pathComponents, int offset, EntityGraph<?> root,
@Nullable Subgraph<?> parent) {
String attributeName = pathComponents[offset];
// we found our leaf property, now let's see if it already exists and add it if not
if (pathComponents.length - 1 == offset) {
if (parent == null && !exists(attributeName, root.getAttributeNodes())) {
root.addAttributeNodes(attributeName);
} else if (parent != null && !exists(attributeName, parent.getAttributeNodes())) {
parent.addAttributeNodes(attributeName);
}
return;
}
AttributeNode<?> node = findAttributeNode(attributeName, root, parent);
if (node != null) {
Subgraph<?> subgraph = getSubgraph(node);
if (subgraph == null) {
subgraph = parent != null ? parent.addSubgraph(attributeName) : root.addSubgraph(attributeName);
}
createGraph(pathComponents, offset + 1, root, subgraph);
return;
}
if (parent == null) {
createGraph(pathComponents, offset + 1, root, root.addSubgraph(attributeName));
} else {
createGraph(pathComponents, offset + 1, root, parent.addSubgraph(attributeName));
}
}
/**
* Checks the given {@link List} of {@link AttributeNode}s for the existence of an {@link AttributeNode} matching the
* given {@literal attributeNodeName}.
*
* @param attributeNodeName
* @param nodes
* @return
*/
private static boolean exists(String attributeNodeName, List<AttributeNode<?>> nodes) {
return findAttributeNode(attributeNodeName, nodes) != null;
}
/**
* Find the {@link AttributeNode} matching the given {@literal attributeNodeName} in given {@link Subgraph} or
* {@link EntityGraph} favoring matches {@link Subgraph} over {@link EntityGraph}.
*
* @param attributeNodeName
* @param entityGraph
* @param parent
* @return {@literal null} if not found.
*/
private static @Nullable AttributeNode<?> findAttributeNode(String attributeNodeName, EntityGraph<?> entityGraph,
@Nullable Subgraph<?> parent) {
return findAttributeNode(attributeNodeName,
parent != null ? parent.getAttributeNodes() : entityGraph.getAttributeNodes());
}
/**
* Find the {@link AttributeNode} matching the given {@literal attributeNodeName} in given {@link List} of
* {@link AttributeNode}s.
*
* @param attributeNodeName
* @param nodes
* @return {@literal null} if not found.
*/
private static @Nullable AttributeNode<?> findAttributeNode(String attributeNodeName, List<AttributeNode<?>> nodes) {
for (AttributeNode<?> node : nodes) {
if (ObjectUtils.nullSafeEquals(node.getAttributeName(), attributeNodeName)) {
return node;
}
}
return null;
}
/**
* Extracts the first {@link Subgraph} from the given {@link AttributeNode}. Ignores any potential different
* {@link Subgraph}s registered for more concrete {@link Class}es as the dynamically created graph does not
* distinguish between those.
*
* @param node
* @return
*/
private static @Nullable Subgraph<?> getSubgraph(AttributeNode<?> node) {
return node.getSubgraphs().isEmpty() ? null : node.getSubgraphs().values().iterator().next();
}
}
| Jpa21Utils |
java | alibaba__druid | core/src/main/java/com/alibaba/druid/sql/dialect/mysql/ast/expr/MySqlExpr.java | {
"start": 721,
"end": 761
} | interface ____ extends SQLExpr {
}
| MySqlExpr |
java | FasterXML__jackson-databind | src/test/java/tools/jackson/databind/ser/jdk/VectorsAsBinarySerTest.java | {
"start": 538,
"end": 924
} | class ____ extends DatabindTestUtil
{
private final static float[] FLOAT_VECTOR = new float[] { 1.0f, 0.5f, -1.25f };
private final static String FLOAT_VECTOR_STR = "[1.0,0.5,-1.25]";
private final static double[] DOUBLE_VECTOR = new double[] { -1.0, 1.5, 0.0125 };
private final static String DOUBLE_VECTOR_STR = "[-1.0,1.5,0.0125]";
static | VectorsAsBinarySerTest |
java | alibaba__nacos | core/src/main/java/com/alibaba/nacos/core/paramcheck/impl/InstanceRequestParamExtractor.java | {
"start": 1100,
"end": 2020
} | class ____ extends AbstractRpcParamExtractor {
@Override
public List<ParamInfo> extractParam(Request request) {
InstanceRequest req = (InstanceRequest) request;
ParamInfo paramInfo = new ParamInfo();
paramInfo.setNamespaceId(req.getNamespace());
paramInfo.setServiceName(req.getServiceName());
paramInfo.setGroup(req.getGroupName());
Instance instance = req.getInstance();
ArrayList<ParamInfo> paramInfos = new ArrayList<>();
if (instance == null) {
paramInfos.add(paramInfo);
return paramInfos;
}
paramInfo.setIp(instance.getIp());
paramInfo.setPort(String.valueOf(instance.getPort()));
paramInfo.setCluster(instance.getClusterName());
paramInfo.setMetadata(instance.getMetadata());
paramInfos.add(paramInfo);
return paramInfos;
}
}
| InstanceRequestParamExtractor |
java | quarkusio__quarkus | devtools/cli/src/main/java/io/quarkus/cli/Test.java | {
"start": 1520,
"end": 4555
} | class ____.",
"If continuous testing is disabled then the value is passed as-is to the underlying build tool." })
String filter;
@Parameters(description = "Parameters passed to the application.")
List<String> params = new ArrayList<>();
@Override
public Integer call() {
try {
output.debug("Run project in test mode with initial parameters: %s", this);
output.throwIfUnmatchedArguments(spec.commandLine());
BuildSystemRunner runner = getRunner();
if (runOnce) {
BuildOptions buildOptions = new BuildOptions();
buildOptions.clean = testOptions.clean;
buildOptions.offline = testOptions.offline;
buildOptions.skipTests = !testOptions.runTests;
BuildCommandArgs commandArgs = runner.prepareTest(buildOptions, new RunModeOption(), params, filter);
if (testOptions.isDryRun()) {
dryRunTest(spec.commandLine().getHelp(), runner.getBuildTool(), commandArgs, false);
return CommandLine.ExitCode.OK;
}
return runner.run(commandArgs);
}
if (filter != null) {
params.add("-Dquarkus.test.include-pattern=" + filter);
}
List<Supplier<BuildSystemRunner.BuildCommandArgs>> commandArgs = runner.prepareDevTestMode(
false, testOptions, debugOptions, params);
if (testOptions.isDryRun()) {
dryRunTest(spec.commandLine().getHelp(), runner.getBuildTool(), commandArgs.iterator().next().get(), true);
return CommandLine.ExitCode.OK;
}
int ret = 1;
for (Supplier<BuildSystemRunner.BuildCommandArgs> i : commandArgs) {
ret = runner.run(i.get());
if (ret != 0) {
return ret;
}
}
return ret;
} catch (Exception e) {
return output.handleCommandException(e,
"Unable to launch project in test mode: " + e.getMessage());
}
}
void dryRunTest(CommandLine.Help help, BuildTool buildTool, BuildSystemRunner.BuildCommandArgs args, boolean isContinuous) {
output.printText("\nRun current project in" + (isContinuous ? " continuous" : "") + " test mode\n",
"\t" + projectRoot().toString());
Map<String, String> dryRunOutput = new TreeMap<>();
dryRunOutput.put("Build tool", buildTool.name());
output.info(help.createTextTable(dryRunOutput).toString());
output.printText("\nCommand line:\n",
args.showCommand());
}
@Override
public String toString() {
return "Test [debugOptions=" + debugOptions
+ ", testOptions=" + testOptions
+ ", properties=" + propertiesOptions.properties
+ ", output=" + output
+ ", params=" + params + "]";
}
}
| name |
java | apache__kafka | clients/src/main/java/org/apache/kafka/common/security/oauthbearer/internals/expiring/ExpiringCredentialRefreshingLogin.java | {
"start": 2737,
"end": 9389
} | class ____ implements Runnable {
@Override
public void run() {
log.info("[Principal={}]: Expiring credential re-login thread started.", principalLogText());
while (true) {
/*
* Refresh thread's main loop. Each expiring credential lives for one iteration
* of the loop. Thread will exit if the loop exits from here.
*/
long nowMs = currentMs();
Long nextRefreshMs = refreshMs(nowMs);
if (nextRefreshMs == null) {
loginContextFactory.refresherThreadDone();
return;
}
// safety check motivated by KAFKA-7945,
// should generally never happen except due to a bug
if (nextRefreshMs < nowMs) {
log.warn("[Principal={}]: Expiring credential re-login sleep time was calculated to be in the past! Will explicitly adjust. ({})", principalLogText(),
new Date(nextRefreshMs));
nextRefreshMs = nowMs + 10 * 1000; // refresh in 10 seconds
}
log.info("[Principal={}]: Expiring credential re-login sleeping until: {}", principalLogText(),
new Date(nextRefreshMs));
time.sleep(nextRefreshMs - nowMs);
if (Thread.currentThread().isInterrupted()) {
log.info("[Principal={}]: Expiring credential re-login thread has been interrupted and will exit.",
principalLogText());
loginContextFactory.refresherThreadDone();
return;
}
while (true) {
/*
* Perform a re-login over and over again with some intervening delay
* unless/until either the refresh succeeds or we are interrupted.
*/
try {
reLogin();
break; // success
} catch (ExitRefresherThreadDueToIllegalStateException e) {
log.error(e.getMessage(), e);
loginContextFactory.refresherThreadDone();
return;
} catch (LoginException loginException) {
log.warn(String.format(
"[Principal=%s]: LoginException during login retry; will sleep %d seconds before trying again.",
principalLogText(), DELAY_SECONDS_BEFORE_NEXT_RETRY_WHEN_RELOGIN_FAILS),
loginException);
// Sleep and allow loop to run/try again unless interrupted
time.sleep(DELAY_SECONDS_BEFORE_NEXT_RETRY_WHEN_RELOGIN_FAILS * 1000);
if (Thread.currentThread().isInterrupted()) {
log.error(
"[Principal={}]: Interrupted while trying to perform a subsequent expiring credential re-login after one or more initial re-login failures: re-login thread exiting now: {}",
principalLogText(), loginException.getMessage());
loginContextFactory.refresherThreadDone();
return;
}
}
}
}
}
}
private static final Logger log = LoggerFactory.getLogger(ExpiringCredentialRefreshingLogin.class);
private static final long DELAY_SECONDS_BEFORE_NEXT_RETRY_WHEN_RELOGIN_FAILS = 10L;
private static final Random RNG = new Random();
private final Time time;
private Thread refresherThread;
private final LoginContextFactory loginContextFactory;
private final String contextName;
private final Configuration configuration;
private final ExpiringCredentialRefreshConfig expiringCredentialRefreshConfig;
private final AuthenticateCallbackHandler callbackHandler;
// mark volatile due to existence of public subject() method
private volatile Subject subject = null;
private boolean hasExpiringCredential = false;
private String principalName = null;
private LoginContext loginContext = null;
private ExpiringCredential expiringCredential = null;
private final Class<?> mandatoryClassToSynchronizeOnPriorToRefresh;
public ExpiringCredentialRefreshingLogin(String contextName, Configuration configuration,
ExpiringCredentialRefreshConfig expiringCredentialRefreshConfig,
AuthenticateCallbackHandler callbackHandler, Class<?> mandatoryClassToSynchronizeOnPriorToRefresh) {
this(contextName, configuration, expiringCredentialRefreshConfig, callbackHandler,
mandatoryClassToSynchronizeOnPriorToRefresh, new LoginContextFactory(), Time.SYSTEM);
}
public ExpiringCredentialRefreshingLogin(String contextName, Configuration configuration,
ExpiringCredentialRefreshConfig expiringCredentialRefreshConfig,
AuthenticateCallbackHandler callbackHandler, Class<?> mandatoryClassToSynchronizeOnPriorToRefresh,
LoginContextFactory loginContextFactory, Time time) {
this.contextName = Objects.requireNonNull(contextName);
this.configuration = Objects.requireNonNull(configuration);
this.expiringCredentialRefreshConfig = Objects.requireNonNull(expiringCredentialRefreshConfig);
this.callbackHandler = callbackHandler;
this.mandatoryClassToSynchronizeOnPriorToRefresh = Objects
.requireNonNull(mandatoryClassToSynchronizeOnPriorToRefresh);
this.loginContextFactory = loginContextFactory;
this.time = Objects.requireNonNull(time);
}
public Subject subject() {
return subject; // field requires volatile keyword
}
public String contextName() {
return contextName;
}
public Configuration configuration() {
return configuration;
}
public AuthenticateCallbackHandler callbackHandler() {
return callbackHandler;
}
public String serviceName() {
return "kafka";
}
/**
* Performs login for each login module specified for the login context of this
* instance and starts the thread used to periodically re-login.
* <p>
* The synchronized keyword is not necessary because an implementation of
* {@link Login} will delegate to this code (e.g. OAuthBearerRefreshingLogin),
* and the {@code login()} method on the delegating | Refresher |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/index/similarity/ScriptedSimilarity.java | {
"start": 1011,
"end": 6060
} | class ____ extends Similarity {
final String weightScriptSource;
final String scriptSource;
final SimilarityWeightScript.Factory weightScriptFactory;
final SimilarityScript.Factory scriptFactory;
final boolean discountOverlaps;
/** Sole constructor. */
public ScriptedSimilarity(
String weightScriptString,
SimilarityWeightScript.Factory weightScriptFactory,
String scriptString,
SimilarityScript.Factory scriptFactory,
boolean discountOverlaps
) {
this.weightScriptSource = weightScriptString;
this.weightScriptFactory = weightScriptFactory;
this.scriptSource = scriptString;
this.scriptFactory = scriptFactory;
this.discountOverlaps = discountOverlaps;
}
@Override
public String toString() {
return getClass().getSimpleName() + "(weightScript=[" + weightScriptSource + "], script=[" + scriptSource + "])";
}
@Override
public long computeNorm(FieldInvertState state) {
final int numTerms = discountOverlaps ? state.getLength() - state.getNumOverlap() : state.getLength();
return SmallFloat.intToByte4(numTerms);
}
/** Compute the part of the score that does not depend on the current document using the init_script. */
private double computeWeight(Query query, Field field, Term term) {
if (weightScriptFactory == null) {
return 1d;
}
SimilarityWeightScript weightScript = weightScriptFactory.newInstance();
return weightScript.execute(query, field, term);
}
@Override
public SimScorer scorer(float boost, CollectionStatistics collectionStats, TermStatistics... termStats) {
Query query = new Query(boost);
long docCount = collectionStats.docCount();
if (docCount == -1) {
docCount = collectionStats.maxDoc();
}
Field field = new Field(docCount, collectionStats.sumDocFreq(), collectionStats.sumTotalTermFreq());
Term[] terms = new Term[termStats.length];
for (int i = 0; i < termStats.length; ++i) {
terms[i] = new Term(termStats[i].docFreq(), termStats[i].totalTermFreq());
}
SimScorer[] scorers = new SimScorer[terms.length];
for (int i = 0; i < terms.length; ++i) {
final Term term = terms[i];
final SimilarityScript script = scriptFactory.newInstance();
final Doc doc = new Doc();
final double scoreWeight = computeWeight(query, field, term);
scorers[i] = new SimScorer() {
@Override
public float score(float freq, long norm) {
doc.freq = freq;
doc.norm = norm;
return (float) script.execute(scoreWeight, query, field, term, doc);
}
@Override
public Explanation explain(Explanation freq, long norm) {
float score = score(freq.getValue().floatValue(), norm);
return Explanation.match(
score,
"score from " + ScriptedSimilarity.this.toString() + " computed from:",
Explanation.match((float) scoreWeight, "weight"),
Explanation.match(query.boost, "query.boost"),
Explanation.match(field.docCount, "field.docCount"),
Explanation.match(field.sumDocFreq, "field.sumDocFreq"),
Explanation.match(field.sumTotalTermFreq, "field.sumTotalTermFreq"),
Explanation.match(term.docFreq, "term.docFreq"),
Explanation.match(term.totalTermFreq, "term.totalTermFreq"),
Explanation.match(freq.getValue(), "doc.freq", freq.getDetails()),
Explanation.match(doc.getLength(), "doc.length")
);
}
};
}
if (scorers.length == 1) {
return scorers[0];
} else {
// Sum scores across terms like a BooleanQuery would do
return new SimScorer() {
@Override
public float score(float freq, long norm) {
double sum = 0;
for (SimScorer scorer : scorers) {
sum += scorer.score(freq, norm);
}
return (float) sum;
}
@Override
public Explanation explain(Explanation freq, long norm) {
Explanation[] subs = new Explanation[scorers.length];
for (int i = 0; i < subs.length; ++i) {
subs[i] = scorers[i].explain(freq, norm);
}
return Explanation.match(score(freq.getValue().floatValue(), norm), "Sum of:", subs);
}
};
}
}
/** Scoring factors that come from the query. */
public static | ScriptedSimilarity |
java | spring-projects__spring-security | oauth2/oauth2-client/src/main/java/org/springframework/security/oauth2/client/endpoint/NimbusJwtClientAuthenticationParametersConverter.java | {
"start": 9721,
"end": 11111
} | class ____<T extends AbstractOAuth2AuthorizationGrantRequest> {
private final T authorizationGrantRequest;
private final JwsHeader.Builder headers;
private final JwtClaimsSet.Builder claims;
private JwtClientAuthenticationContext(T authorizationGrantRequest, JwsHeader.Builder headers,
JwtClaimsSet.Builder claims) {
this.authorizationGrantRequest = authorizationGrantRequest;
this.headers = headers;
this.claims = claims;
}
/**
* Returns the {@link AbstractOAuth2AuthorizationGrantRequest authorization grant
* request}.
* @return the {@link AbstractOAuth2AuthorizationGrantRequest authorization grant
* request}
*/
public T getAuthorizationGrantRequest() {
return this.authorizationGrantRequest;
}
/**
* Returns the {@link JwsHeader.Builder} to be used to customize headers of the
* JSON Web Token (JWS).
* @return the {@link JwsHeader.Builder} to be used to customize headers of the
* JSON Web Token (JWS)
*/
public JwsHeader.Builder getHeaders() {
return this.headers;
}
/**
* Returns the {@link JwtClaimsSet.Builder} to be used to customize claims of the
* JSON Web Token (JWS).
* @return the {@link JwtClaimsSet.Builder} to be used to customize claims of the
* JSON Web Token (JWS)
*/
public JwtClaimsSet.Builder getClaims() {
return this.claims;
}
}
}
| JwtClientAuthenticationContext |
java | google__guava | android/guava-testlib/src/com/google/common/testing/ClassSanityTester.java | {
"start": 2412,
"end": 2945
} | class ____ {
* public static Book hardcover(String title) {...}
* public static Book paperback(String title) {...}
* }
* </pre>
*
* <p>And all the created {@code Book} instances can be tested with:
*
* <pre>
* new ClassSanityTester()
* .forAllPublicStaticMethods(Books.class)
* .thatReturn(Book.class)
* .testEquals(); // or testNulls(), testSerializable() etc.
* </pre>
*
* @author Ben Yu
* @since 14.0
*/
@GwtIncompatible
@J2ktIncompatible
@NullUnmarked
@SuppressWarnings("nullness")
public final | Books |
java | junit-team__junit5 | junit-platform-commons/src/main/java/org/junit/platform/commons/support/ReflectionSupport.java | {
"start": 3435,
"end": 6785
} | class ____ be loaded;
* never {@code null}
* @since 1.10
* @see #tryToLoadClass(String)
* @see ResourceSupport#tryToGetResources(String, ClassLoader)
*/
@API(status = MAINTAINED, since = "1.13.3")
public static Try<Class<?>> tryToLoadClass(String name, ClassLoader classLoader) {
return ReflectionUtils.tryToLoadClass(name, classLoader);
}
/**
* Try to get the {@linkplain Resource resources} for the supplied classpath
* resource name.
*
* <p>The name of a <em>classpath resource</em> must follow the semantics
* for resource paths as defined in {@link ClassLoader#getResource(String)}.
*
* <p>If the supplied classpath resource name is prefixed with a slash
* ({@code /}), the slash will be removed.
*
* @param classpathResourceName the name of the resource to load; never
* {@code null} or blank
* @return a successful {@code Try} containing the set of loaded resources
* (potentially empty) or a failed {@code Try} containing the exception in
* case a failure occurred while trying to list resources; never
* {@code null}
* @since 1.12
* @see #tryToGetResources(String, ClassLoader)
* @deprecated Please use {@link ResourceSupport#tryToGetResources(String)} instead
*/
@API(status = DEPRECATED, since = "1.14")
@Deprecated(since = "1.14", forRemoval = true)
@SuppressWarnings("removal")
public static Try<Set<Resource>> tryToGetResources(String classpathResourceName) {
return ResourceSupport.tryToGetResources(classpathResourceName) //
.andThenTry(ReflectionSupport::toSupportResourcesSet);
}
/**
* Try to load the {@linkplain Resource resources} for the supplied classpath
* resource name, using the supplied {@link ClassLoader}.
*
* <p>The name of a <em>classpath resource</em> must follow the semantics
* for resource paths as defined in {@link ClassLoader#getResource(String)}.
*
* <p>If the supplied classpath resource name is prefixed with a slash
* ({@code /}), the slash will be removed.
*
* @param classpathResourceName the name of the resource to load; never
* {@code null} or blank
* @param classLoader the {@code ClassLoader} to use; never {@code null}
* @return a successful {@code Try} containing the set of loaded resources
* (potentially empty) or a failed {@code Try} containing the exception in
* case a failure occurred while trying to list resources; never
* {@code null}
* @since 1.12
* @see #tryToGetResources(String)
* @deprecated Please use {@link ResourceSupport#tryToGetResources(String, ClassLoader)} instead
*/
@API(status = DEPRECATED, since = "1.14")
@Deprecated(since = "1.14", forRemoval = true)
@SuppressWarnings("removal")
public static Try<Set<Resource>> tryToGetResources(String classpathResourceName, ClassLoader classLoader) {
return ResourceSupport.tryToGetResources(classpathResourceName, classLoader) //
.andThenTry(ReflectionSupport::toSupportResourcesSet);
}
/**
* Find all {@linkplain Class classes} in the supplied classpath {@code root}
* that match the specified {@code classFilter} and {@code classNameFilter}
* predicates.
*
* <p>The classpath scanning algorithm searches recursively in subpackages
* beginning with the root of the classpath.
*
* @param root the URI for the classpath root in which to scan; never
* {@code null}
* @param classFilter the | could |
java | resilience4j__resilience4j | resilience4j-timelimiter/src/main/java/io/github/resilience4j/timelimiter/internal/TimeLimiterImpl.java | {
"start": 5745,
"end": 6245
} | class ____ {
private Timeout() {
}
static ScheduledFuture<?> of(
CompletableFuture<?> future, ScheduledExecutorService scheduler, String name, long delay,
TimeUnit unit) {
return scheduler.schedule(() -> {
if (future != null && !future.isDone()) {
future.completeExceptionally(TimeLimiter.createdTimeoutExceptionWithName(name, null));
}
}, delay, unit);
}
}
} | Timeout |
java | spring-projects__spring-security | config/src/test/java/org/springframework/security/config/annotation/web/configurers/HttpSecuritySecurityMatchersTests.java | {
"start": 9766,
"end": 9958
} | class ____ {
@RequestMapping("/path")
String path() {
return "path";
}
}
}
@EnableWebSecurity
@Configuration
@EnableWebMvc
@Import(UsersConfig.class)
static | PathController |
java | quarkusio__quarkus | extensions/devui/deployment/src/main/java/io/quarkus/devui/deployment/DevUIProcessor.java | {
"start": 5539,
"end": 70625
} | class ____ {
private static final String FOOTER_LOG_NAMESPACE = "devui-footer-log";
private static final String JS_SUFFIX = ".js";
private static final String I18N_DIR = "dev-ui/i18n/";
private static final String DEVUI = "dev-ui";
private static final String UNDERSCORE = "_";
private static final String SLASH = "/";
private static final String SLASH_ALL = SLASH + "*";
private static final String JSONRPC = "json-rpc-ws";
private static final String CONSTRUCTOR = "<init>";
private final ClassLoader tccl = Thread.currentThread().getContextClassLoader();
private static final String JAR = "jar";
private static final GACT UI_JAR = new GACT("io.quarkus", "quarkus-devui-resources", null, JAR);
private static final String NAME = "name";
private static final String DESCRIPTION = "description";
private static final String ARTIFACT = "artifact";
private static final String METADATA = "metadata";
private static final String KEYWORDS = "keywords";
private static final String SHORT_NAME = "short-name";
private static final String GUIDE = "guide";
private static final String CATEGORIES = "categories";
private static final String STATUS = "status";
private static final String BUILT_WITH = "built-with-quarkus-core";
private static final String CONFIG = "config";
private static final String EXTENSION_DEPENDENCIES = "extension-dependencies";
private static final String CAPABILITIES = "capabilities";
private static final String PROVIDES = "provides";
private static final String UNLISTED = "unlisted";
private static final String HIDE = "hide-in-dev-ui";
private static final String CODESTART = "codestart";
private static final String LANGUAGES = "languages";
private static final String ICON_URL = "icon-url";
private static final String LIB_GA = "lib-ga";
private final Pattern libGAPattern = Pattern.compile("([^:\\[\\]]+):([^\\[\\]]+)(\\[(.+?)\\])?");
private static final Logger log = Logger.getLogger(DevUIProcessor.class);
@BuildStep(onlyIf = IsLocalDevelopment.class)
@Record(ExecutionTime.STATIC_INIT)
void registerDevUiHandlers(
DevUIConfig devUIConfig,
BeanContainerBuildItem beanContainer,
MvnpmBuildItem mvnpmBuildItem,
List<DevUIRoutesBuildItem> devUIRoutesBuildItems,
List<StaticContentBuildItem> staticContentBuildItems,
BuildProducer<RouteBuildItem> routeProducer,
DevUIRecorder recorder,
LaunchModeBuildItem launchModeBuildItem,
NonApplicationRootPathBuildItem nonApplicationRootPathBuildItem,
HttpRootPathBuildItem httpRootPathBuildItem,
ShutdownContextBuildItem shutdownContext) throws IOException {
if (launchModeBuildItem.isNotLocalDevModeType()) {
return;
}
routeProducer.produce(nonApplicationRootPathBuildItem.routeBuilder()
.orderedRoute(DEVUI + SLASH_ALL, -2 * SecurityHandlerPriorities.CORS)
.handler(recorder.createLocalHostOnlyFilter(devUIConfig.hosts().orElse(null)))
.build());
if (devUIConfig.cors().enabled()) {
routeProducer.produce(nonApplicationRootPathBuildItem.routeBuilder()
.orderedRoute(DEVUI + SLASH_ALL, -1 * SecurityHandlerPriorities.CORS)
.handler(recorder.createDevUICorsFilter(devUIConfig.hosts().orElse(null)))
.build());
}
// Websocket for JsonRPC comms
routeProducer.produce(
nonApplicationRootPathBuildItem
.routeBuilder().route(DEVUI + SLASH + JSONRPC)
.handler(recorder.devUIWebSocketHandler())
.build());
// Static handler for components
for (DevUIRoutesBuildItem devUIRoutesBuildItem : devUIRoutesBuildItems) {
String route = devUIRoutesBuildItem.getPath();
String path = nonApplicationRootPathBuildItem.resolvePath(route);
Handler<RoutingContext> uihandler = recorder.uiHandler(
devUIRoutesBuildItem.getFinalDestination(),
path,
devUIRoutesBuildItem.getWebRootConfigurations(),
shutdownContext);
NonApplicationRootPathBuildItem.Builder builder = nonApplicationRootPathBuildItem.routeBuilder()
.route(route)
.handler(uihandler);
if (route.endsWith(DEVUI + SLASH)) {
builder = builder.displayOnNotFoundPage("Dev UI");
routeProducer.produce(builder.build());
}
routeProducer.produce(
nonApplicationRootPathBuildItem.routeBuilder().route(route + SLASH_ALL).handler(uihandler).build());
}
String basepath = nonApplicationRootPathBuildItem.resolvePath(DEVUI);
// For static content generated at build time
Path devUiBasePath = Files.createTempDirectory("quarkus-devui");
recorder.shutdownTask(shutdownContext, devUiBasePath.toString());
for (StaticContentBuildItem staticContentBuildItem : staticContentBuildItems) {
Map<String, String> urlAndPath = new HashMap<>();
Map<String, String> descriptions = new HashMap<>();
Map<String, String> mcpDefaultEnabled = new HashMap<>();
Map<String, String> contentTypes = new HashMap<>();
List<DevUIContent> content = staticContentBuildItem.getContent();
for (DevUIContent c : content) {
String parsedContent = Qute.fmt(new String(c.getTemplate()), c.getData());
Path tempFile = devUiBasePath
.resolve(c.getFileName());
Files.writeString(tempFile, parsedContent);
urlAndPath.put(c.getFileName(), tempFile.toString());
if (c.getDescriptions() != null && !c.getDescriptions().isEmpty()) {
descriptions.putAll(c.getDescriptions());
}
if (c.getMcpDefaultEnables() != null && !c.getMcpDefaultEnables().isEmpty()) {
mcpDefaultEnabled.putAll(c.getMcpDefaultEnables());
}
if (c.getContentTypes() != null && !c.getContentTypes().isEmpty()) {
contentTypes.putAll(c.getContentTypes());
}
}
Handler<RoutingContext> buildTimeStaticHandler = recorder.buildTimeStaticHandler(beanContainer.getValue(), basepath,
urlAndPath, descriptions, mcpDefaultEnabled, contentTypes);
routeProducer.produce(
nonApplicationRootPathBuildItem.routeBuilder().route(DEVUI + SLASH_ALL)
.handler(buildTimeStaticHandler)
.build());
}
Handler<RoutingContext> endpointInfoHandler = recorder.endpointInfoHandler(basepath);
routeProducer.produce(
nonApplicationRootPathBuildItem.routeBuilder().route(DEVUI + SLASH + "endpoints" + SLASH + "*")
.handler(endpointInfoHandler)
.build());
// For the Vaadin router (So that bookmarks/url refreshes work)
for (DevUIRoutesBuildItem devUIRoutesBuildItem : devUIRoutesBuildItems) {
String route = devUIRoutesBuildItem.getPath();
basepath = nonApplicationRootPathBuildItem.resolvePath(route);
Handler<RoutingContext> routerhandler = recorder.vaadinRouterHandler(basepath);
routeProducer.produce(
nonApplicationRootPathBuildItem.routeBuilder().route(route + SLASH_ALL).handler(routerhandler).build());
}
// Static mvnpm jars
String contextRoot = nonApplicationRootPathBuildItem.getNonApplicationRootPath();
routeProducer.produce(
nonApplicationRootPathBuildItem.routeBuilder()
.route("_static" + SLASH_ALL)
.handler(recorder.mvnpmHandler(contextRoot, mvnpmBuildItem.getMvnpmJars()))
.build());
// Redirect /q/dev -> /q/dev-ui
routeProducer.produce(nonApplicationRootPathBuildItem.routeBuilder()
.route("dev")
.handler(recorder.redirect(contextRoot))
.build());
// Redirect naked to welcome if there is no index.html
if (!hasOwnIndexHtml()) {
routeProducer.produce(httpRootPathBuildItem.routeBuilder()
.orderedRoute(SLASH, Integer.MAX_VALUE)
.handler(recorder.redirect(contextRoot, "welcome"))
.build());
}
}
private boolean hasOwnIndexHtml() {
try {
Enumeration<URL> jarsWithIndexHtml = Thread.currentThread().getContextClassLoader()
.getResources("META-INF/resources/index.html");
return jarsWithIndexHtml.hasMoreElements();
} catch (IOException ex) {
throw new UncheckedIOException(ex);
}
}
/**
* This makes sure the Runtime JsonRPC Classes for both the internal Dev UI and extensions is available as a bean and on the
* index.
*/
@BuildStep(onlyIf = IsLocalDevelopment.class)
void additionalBean(BuildProducer<AdditionalBeanBuildItem> additionalBeanProducer,
BuildProducer<AdditionalIndexedClassesBuildItem> additionalIndexProducer,
List<JsonRPCProvidersBuildItem> jsonRPCProvidersBuildItems) {
additionalBeanProducer.produce(AdditionalBeanBuildItem.builder()
.addBeanClass(VertxRouteInfoService.class)
.setUnremovable().build());
// Make sure all JsonRPC Providers is in the index
for (JsonRPCProvidersBuildItem jsonRPCProvidersBuildItem : jsonRPCProvidersBuildItems) {
Class c = jsonRPCProvidersBuildItem.getJsonRPCMethodProviderClass();
additionalIndexProducer.produce(new AdditionalIndexedClassesBuildItem(c.getName()));
DotName defaultBeanScope = jsonRPCProvidersBuildItem.getDefaultBeanScope() == null
? BuiltinScope.APPLICATION.getName()
: jsonRPCProvidersBuildItem.getDefaultBeanScope();
additionalBeanProducer.produce(AdditionalBeanBuildItem.builder()
.addBeanClass(c)
.setDefaultScope(defaultBeanScope)
.setUnremovable().build());
}
additionalBeanProducer.produce(AdditionalBeanBuildItem.builder()
.addBeanClass(JsonRpcRouter.class)
.setDefaultScope(BuiltinScope.APPLICATION.getName())
.setUnremovable().build());
additionalBeanProducer.produce(AdditionalBeanBuildItem.builder()
.addBeanClass(DevUIBuildTimeStaticService.class)
.setDefaultScope(BuiltinScope.APPLICATION.getName())
.setUnremovable().build());
}
/**
* This creates a set of supported locales
*/
@BuildStep(onlyIf = IsLocalDevelopment.class)
BuildTimeConstBuildItem findAllSupportedLocales() {
BuildTimeConstBuildItem localesInfo = new BuildTimeConstBuildItem("devui-locales");
Set<LanguageCountry> locales = new HashSet<>();
try {
Enumeration<URL> urls = tccl.getResources(I18N_DIR);
while (urls.hasMoreElements()) {
URL url = urls.nextElement();
String protocol = url.getProtocol();
if ("file".equals(protocol)) {
scanFileDirectory(url, locales);
} else if ("jar".equals(protocol)) {
scanJarDirectory(url, locales);
}
}
if (!locales.isEmpty()) {
List<LanguageCountry> sorted = locales.stream()
.sorted(Comparator.comparing(LanguageCountry::name, String.CASE_INSENSITIVE_ORDER))
.toList();
localesInfo.addBuildTimeData("locales", sorted);
}
} catch (IOException | URISyntaxException ex) {
ex.printStackTrace();
locales.add(new LanguageCountry("en-GB", "English (UK)")); // default
List<LanguageCountry> sorted = locales.stream()
.sorted(Comparator.comparing(LanguageCountry::name, String.CASE_INSENSITIVE_ORDER))
.toList();
localesInfo.addBuildTimeData("locales", sorted);
}
return localesInfo;
}
private static void scanFileDirectory(URL dirUrl, Set<LanguageCountry> locales) throws IOException, URISyntaxException {
URI uri = dirUrl.toURI();
Path dir = Paths.get(uri);
if (!Files.isDirectory(dir)) {
return;
}
try (var stream = Files.list(dir)) {
stream.filter(Files::isRegularFile)
.forEach(path -> {
String fileName = path.getFileName().toString();
addIfLocaleFile(fileName, locales);
});
}
}
private static void scanJarDirectory(URL jarDirUrl, Set<LanguageCountry> locales) throws IOException {
JarURLConnection conn = (JarURLConnection) jarDirUrl.openConnection();
try (JarFile jarFile = conn.getJarFile()) {
Enumeration<JarEntry> entries = jarFile.entries();
while (entries.hasMoreElements()) {
JarEntry entry = entries.nextElement();
if (entry.isDirectory()) {
continue;
}
String name = entry.getName();
// We only care about entries under dev-ui/i18n/
if (name.startsWith(I18N_DIR)) {
String fileName = name.substring(I18N_DIR.length());
// ignore nested dirs under dev-ui/i18n/
if (fileName.contains("/")) {
continue;
}
addIfLocaleFile(fileName, locales);
}
}
}
}
private static void addIfLocaleFile(String fileName, Set<LanguageCountry> locales) {
if (!fileName.endsWith(JS_SUFFIX)) {
return;
}
if (!fileName.contains("-")) {
// only accept e.g. en-GB.js, fr-FR.js, not plain en.js
return;
}
String code = fileName.substring(0, fileName.length() - JS_SUFFIX.length());
Locale locale = Locale.forLanguageTag(code);
String displayLanguage = locale.getDisplayLanguage();
String countryCode = locale.getCountry();
locales.add(new LanguageCountry(code, displayLanguage + " (" + countryCode + ")"));
}
/**
* This goes through all jsonRPC methods and discover the methods using Jandex
*/
@BuildStep(onlyIf = IsLocalDevelopment.class)
void findAllJsonRPCMethods(BuildProducer<JsonRPCRuntimeMethodsBuildItem> jsonRPCMethodsProvider,
BuildProducer<BuildTimeConstBuildItem> buildTimeConstProducer,
LaunchModeBuildItem launchModeBuildItem,
CombinedIndexBuildItem combinedIndexBuildItem,
CurateOutcomeBuildItem curateOutcomeBuildItem,
List<JsonRPCProvidersBuildItem> jsonRPCProvidersBuildItems,
DeploymentMethodBuildItem deploymentMethodBuildItem) {
if (launchModeBuildItem.isNotLocalDevModeType()) {
return;
}
IndexView index = combinedIndexBuildItem.getIndex();
Map<String, RuntimeJsonRpcMethod> runtimeMethodsMap = new HashMap<>();// All methods to execute against the runtime classpath
Map<String, RuntimeJsonRpcMethod> runtimeSubscriptionsMap = new HashMap<>();// All subscriptions to execute against the runtime classpath
DotName descriptionAnnotation = DotName.createSimple(JsonRpcDescription.class);
DotName devMCPEnableByDefaultAnnotation = DotName.createSimple(DevMCPEnableByDefault.class);
// Let's use the Jandex index to find all methods
for (JsonRPCProvidersBuildItem jsonRPCProvidersBuildItem : jsonRPCProvidersBuildItems) {
Class clazz = jsonRPCProvidersBuildItem.getJsonRPCMethodProviderClass();
String extension = jsonRPCProvidersBuildItem.getExtensionPathName(curateOutcomeBuildItem);
ClassInfo classInfo = index.getClassByName(DotName.createSimple(clazz.getName()));
if (classInfo != null) {// skip if not found
for (MethodInfo method : classInfo.methods()) {
// Ignore constructor, Only allow public methods, Only allow method with response
if (!method.name().equals(CONSTRUCTOR) && Modifier.isPublic(method.flags())
&& method.returnType().kind() != Type.Kind.VOID) {
String methodName = extension + UNDERSCORE + method.name();
Map<String, AbstractJsonRpcMethod.Parameter> parameters = new LinkedHashMap<>(); // Keep the order
for (int i = 0; i < method.parametersCount(); i++) {
String description = null;
boolean required = true;
Type parameterType = method.parameterType(i);
if (DotNames.OPTIONAL.equals(parameterType.name())) {
required = false;
parameterType = parameterType.asParameterizedType().arguments().get(0);
}
AnnotationInstance jsonRpcDescriptionAnnotation = method.parameters().get(i)
.annotation(descriptionAnnotation);
if (jsonRpcDescriptionAnnotation != null) {
AnnotationValue descriptionValue = jsonRpcDescriptionAnnotation.value();
if (descriptionValue != null && !descriptionValue.asString().isBlank()) {
description = descriptionValue.asString();
}
}
Class<?> parameterClass = toClass(parameterType);
String parameterName = method.parameterName(i);
parameters.put(parameterName,
new AbstractJsonRpcMethod.Parameter(parameterClass, description, required));
}
// Look for @JsonRpcUsage annotation
EnumSet<Usage> usage = EnumSet.noneOf(Usage.class);
AnnotationInstance jsonRpcUsageAnnotation = method.annotation(DotName.createSimple(JsonRpcUsage.class));
if (jsonRpcUsageAnnotation != null) {
AnnotationInstance[] usageArray = jsonRpcUsageAnnotation.value().asNestedArray();
for (AnnotationInstance usageInstance : usageArray) {
String usageStr = usageInstance.value().asEnum();
usage.add(Usage.valueOf(usageStr));
}
}
// Look for @JsonRpcDescription annotation
String description = null;
boolean mcpEnabledByDefault = false;
AnnotationInstance jsonRpcDescriptionAnnotation = method
.annotation(descriptionAnnotation);
if (jsonRpcDescriptionAnnotation != null) {
AnnotationValue descriptionValue = jsonRpcDescriptionAnnotation.value();
if (descriptionValue != null && !descriptionValue.asString().isBlank()) {
description = descriptionValue.asString();
usage = Usage.devUIandDevMCP();
}
AnnotationInstance devMCPEnableByDefaultAnnotationInstance = method
.annotation(devMCPEnableByDefaultAnnotation);
if (devMCPEnableByDefaultAnnotationInstance != null) {
mcpEnabledByDefault = true;
}
} else {
usage = Usage.onlyDevUI();
}
RuntimeJsonRpcMethod runtimeJsonRpcMethod = new RuntimeJsonRpcMethod(methodName, description,
parameters,
usage,
mcpEnabledByDefault,
clazz,
method.hasAnnotation(Blocking.class), method.hasAnnotation(NonBlocking.class));
// Create list of available methods for the Javascript side.
if (method.returnType().name().equals(DotName.createSimple(Multi.class.getName()))) {
runtimeSubscriptionsMap.put(methodName, runtimeJsonRpcMethod);
} else {
runtimeMethodsMap.put(methodName, runtimeJsonRpcMethod);
}
}
}
}
}
jsonRPCMethodsProvider.produce(new JsonRPCRuntimeMethodsBuildItem(runtimeMethodsMap, runtimeSubscriptionsMap));
// Get all names for UI validation
Set<String> allMethodsNames = Stream
.<Map<String, ?>> of(runtimeMethodsMap, deploymentMethodBuildItem.getMethods(),
deploymentMethodBuildItem.getRecordedMethods())
.flatMap(m -> m.keySet().stream())
.collect(Collectors.toSet());
Set<String> allSubscriptionNames = Stream
.<Map<String, ?>> of(runtimeSubscriptionsMap, deploymentMethodBuildItem.getSubscriptions(),
deploymentMethodBuildItem.getRecordedSubscriptions())
.flatMap(m -> m.keySet().stream())
.collect(Collectors.toSet());
BuildTimeConstBuildItem methodInfo = new BuildTimeConstBuildItem("devui-jsonrpc");
if (!allSubscriptionNames.isEmpty()) {
methodInfo.addBuildTimeData("jsonRPCSubscriptions", allSubscriptionNames);
}
if (!allMethodsNames.isEmpty()) {
methodInfo.addBuildTimeData("jsonRPCMethods", allMethodsNames);
}
buildTimeConstProducer.produce(methodInfo);
}
@BuildStep(onlyIf = IsLocalDevelopment.class)
@Record(ExecutionTime.RUNTIME_INIT)
void createJsonRpcRouter(DevUIRecorder recorder,
BeanContainerBuildItem beanContainer,
JsonRPCRuntimeMethodsBuildItem jsonRPCMethodsBuildItem,
DeploymentMethodBuildItem deploymentMethodBuildItem) {
if (jsonRPCMethodsBuildItem != null) {
Map<String, RuntimeJsonRpcMethod> runtimeMethodsMap = jsonRPCMethodsBuildItem.getRuntimeMethodsMap();
Map<String, RuntimeJsonRpcMethod> runtimeSubscriptionsMap = jsonRPCMethodsBuildItem.getRuntimeSubscriptionsMap();
DevConsoleManager.setGlobal(DevUIRecorder.DEV_MANAGER_GLOBALS_JSON_MAPPER_FACTORY,
JsonMapper.Factory.deploymentLinker().createLinkData(new DevUIDatabindCodec.Factory()));
recorder.createJsonRpcRouter(beanContainer.getValue(),
runtimeToJsonRpcMethods(runtimeMethodsMap),
runtimeToJsonRpcMethods(runtimeSubscriptionsMap),
deploymentToJsonRpcMethods(deploymentMethodBuildItem.getMethods()),
deploymentToJsonRpcMethods(deploymentMethodBuildItem.getSubscriptions()),
recordedToJsonRpcMethods(deploymentMethodBuildItem.getRecordedMethods()),
recordedToJsonRpcMethods(deploymentMethodBuildItem.getRecordedSubscriptions()));
}
}
// Converts a map of runtime JSON-RPC method descriptors to their recorder-friendly form.
private Map<String, JsonRpcMethod> runtimeToJsonRpcMethods(Map<String, RuntimeJsonRpcMethod> m) {
    return mapToJsonRpcMethods(m, this::runtimeToJsonRpcMethod);
}
// Converts a map of deployment-side JSON-RPC method descriptors to their recorder-friendly form.
private Map<String, JsonRpcMethod> deploymentToJsonRpcMethods(Map<String, DeploymentJsonRpcMethod> m) {
    return mapToJsonRpcMethods(m, this::toJsonRpcMethod);
}
// Converts a map of recorded JSON-RPC method descriptors to their recorder-friendly form.
private Map<String, JsonRpcMethod> recordedToJsonRpcMethods(Map<String, RecordedJsonRpcMethod> m) {
    return mapToJsonRpcMethods(m, this::recordedToJsonRpcMethod);
}
// Applies the given converter to every map value, keeping keys intact.
// Note: Collectors.toMap rejects null converted values and (impossible here) duplicate keys.
private <T extends AbstractJsonRpcMethod> Map<String, JsonRpcMethod> mapToJsonRpcMethods(
        Map<String, T> input,
        Function<T, JsonRpcMethod> converter) {
    return input.entrySet().stream()
            .collect(Collectors.toMap(Map.Entry::getKey, e -> converter.apply(e.getValue())));
}
// Converts a runtime method, carrying over the bean reference and explicit blocking hints
// on top of the common metadata copied by toJsonRpcMethod.
private JsonRpcMethod runtimeToJsonRpcMethod(RuntimeJsonRpcMethod i) {
    JsonRpcMethod o = toJsonRpcMethod(i);
    o.setBean(i.getBean());
    o.setIsExplicitlyBlocking(i.isExplicitlyBlocking());
    o.setIsExplicitlyNonBlocking(i.isExplicitlyNonBlocking());
    return o;
}
// Converts a recorded method, carrying over the runtime value produced at build time.
private JsonRpcMethod recordedToJsonRpcMethod(RecordedJsonRpcMethod i) {
    JsonRpcMethod o = toJsonRpcMethod(i);
    o.setRuntimeValue(i.getRuntimeValue());
    return o;
}
// Copies the JSON-RPC metadata common to all method variants: name, description, usage,
// MCP default flag and (optionally) the declared parameters.
private JsonRpcMethod toJsonRpcMethod(AbstractJsonRpcMethod i) {
    JsonRpcMethod o = new JsonRpcMethod();
    o.setMethodName(i.getMethodName());
    o.setDescription(i.getDescription());
    o.setUsage(List.copyOf(i.getUsage()));
    o.setMcpEnabledByDefault(i.isMcpEnabledByDefault());
    if (i.hasParameters()) {
        for (Map.Entry<String, AbstractJsonRpcMethod.Parameter> ip : i.getParameters().entrySet()) {
            o.addParameter(ip.getKey(), ip.getValue().getType(), ip.getValue().getDescription(),
                    ip.getValue().isRequired());
        }
    }
    return o;
}
// For every contributed footer log, registers a JSON-RPC subscription that streams the log
// and a corresponding footer tab in the Dev UI.
@BuildStep(onlyIf = IsLocalDevelopment.class)
void processFooterLogs(BuildProducer<BuildTimeActionBuildItem> buildTimeActionProducer,
        BuildProducer<FooterPageBuildItem> footerPageProducer,
        List<FooterLogBuildItem> footerLogBuildItems) {
    List<BuildTimeActionBuildItem> devServiceLogs = new ArrayList<>();
    List<FooterPageBuildItem> footers = new ArrayList<>();
    for (FooterLogBuildItem footerLogBuildItem : footerLogBuildItems) {
        // Create the Json-RPC service that will stream the log.
        // The subscription name strips spaces from the display name.
        String name = footerLogBuildItem.getName().replaceAll(" ", "");
        BuildTimeActionBuildItem devServiceLogActions = new BuildTimeActionBuildItem(FOOTER_LOG_NAMESPACE);
        if (footerLogBuildItem.hasRuntimePublisher()) {
            // Runtime publisher: the stream is produced at runtime by the recorded value.
            devServiceLogActions.subscriptionBuilder()
                    .methodName(name + "Log")
                    .description("Streams the " + name + " log")
                    .runtime(footerLogBuildItem.getRuntimePublisher())
                    .build();
        } else {
            // Build-time publisher: resolved lazily when the subscription is first used.
            devServiceLogActions.subscriptionBuilder()
                    .methodName(name + "Log")
                    .description("Streams the " + name + " log")
                    .function(ignored -> {
                        try {
                            return footerLogBuildItem.getPublisher();
                        } catch (Exception e) {
                            throw new RuntimeException(e);
                        }
                    })
                    .build();
        }
        devServiceLogs.add(devServiceLogActions);
        // Create the Footer in the Dev UI.
        // NOTE(review): the metadata below uses the raw name + "Log" while the subscription
        // above uses the space-stripped name + "Log"; these diverge for names containing
        // spaces — confirm which one the UI expects.
        WebComponentPageBuilder footerLogComponent = Page.webComponentPageBuilder().internal()
                .namespace(FOOTER_LOG_NAMESPACE)
                .icon("font-awesome-regular:file-lines")
                .title(capitalizeFirstLetter(footerLogBuildItem.getName()))
                .metadata("jsonRpcMethodName", footerLogBuildItem.getName() + "Log")
                .componentLink("qwc-footer-log.js");
        FooterPageBuildItem footerPageBuildItem = new FooterPageBuildItem(FOOTER_LOG_NAMESPACE, footerLogComponent);
        footers.add(footerPageBuildItem);
    }
    buildTimeActionProducer.produce(devServiceLogs);
    footerPageProducer.produce(footers);
}
/**
 * Capitalizes the first letter of the input, leaving the rest untouched.
 * Returns the input unchanged when it is {@code null} or empty.
 * Uses a fixed locale so the result does not depend on the JVM default locale
 * (e.g. the Turkish dotted/dotless "i" problem with bare {@code toUpperCase()}).
 */
private String capitalizeFirstLetter(String input) {
    if (input == null || input.isEmpty()) {
        return input;
    }
    return input.substring(0, 1).toUpperCase(java.util.Locale.ROOT) + input.substring(1);
}
/**
 * Builds all the Dev UI pages (cards, menus, footer tabs, setting tabs and unlisted pages)
 * based on the extensions included in the application, and produces the static web
 * resources that back them.
 */
@BuildStep(onlyIf = IsLocalDevelopment.class)
@SuppressWarnings("unchecked")
void getAllExtensions(List<CardPageBuildItem> cardPageBuildItems,
        List<MenuPageBuildItem> menuPageBuildItems,
        List<FooterPageBuildItem> footerPageBuildItems,
        List<SettingPageBuildItem> settingPageBuildItems,
        List<UnlistedPageBuildItem> unlistedPageBuildItems,
        LaunchModeBuildItem launchModeBuildItem,
        CurateOutcomeBuildItem curateOutcomeBuildItem,
        BuildProducer<ExtensionsBuildItem> extensionsProducer,
        BuildProducer<WebJarBuildItem> webJarBuildProducer,
        BuildProducer<DevUIWebJarBuildItem> devUIWebJarProducer,
        Capabilities capabilities) {
    if (launchModeBuildItem.isNotLocalDevModeType()) {
        // produce extension build item as cascade of build steps rely on it
        var emptyExtensionBuildItem = new ExtensionsBuildItem(List.of(), List.of(), List.of(), List.of(), List.of(),
                List.of());
        extensionsProducer.produce(emptyExtensionBuildItem);
        return;
    }
    // First create the static resources for our own internal components
    webJarBuildProducer.produce(WebJarBuildItem.builder()
            .artifactKey(UI_JAR)
            .root(DEVUI + SLASH).build());
    devUIWebJarProducer.produce(new DevUIWebJarBuildItem(UI_JAR, DEVUI));
    final boolean assistantIsAvailable = capabilities.isPresent(Capability.ASSISTANT);
    // Now go through all extensions and check them for active components.
    // Index all contributed pages by extension namespace first.
    Map<String, CardPageBuildItem> cardPagesMap = getCardPagesMap(curateOutcomeBuildItem, cardPageBuildItems);
    Map<String, MenuPageBuildItem> menuPagesMap = getMenuPagesMap(curateOutcomeBuildItem, menuPageBuildItems);
    Map<String, List<FooterPageBuildItem>> footerPagesMap = getFooterPagesMap(curateOutcomeBuildItem, footerPageBuildItems);
    Map<String, List<SettingPageBuildItem>> settingPagesMap = getSettingPagesMap(curateOutcomeBuildItem,
            settingPageBuildItems);
    Map<String, List<UnlistedPageBuildItem>> unlistedPagesMap = getUnlistedPagesMap(curateOutcomeBuildItem,
            unlistedPageBuildItems);
    final Yaml yaml = new Yaml();
    List<Extension> activeExtensions = new ArrayList<>();
    List<Extension> inactiveExtensions = new ArrayList<>();
    List<Extension> sectionMenuExtensions = new ArrayList<>();
    List<Extension> footerTabExtensions = new ArrayList<>();
    List<Extension> settingTabExtensions = new ArrayList<>();
    List<Extension> unlistedExtensions = new ArrayList<>();
    // Walk every runtime extension artifact and read its extension metadata descriptor.
    for (ResolvedDependency runtimeExt : curateOutcomeBuildItem.getApplicationModel()
            .getDependencies(DependencyFlags.RUNTIME_EXTENSION_ARTIFACT)) {
        runtimeExt.getContentTree().accept(BootstrapConstants.EXTENSION_METADATA_PATH, extYamlVisit -> {
            if (extYamlVisit == null) {
                // this could be an exception but previously the code was simply looking for this resource on the classpath
                log.error("Failed to locate " + BootstrapConstants.EXTENSION_METADATA_PATH + " in "
                        + runtimeExt.toCompactCoords());
                return;
            }
            final Path extYaml = extYamlVisit.getPath();
            try {
                Extension extension = new Extension();
                final String extensionYaml;
                // Read the whole descriptor in one go ("\\A" delimiter = entire stream).
                try (Scanner scanner = new Scanner(Files.newBufferedReader(extYaml, StandardCharsets.UTF_8))) {
                    scanner.useDelimiter("\\A");
                    extensionYaml = scanner.hasNext() ? scanner.next() : null;
                }
                if (extensionYaml == null) {
                    // This is a internal extension (like this one, Dev UI)
                    return;
                }
                final Map<String, Object> extensionMap = yaml.load(extensionYaml);
                if (extensionMap.containsKey(NAME)) {
                    Map<String, Object> metaData = (Map<String, Object>) extensionMap.getOrDefault(METADATA, null);
                    if (metaData != null) {
                        // Extensions marked hidden in their metadata are skipped entirely.
                        boolean isHidden = Boolean.valueOf(String.valueOf(metaData.getOrDefault(HIDE, false)));
                        if (!isHidden) {
                            // Populate the extension descriptor fields from the YAML metadata.
                            String namespace = getExtensionNamespace(extensionMap);
                            extension.setNamespace(namespace);
                            extension.setName((String) extensionMap.get(NAME));
                            extension.setDescription((String) extensionMap.getOrDefault(DESCRIPTION, null));
                            extension.setArtifact((String) extensionMap.getOrDefault(ARTIFACT, null));
                            extension.setKeywords((List<String>) metaData.getOrDefault(KEYWORDS, null));
                            extension.setShortName((String) metaData.getOrDefault(SHORT_NAME, null));
                            if (metaData.containsKey(GUIDE)) {
                                String guide = (String) metaData.get(GUIDE);
                                try {
                                    extension.setGuide(new URL(guide));
                                } catch (MalformedURLException mue) {
                                    // NOTE(review): message says "exception" where "extension"
                                    // appears intended — confirm and fix in a follow-up.
                                    log.warn("Could not set Guide URL [" + guide + "] for exception [" + namespace + "]");
                                }
                            }
                            extension.setCategories((List<String>) metaData.getOrDefault(CATEGORIES, null));
                            extension.setStatus(collectionToString(metaData, STATUS));
                            extension.setBuiltWith((String) metaData.getOrDefault(BUILT_WITH, null));
                            extension.setConfigFilter((List<String>) metaData.getOrDefault(CONFIG, null));
                            extension.setExtensionDependencies(
                                    (List<String>) metaData.getOrDefault(EXTENSION_DEPENDENCIES, null));
                            extension.setUnlisted(String.valueOf(metaData.getOrDefault(UNLISTED, false)));
                            if (metaData.containsKey(CAPABILITIES)) {
                                Map<String, Object> cap = (Map<String, Object>) metaData.get(CAPABILITIES);
                                extension.setProvidesCapabilities((List<String>) cap.getOrDefault(PROVIDES, null));
                            }
                            if (metaData.containsKey(CODESTART)) {
                                Map<String, Object> codestartMap = (Map<String, Object>) metaData.get(CODESTART);
                                if (codestartMap != null) {
                                    Codestart codestart = new Codestart();
                                    codestart.setName((String) codestartMap.getOrDefault(NAME, null));
                                    codestart.setLanguages(listOrString(codestartMap, LANGUAGES));
                                    codestart.setArtifact((String) codestartMap.getOrDefault(ARTIFACT, null));
                                    extension.setCodestart(codestart);
                                }
                            }
                            if (cardPagesMap.containsKey(namespace) && cardPagesMap.get(namespace).hasPages()) { // Active
                                CardPageBuildItem cardPageBuildItem = cardPagesMap.get(namespace);
                                // Add all card links
                                List<PageBuilder> cardPageBuilders = cardPageBuildItem.getPages();
                                Map<String, BuildTimeData> buildTimeData = cardPageBuildItem.getBuildTimeData();
                                for (PageBuilder pageBuilder : cardPageBuilders) {
                                    Page page = buildFinalPage(pageBuilder, extension, buildTimeData);
                                    // Assistant pages are only added when the assistant capability is present.
                                    if (!page.isAssistantPage() || assistantIsAvailable) {
                                        extension.addCardPage(page);
                                    }
                                }
                                // See if there is a custom card component
                                cardPageBuildItem.getOptionalCard().ifPresent((card) -> {
                                    card.setNamespace(extension.getNamespace());
                                    extension.setCard(card);
                                });
                                // See if there is a headless component
                                String headlessJs = cardPageBuildItem.getHeadlessComponentLink();
                                if (headlessJs != null) {
                                    extension.setHeadlessComponent(headlessJs);
                                }
                                addLogo(extension, cardPageBuildItem, metaData);
                                addLibraryLinks(extension, cardPageBuildItem, curateOutcomeBuildItem, metaData);
                                // Also make sure the static resources for that static resource is available
                                produceResources(runtimeExt, webJarBuildProducer, devUIWebJarProducer);
                                activeExtensions.add(extension);
                            } else { // Inactive
                                if (addLogo(extension, cardPagesMap.get(namespace), metaData)) {
                                    // Also make sure the static resources for that static resource is available
                                    produceResources(runtimeExt, webJarBuildProducer, devUIWebJarProducer);
                                }
                                addLibraryLinks(extension, cardPagesMap.get(namespace), curateOutcomeBuildItem,
                                        metaData);
                                inactiveExtensions.add(extension);
                            }
                            // Menus on the sections menu
                            if (menuPagesMap.containsKey(namespace)) {
                                MenuPageBuildItem menuPageBuildItem = menuPagesMap.get(namespace);
                                List<PageBuilder> menuPageBuilders = menuPageBuildItem.getPages();
                                Map<String, BuildTimeData> buildTimeData = menuPageBuildItem.getBuildTimeData();
                                for (PageBuilder pageBuilder : menuPageBuilders) {
                                    Page page = buildFinalPage(pageBuilder, extension, buildTimeData);
                                    if (!page.isAssistantPage() || assistantIsAvailable) {
                                        extension.addMenuPage(page);
                                    }
                                }
                                // See if there is a headless component
                                String headlessJs = menuPageBuildItem.getHeadlessComponentLink();
                                if (headlessJs != null) {
                                    extension.setHeadlessComponent(headlessJs);
                                }
                                // Also make sure the static resources for that static resource is available
                                produceResources(runtimeExt, webJarBuildProducer, devUIWebJarProducer);
                                sectionMenuExtensions.add(extension);
                            }
                            // Tabs in the footer.
                            // Entries are removed from the map so that only footers without a
                            // matching runtime extension remain for the fallback pass below.
                            if (footerPagesMap.containsKey(namespace)) {
                                List<FooterPageBuildItem> fbis = footerPagesMap.remove(namespace);
                                for (FooterPageBuildItem footerPageBuildItem : fbis) {
                                    List<PageBuilder> footerPageBuilders = footerPageBuildItem.getPages();
                                    Map<String, BuildTimeData> buildTimeData = footerPageBuildItem.getBuildTimeData();
                                    for (PageBuilder pageBuilder : footerPageBuilders) {
                                        Page page = buildFinalPage(pageBuilder, extension, buildTimeData);
                                        if (!page.isAssistantPage() || assistantIsAvailable) {
                                            extension.addFooterPage(page);
                                        }
                                    }
                                    // See if there is a headless component
                                    String headlessJs = footerPageBuildItem.getHeadlessComponentLink();
                                    if (headlessJs != null) {
                                        extension.setHeadlessComponent(headlessJs);
                                    }
                                    // Also make sure the static resources for that static resource is available
                                    produceResources(runtimeExt, webJarBuildProducer, devUIWebJarProducer);
                                    footerTabExtensions.add(extension);
                                }
                            }
                            // Tabs in the settings page (same remove-and-collect pattern as footers)
                            if (settingPagesMap.containsKey(namespace)) {
                                List<SettingPageBuildItem> sbis = settingPagesMap.remove(namespace);
                                for (SettingPageBuildItem settingPageBuildItem : sbis) {
                                    List<PageBuilder> settingPageBuilders = settingPageBuildItem.getPages();
                                    Map<String, BuildTimeData> buildTimeData = settingPageBuildItem.getBuildTimeData();
                                    for (PageBuilder pageBuilder : settingPageBuilders) {
                                        Page page = buildFinalPage(pageBuilder, extension, buildTimeData);
                                        if (!page.isAssistantPage() || assistantIsAvailable) {
                                            extension.addSettingPage(page);
                                        }
                                    }
                                    // See if there is a headless component
                                    String headlessJs = settingPageBuildItem.getHeadlessComponentLink();
                                    if (headlessJs != null) {
                                        extension.setHeadlessComponent(headlessJs);
                                    }
                                    // Also make sure the static resources for that static resource is available
                                    produceResources(runtimeExt, webJarBuildProducer, devUIWebJarProducer);
                                    settingTabExtensions.add(extension);
                                }
                            }
                            // Unlisted pages (same remove-and-collect pattern as footers)
                            if (unlistedPagesMap.containsKey(namespace)) {
                                List<UnlistedPageBuildItem> ubis = unlistedPagesMap.remove(namespace);
                                for (UnlistedPageBuildItem unlistedPageBuildItem : ubis) {
                                    List<PageBuilder> unlistedPageBuilders = unlistedPageBuildItem.getPages();
                                    Map<String, BuildTimeData> buildTimeData = unlistedPageBuildItem.getBuildTimeData();
                                    for (PageBuilder pageBuilder : unlistedPageBuilders) {
                                        Page page = buildFinalPage(pageBuilder, extension, buildTimeData);
                                        if (!page.isAssistantPage() || assistantIsAvailable) {
                                            extension.addUnlistedPage(page);
                                        }
                                    }
                                    // See if there is a headless component
                                    String headlessJs = unlistedPageBuildItem.getHeadlessComponentLink();
                                    if (headlessJs != null) {
                                        extension.setHeadlessComponent(headlessJs);
                                    }
                                    // Also make sure the static resources for that static resource is available
                                    produceResources(runtimeExt, webJarBuildProducer, devUIWebJarProducer);
                                    unlistedExtensions.add(extension);
                                }
                            }
                        }
                    }
                    // NOTE(review): these sorts run once per visited extension rather than
                    // once after the loop — likely harmless but worth confirming.
                    Collections.sort(activeExtensions, sortingComparator);
                    Collections.sort(inactiveExtensions, sortingComparator);
                }
            } catch (IOException | RuntimeException e) {
                // don't abort, just log, to prevent a single extension from breaking entire dev ui
                log.error("Failed to process extension descriptor " + extYaml.toUri(), e);
            }
        });
    }
    // Also add footers for extensions that might not have a runtime
    if (!footerPagesMap.isEmpty()) {
        for (Map.Entry<String, List<FooterPageBuildItem>> footer : footerPagesMap.entrySet()) {
            List<FooterPageBuildItem> fbis = footer.getValue();
            for (FooterPageBuildItem footerPageBuildItem : fbis) {
                Extension deploymentOnlyExtension = new Extension();
                deploymentOnlyExtension.setName(footer.getKey());
                deploymentOnlyExtension.setNamespace(FOOTER_LOG_NAMESPACE);
                List<PageBuilder> footerPageBuilders = footerPageBuildItem.getPages();
                for (PageBuilder pageBuilder : footerPageBuilders) {
                    pageBuilder.namespace(deploymentOnlyExtension.getNamespace());
                    pageBuilder.extension(deploymentOnlyExtension.getName());
                    Page page = pageBuilder.build();
                    deploymentOnlyExtension.addFooterPage(page);
                }
                footerTabExtensions.add(deploymentOnlyExtension);
            }
        }
    }
    // Also add setting for extensions that might not have a runtime
    if (!settingPagesMap.isEmpty()) {
        for (Map.Entry<String, List<SettingPageBuildItem>> setting : settingPagesMap.entrySet()) {
            List<SettingPageBuildItem> sbis = setting.getValue();
            for (SettingPageBuildItem settingPageBuildItem : sbis) {
                Extension deploymentOnlyExtension = new Extension();
                deploymentOnlyExtension.setName(setting.getKey());
                List<PageBuilder> settingPageBuilders = settingPageBuildItem.getPages();
                for (PageBuilder pageBuilder : settingPageBuilders) {
                    pageBuilder.namespace(deploymentOnlyExtension.getNamespace());
                    pageBuilder.extension(deploymentOnlyExtension.getName());
                    Page page = pageBuilder.build();
                    deploymentOnlyExtension.addSettingPage(page);
                }
                settingTabExtensions.add(deploymentOnlyExtension);
            }
        }
    }
    // Also add unlisting pages for extensions that might not have a runtime
    if (!unlistedPagesMap.isEmpty()) {
        for (Map.Entry<String, List<UnlistedPageBuildItem>> setting : unlistedPagesMap.entrySet()) {
            List<UnlistedPageBuildItem> ubis = setting.getValue();
            for (UnlistedPageBuildItem unlistedPageBuildItem : ubis) {
                Extension deploymentOnlyExtension = new Extension();
                deploymentOnlyExtension.setName(setting.getKey());
                List<PageBuilder> unlistedPageBuilders = unlistedPageBuildItem.getPages();
                for (PageBuilder pageBuilder : unlistedPageBuilders) {
                    pageBuilder.namespace(deploymentOnlyExtension.getNamespace());
                    pageBuilder.extension(deploymentOnlyExtension.getName());
                    Page page = pageBuilder.build();
                    deploymentOnlyExtension.addUnlistedPage(page);
                }
                unlistedExtensions.add(deploymentOnlyExtension);
            }
        }
    }
    extensionsProducer.produce(
            new ExtensionsBuildItem(activeExtensions, inactiveExtensions, sectionMenuExtensions, footerTabExtensions,
                    settingTabExtensions, unlistedExtensions));
}
/**
 * Resolves and attaches library version links for an extension. Versions are looked up in the
 * application model's resolved dependencies: first from the card's declared libraries, then —
 * only when no card link matched — from the {@code LIB_GA} metadata entry.
 */
private void addLibraryLinks(Extension extension, CardPageBuildItem cardPageBuildItem,
        CurateOutcomeBuildItem curateOutcomeBuildItem, Map<String, Object> metaData) {
    boolean hasCardLibraries = cardPageBuildItem != null && cardPageBuildItem.hasLibraryVersions();
    // Nothing to resolve: skip building the dependency lookup map. (The original early-return
    // missed the cardPageBuildItem == null case and built the map for nothing.)
    if (!hasCardLibraries && !metaData.containsKey(LIB_GA)) {
        return;
    }
    // Build a "groupId:artifactId" -> version lookup map once
    Map<String, String> versionMap = curateOutcomeBuildItem.getApplicationModel().getDependencies().stream()
            .collect(Collectors.toMap(
                    rd -> rd.getGroupId() + ":" + rd.getArtifactId(),
                    ResolvedDependency::getVersion,
                    (existing, replacement) -> existing // keep the first one
            ));
    if (hasCardLibraries) {
        for (LibraryLink lib : cardPageBuildItem.getLibraryVersions()) {
            String key = lib.getGroupId() + ":" + lib.getArtifactId();
            String version = versionMap.get(key);
            if (version != null) {
                lib.setVersion(version);
                extension.addLibraryLink(lib);
            }
        }
    }
    if (metaData.containsKey(LIB_GA) && !extension.hasLibraryLinks()) {
        String libGA = (String) metaData.get(LIB_GA);
        Matcher matcher = libGAPattern.matcher(libGA);
        if (matcher.matches()) {
            String groupId = matcher.group(1);
            String artifactId = matcher.group(2);
            URL url = null;
            if (matcher.group(4) != null) {
                try {
                    url = URI.create(matcher.group(4)).toURL();
                } catch (MalformedURLException | IllegalArgumentException ex) {
                    // URI.create can throw IllegalArgumentException; log instead of
                    // printing to stderr so a bad URL cannot break the Dev UI build.
                    log.warn("Could not create library link URL [" + matcher.group(4) + "]", ex);
                }
            }
            String version = versionMap.get(groupId + ":" + artifactId);
            if (version != null) {
                LibraryLink l = new LibraryLink(groupId, artifactId, artifactId, url);
                l.setVersion(version);
                extension.addLibraryLink(l);
            }
        }
    }
}
/**
 * Applies a logo to the extension: a logo registered on the card page build item wins;
 * otherwise the icon-url metadata entry (if any) is used for both dark and light themes.
 *
 * @return {@code true} when a logo was set, {@code false} otherwise
 */
private boolean addLogo(Extension extension, CardPageBuildItem cardPageBuildItem, Map<String, Object> metaData) {
    boolean cardHasLogo = cardPageBuildItem != null && cardPageBuildItem.hasLogo();
    if (cardHasLogo) {
        extension.setLogo(cardPageBuildItem.getDarkLogo(), cardPageBuildItem.getLightLogo());
        return true;
    }
    if (metaData.containsKey(ICON_URL)) {
        String iconUrl = (String) metaData.get(ICON_URL);
        extension.setLogo(iconUrl, iconUrl);
        return true;
    }
    return false;
}
/**
 * Renders a metadata value as a string: a plain String is returned as-is, a List is joined
 * with {@code ", "}, anything else goes through {@link String#valueOf}. Returns {@code null}
 * when the key is absent.
 */
private String collectionToString(Map<String, Object> metaData, String key) {
    Object value = metaData.get(key);
    if (value == null) {
        return null;
    }
    if (value instanceof String) {
        return (String) value;
    }
    if (value instanceof List) {
        // Convert each element to its string form before joining; the original used a
        // raw List plus a redundant cast of the joined result.
        return ((List<?>) value).stream()
                .map(String::valueOf)
                .collect(Collectors.joining(", "));
    }
    return String.valueOf(value);
}
/**
 * Reads a metadata value as a list of strings: a String becomes a singleton list, a List is
 * returned as-is, anything else is stringified into a singleton list. Returns {@code null}
 * when the key is absent.
 */
private List<String> listOrString(Map<String, Object> metaData, String key) {
    Object value = metaData.get(key);
    if (value == null) {
        return null;
    }
    if (value instanceof String) {
        return List.of((String) value);
    }
    if (value instanceof List) {
        // The YAML descriptor is trusted build-time metadata; suppress on the narrowest
        // scope instead of returning a raw List as the original did.
        @SuppressWarnings("unchecked")
        List<String> list = (List<String>) value;
        return list;
    }
    return List.of(String.valueOf(value));
}
// Produces the static Dev UI web resources for one runtime extension. JavaScript files are
// rewritten so their "build-time-data" import points at the extension's own namespaced
// build-time data module.
private void produceResources(ResolvedDependency runtimeExt,
        BuildProducer<WebJarBuildItem> webJarBuildProducer,
        BuildProducer<DevUIWebJarBuildItem> devUIWebJarProducer) {
    String namespace = getNamespace(runtimeExt.getKey());
    if (namespace.isEmpty()) {
        // Internal resources fall back to the generic "devui" namespace.
        namespace = "devui";
    }
    String buildTimeDataImport = namespace + "-data";
    final GACT deploymentKey = getDeploymentKey(runtimeExt);
    webJarBuildProducer.produce(WebJarBuildItem.builder()
            .artifactKey(deploymentKey)
            .root(DEVUI + SLASH)
            .filter(new WebJarResourcesFilter() {
                @Override
                public WebJarResourcesFilter.FilterResult apply(String fileName, InputStream file) throws IOException {
                    if (fileName.endsWith(".js")) {
                        // Rewrite the build-time-data import to the namespaced module.
                        String content = new String(file.readAllBytes(), StandardCharsets.UTF_8);
                        content = content.replaceAll("build-time-data", buildTimeDataImport);
                        return new WebJarResourcesFilter.FilterResult(
                                new ByteArrayInputStream(content.getBytes(StandardCharsets.UTF_8)), true);
                    }
                    return new WebJarResourcesFilter.FilterResult(file, false);
                }
            })
            .build());
    devUIWebJarProducer.produce(new DevUIWebJarBuildItem(deploymentKey, DEVUI));
}
// Resolves the deployment artifact key for a runtime extension by reading the deployment
// coordinates from the extension's descriptor properties. Fails hard: a runtime extension
// without a readable descriptor or deployment artifact entry is a build error.
private static GACT getDeploymentKey(ResolvedDependency runtimeExt) {
    return runtimeExt.getContentTree().apply(BootstrapConstants.DESCRIPTOR_PATH, extPropsVisit -> {
        if (extPropsVisit == null) {
            throw new RuntimeException("Failed to locate " + BootstrapConstants.DESCRIPTOR_PATH
                    + " in " + runtimeExt.toCompactCoords());
        }
        final Properties props = new Properties();
        try (BufferedReader reader = Files.newBufferedReader(extPropsVisit.getPath())) {
            props.load(reader);
        } catch (IOException e) {
            throw new RuntimeException("Failed to read " + extPropsVisit.getUrl(), e);
        }
        final String deploymentCoords = props.getProperty(BootstrapConstants.PROP_DEPLOYMENT_ARTIFACT);
        if (deploymentCoords == null) {
            throw new RuntimeException(
                    "Failed to locate " + BootstrapConstants.PROP_DEPLOYMENT_ARTIFACT + " in " + extPropsVisit.getUrl());
        }
        // Drop the version: only the group/artifact/classifier/type identify the web jar.
        var coords = GACTV.fromString(deploymentCoords);
        return new GACT(coords.getGroupId(), coords.getArtifactId(), coords.getClassifier(), coords.getType());
    });
}
// Creates a Dev UI route for every produced web jar, mapping its namespace to the final
// on-disk destination of its static resources.
@BuildStep(onlyIf = IsLocalDevelopment.class)
void createAllRoutes(WebJarResultsBuildItem webJarResultsBuildItem,
        LaunchModeBuildItem launchModeBuildItem,
        List<DevUIWebJarBuildItem> devUIWebJarBuiltItems,
        BuildProducer<DevUIRoutesBuildItem> devUIRoutesProducer) {
    if (launchModeBuildItem.isNotLocalDevModeType()) {
        return;
    }
    for (DevUIWebJarBuildItem devUIWebJarBuiltItem : devUIWebJarBuiltItems) {
        WebJarResultsBuildItem.WebJarResult result = webJarResultsBuildItem
                .byArtifactKey(devUIWebJarBuiltItem.getArtifactKey());
        if (result != null) {
            String namespace = getNamespace(devUIWebJarBuiltItem.getArtifactKey());
            devUIRoutesProducer.produce(new DevUIRoutesBuildItem(namespace, devUIWebJarBuiltItem.getPath(),
                    result.getFinalDestination(), result.getWebRootConfigurations()));
        }
    }
}
/**
 * Derives the Dev UI namespace from an artifact key: the internal Dev UI resources artifact
 * maps to the empty namespace, deployment artifacts have their "-deployment" suffix
 * stripped, and anything else uses the artifact id verbatim.
 */
private String getNamespace(ArtifactKey artifactKey) {
    String artifactId = artifactKey.getArtifactId();
    if (artifactId.equals("quarkus-devui-resources")) {
        // Internal Dev UI resources.
        return "";
    }
    if (artifactId.endsWith("-deployment")) {
        // Strip the trailing "-deployment" (its hyphen is the last one in the id).
        return artifactId.substring(0, artifactId.lastIndexOf("-"));
    }
    return artifactId;
}
// Stamps the owning extension's namespace and name onto a page builder and builds the page.
// Qute-templated pages get their template rendered with the build-time data first.
private Page buildFinalPage(PageBuilder pageBuilder, Extension extension, Map<String, BuildTimeData> buildTimeData) {
    pageBuilder.namespace(extension.getNamespace());
    pageBuilder.extension(extension.getName());
    // TODO: Have a nice factory way to load this...
    // Some preprocessing for certain builds
    if (pageBuilder.getClass().equals(QuteDataPageBuilder.class)) {
        return buildQutePage(pageBuilder, buildTimeData);
    }
    return pageBuilder.build();
}
// Renders a Qute template page: loads the template from the classpath, formats it with the
// build-time data, and stores the rendered fragment as "htmlFragment" page metadata.
private Page buildQutePage(PageBuilder pageBuilder, Map<String, BuildTimeData> buildTimeData) {
    try {
        QuteDataPageBuilder quteDataPageBuilder = (QuteDataPageBuilder) pageBuilder;
        String templatePath = quteDataPageBuilder.getTemplatePath();
        // The template may resolve to multiple classpath roots; each match is rendered.
        ClassPathUtils.consumeAsPaths(templatePath, p -> {
            try {
                String template = Files.readString(p);
                // Unwrap BuildTimeData holders into plain values for the Qute model.
                Map<String, Object> contentMap = buildTimeData.entrySet().stream()
                        .collect(Collectors.toMap(
                                Map.Entry::getKey,
                                e -> e.getValue().getContent()));
                String fragment = Qute.fmt(template, contentMap);
                pageBuilder.metadata("htmlFragment", fragment);
            } catch (IOException ex) {
                throw new UncheckedIOException(ex);
            }
        });
    } catch (IOException ex) {
        throw new UncheckedIOException(ex);
    }
    return pageBuilder.build();
}
// Resolves a Jandex Type to a runtime Class. Void is rejected because a JSON-RPC method must
// return a value. NOTE(review): the raw Class return type is kept for source compatibility
// with existing callers; consider Class<?> in a follow-up.
private Class toClass(Type type) {
    if (type.kind().equals(Type.Kind.PRIMITIVE)) {
        // Primitives can be resolved straight from the Jandex model.
        return JandexReflection.loadRawType(type);
    } else if (type.kind().equals(Type.Kind.VOID)) {
        throw new RuntimeException("Void method return detected, JsonRPC Method needs to return something.");
    } else {
        try {
            // Load reference types through the TCCL so application classes are visible.
            return tccl.loadClass(type.name().toString());
        } catch (ClassNotFoundException ex) {
            throw new RuntimeException(ex);
        }
    }
}
/**
 * Indexes card page build items by their extension path name. When two items share a name
 * the later one wins, matching plain Map.put semantics.
 */
private Map<String, CardPageBuildItem> getCardPagesMap(CurateOutcomeBuildItem curateOutcomeBuildItem,
        List<CardPageBuildItem> pages) {
    Map<String, CardPageBuildItem> byNamespace = new HashMap<>();
    for (CardPageBuildItem page : pages) {
        byNamespace.put(page.getExtensionPathName(curateOutcomeBuildItem), page);
    }
    return byNamespace;
}
// Indexes menu page build items by their extension path name (last one wins on collision).
private Map<String, MenuPageBuildItem> getMenuPagesMap(CurateOutcomeBuildItem curateOutcomeBuildItem,
        List<MenuPageBuildItem> pages) {
    Map<String, MenuPageBuildItem> m = new HashMap<>();
    for (MenuPageBuildItem pageBuildItem : pages) {
        m.put(pageBuildItem.getExtensionPathName(curateOutcomeBuildItem), pageBuildItem);
    }
    return m;
}
/**
 * Groups footer page build items by their extension path name.
 */
private Map<String, List<FooterPageBuildItem>> getFooterPagesMap(CurateOutcomeBuildItem curateOutcomeBuildItem,
        List<FooterPageBuildItem> pages) {
    Map<String, List<FooterPageBuildItem>> m = new HashMap<>();
    for (FooterPageBuildItem pageBuildItem : pages) {
        String key = pageBuildItem.getExtensionPathName(curateOutcomeBuildItem);
        // computeIfAbsent replaces the manual containsKey/put grouping dance.
        m.computeIfAbsent(key, k -> new ArrayList<>()).add(pageBuildItem);
    }
    return m;
}
/**
 * Groups setting page build items by their extension path name.
 */
private Map<String, List<SettingPageBuildItem>> getSettingPagesMap(CurateOutcomeBuildItem curateOutcomeBuildItem,
        List<SettingPageBuildItem> pages) {
    Map<String, List<SettingPageBuildItem>> m = new HashMap<>();
    for (SettingPageBuildItem pageBuildItem : pages) {
        String key = pageBuildItem.getExtensionPathName(curateOutcomeBuildItem);
        // computeIfAbsent replaces the manual containsKey/put grouping dance.
        m.computeIfAbsent(key, k -> new ArrayList<>()).add(pageBuildItem);
    }
    return m;
}
/**
 * Groups unlisted page build items by their extension path name.
 */
private Map<String, List<UnlistedPageBuildItem>> getUnlistedPagesMap(CurateOutcomeBuildItem curateOutcomeBuildItem,
        List<UnlistedPageBuildItem> pages) {
    Map<String, List<UnlistedPageBuildItem>> m = new HashMap<>();
    for (UnlistedPageBuildItem pageBuildItem : pages) {
        String key = pageBuildItem.getExtensionPathName(curateOutcomeBuildItem);
        // computeIfAbsent replaces the manual containsKey/put grouping dance.
        m.computeIfAbsent(key, k -> new ArrayList<>()).add(pageBuildItem);
    }
    return m;
}
// Derives an extension's namespace (its artifact id) from the descriptor map, supporting
// both the current "artifact" coordinates and the legacy Quarkus 1.x "artifact-id" key.
// NOTE(review): the error message mentions 'group-id' but only 'artifact-id' is read here —
// confirm whether group-id was ever required.
private String getExtensionNamespace(Map<String, Object> extensionMap) {
    final String artifactId;
    final String artifact = (String) extensionMap.get("artifact");
    if (artifact == null) {
        // trying quarkus 1.x format
        artifactId = (String) extensionMap.get("artifact-id");
        if (artifactId == null) {
            throw new RuntimeException(
                    "Failed to locate 'artifact' or 'group-id' and 'artifact-id' among metadata keys "
                            + extensionMap.keySet());
        }
    } else {
        final GACTV coords = GACTV.fromString(artifact);
        artifactId = coords.getArtifactId();
    }
    return artifactId;
}
// Sort extensions with a guide first, then alphabetically by name.
// The previous implementation returned 1 from compare(a, b) AND compare(b, a) when both
// guides were null, violating the Comparator contract (antisymmetry), which can make
// Collections.sort throw "Comparison method violates its general contract!".
private final Comparator<Extension> sortingComparator = new Comparator<Extension>() {
    @Override
    public int compare(Extension t, Extension t1) {
        boolean hasGuide = t.getGuide() != null;
        boolean otherHasGuide = t1.getGuide() != null;
        if (hasGuide == otherHasGuide) {
            // Both (or neither) have a guide: fall back to alphabetical order.
            return t.getName().compareTo(t1.getName());
        }
        // Guide-carrying extensions sort before guide-less ones.
        return hasGuide ? -1 : 1;
    }
};
// Simple value pair of an ISO-style language/country code and its display name.
static record LanguageCountry(
        String code,
        String name) {
}
}
| DevUIProcessor |
java | apache__flink | flink-python/src/main/java/org/apache/flink/streaming/api/operators/python/embedded/EmbeddedPythonKeyedProcessOperator.java | {
"start": 2657,
"end": 6396
} | class ____<K, IN, OUT>
extends AbstractOneInputEmbeddedPythonFunctionOperator<IN, OUT>
implements Triggerable<K, VoidNamespace> {
private static final long serialVersionUID = 1L;
/** The TypeInformation of the key. */
private transient TypeInformation<K> keyTypeInfo;
private transient ContextImpl context;
private transient OnTimerContextImpl onTimerContext;
private transient PythonTypeUtils.DataConverter<K, Object> keyConverter;
public EmbeddedPythonKeyedProcessOperator(
Configuration config,
DataStreamPythonFunctionInfo pythonFunctionInfo,
TypeInformation<IN> inputTypeInfo,
TypeInformation<OUT> outputTypeInfo) {
super(config, pythonFunctionInfo, inputTypeInfo, outputTypeInfo);
}
@Override
public void open() throws Exception {
keyTypeInfo = ((RowTypeInfo) this.getInputTypeInfo()).getTypeAt(0);
keyConverter = PythonTypeUtils.TypeInfoToDataConverter.typeInfoDataConverter(keyTypeInfo);
InternalTimerService<VoidNamespace> internalTimerService =
getInternalTimerService("user-timers", VoidNamespaceSerializer.INSTANCE, this);
TimerService timerService = new SimpleTimerService(internalTimerService);
context = new ContextImpl(timerService);
onTimerContext = new OnTimerContextImpl(timerService);
super.open();
}
@Override
public List<FlinkFnApi.UserDefinedDataStreamFunction> createUserDefinedFunctionsProto() {
return ProtoUtils.createUserDefinedDataStreamStatefulFunctionProtos(
getPythonFunctionInfo(),
getRuntimeContext(),
getJobParameters(),
keyTypeInfo,
inBatchExecutionMode(getKeyedStateBackend()),
config.get(PYTHON_METRIC_ENABLED),
config.get(PYTHON_PROFILE_ENABLED),
hasSideOutput,
config.get(STATE_CACHE_SIZE),
config.get(MAP_STATE_READ_CACHE_SIZE),
config.get(MAP_STATE_WRITE_CACHE_SIZE));
}
@Override
public void onEventTime(InternalTimer<K, VoidNamespace> timer) throws Exception {
collector.setAbsoluteTimestamp(timer.getTimestamp());
invokeUserFunction(TimeDomain.EVENT_TIME, timer);
}
@Override
public void onProcessingTime(InternalTimer<K, VoidNamespace> timer) throws Exception {
collector.eraseTimestamp();
invokeUserFunction(TimeDomain.PROCESSING_TIME, timer);
}
@Override
public Object getFunctionContext() {
return context;
}
@Override
public Object getTimerContext() {
return onTimerContext;
}
@Override
public <T> AbstractEmbeddedDataStreamPythonFunctionOperator<T> copy(
DataStreamPythonFunctionInfo pythonFunctionInfo, TypeInformation<T> outputTypeInfo) {
return new EmbeddedPythonKeyedProcessOperator<>(
config, pythonFunctionInfo, getInputTypeInfo(), outputTypeInfo);
}
private void invokeUserFunction(TimeDomain timeDomain, InternalTimer<K, VoidNamespace> timer)
throws Exception {
onTimerContext.timeDomain = timeDomain;
onTimerContext.timer = timer;
PyIterator results =
(PyIterator)
interpreter.invokeMethod("operation", "on_timer", timer.getTimestamp());
while (results.hasNext()) {
OUT result = outputDataConverter.toInternal(results.next());
collector.collect(result);
}
results.close();
onTimerContext.timeDomain = null;
onTimerContext.timer = null;
}
private | EmbeddedPythonKeyedProcessOperator |
java | qos-ch__slf4j | slf4j-migrator/src/main/java/org/slf4j/migrator/RuleSetFactory.java | {
"start": 1461,
"end": 1620
} | class ____ Pattern matching with java.util.regex using Patterns defined
* in concrete implementations
*
* @author jean-noelcharpin
*
*/
public abstract | runs |
java | google__error-prone | core/src/main/java/com/google/errorprone/bugpatterns/formatstring/LenientFormatStringUtils.java | {
"start": 1149,
"end": 3308
} | class ____ {
/**
* Returns the index of the lenient format string parameter in {@code tree}, or {@code -1} if
* there is none.
*/
public static int getLenientFormatStringPosition(ExpressionTree tree, VisitorState state) {
for (LenientFormatMethod method : LENIENT_FORMATTING_METHODS) {
if (method.matcher().matches(tree, state)) {
if (tree instanceof MethodInvocationTree methodInvocation
&& method.formatStringPosition < methodInvocation.getArguments().size()) {
// e.g. Preconditions.checkNotNull(String) isn't a format method
return method.formatStringPosition;
}
}
}
return -1;
}
private static final ImmutableList<LenientFormatMethod> LENIENT_FORMATTING_METHODS =
ImmutableList.of(
new LenientFormatMethod(
staticMethod()
.onClass("com.google.common.base.Preconditions")
.withNameMatching(compile("^check(?!ElementIndex|PositionIndex).*")),
1),
new LenientFormatMethod(
staticMethod()
.onClass("com.google.common.base.Verify")
.withNameMatching(compile("^verify.*")),
1),
new LenientFormatMethod(
staticMethod().onClass("com.google.common.base.Strings").named("lenientFormat"), 0),
new LenientFormatMethod(
staticMethod().onClass("com.google.common.truth.Truth").named("assertWithMessage"),
0),
new LenientFormatMethod(
instanceMethod().onDescendantOf("com.google.common.truth.Subject").named("check"), 0),
new LenientFormatMethod(
instanceMethod()
.onDescendantOf("com.google.common.truth.StandardSubjectBuilder")
.named("withMessage"),
0));
/**
* @param formatStringPosition position of the format string; we assume every argument afterwards
* is a format argument.
*/
private record LenientFormatMethod(Matcher<ExpressionTree> matcher, int formatStringPosition) {}
private LenientFormatStringUtils() {}
}
| LenientFormatStringUtils |
java | elastic__elasticsearch | test/framework/src/test/java/org/elasticsearch/test/compiler/InMemoryJavaCompilerTests.java | {
"start": 2169,
"end": 2940
} | class ____ implements java.util.function.Supplier<String> {
@Override public String get() {
return "Hello World!";
}
}
""");
var result = compile(sources);
assertThat(result, notNullValue());
assertThat(result, allOf(hasEntry(is("module-info"), notNullValue()), hasEntry(is("p.Foo"), notNullValue())));
}
public void testCompileModuleProvider() {
Map<String, CharSequence> sources = Map.of("module-info", """
module x.foo.impl {
exports p;
opens q;
provides java.util.function.IntSupplier with p.FooIntSupplier;
}
""", "p.FooIntSupplier", """
package p;
public | Foo |
java | apache__flink | flink-streaming-java/src/test/java/org/apache/flink/streaming/api/graph/StreamingJobGraphGeneratorWithOperatorAttributesTest.java | {
"start": 2434,
"end": 18644
} | class ____ {
@Test
void testOutputOnlyAfterEndOfStreamEnableChain() {
final StreamExecutionEnvironment env =
StreamExecutionEnvironment.getExecutionEnvironment(new Configuration());
final DataStream<Integer> source = env.fromData(1, 2, 3).name("source");
source.keyBy(x -> x)
.transform(
"transform",
Types.INT,
new StreamOperatorWithConfigurableOperatorAttributes<>(
x -> x,
new OperatorAttributesBuilder()
.setOutputOnlyAfterEndOfStream(true)
.build()))
.map(x -> x)
.sinkTo(new DiscardingSink<>())
.disableChaining()
.name("sink");
final StreamGraph streamGraph = env.getStreamGraph(false);
Map<String, StreamNode> nodeMap = new HashMap<>();
for (StreamNode node : streamGraph.getStreamNodes()) {
nodeMap.put(node.getOperatorName(), node);
}
assertThat(nodeMap).hasSize(4);
assertThat(nodeMap.get("Source: source").isOutputOnlyAfterEndOfStream()).isFalse();
assertThat(nodeMap.get("transform").isOutputOnlyAfterEndOfStream()).isTrue();
assertThat(nodeMap.get("Map").isOutputOnlyAfterEndOfStream()).isFalse();
assertThat(nodeMap.get("sink: Writer").isOutputOnlyAfterEndOfStream()).isFalse();
assertManagedMemoryWeightsSize(nodeMap.get("Source: source"), 0);
assertManagedMemoryWeightsSize(nodeMap.get("transform"), 1);
assertManagedMemoryWeightsSize(nodeMap.get("Map"), 0);
assertManagedMemoryWeightsSize(nodeMap.get("sink: Writer"), 0);
JobGraph jobGraph = StreamingJobGraphGenerator.createJobGraph(streamGraph);
Map<String, JobVertex> vertexMap = new HashMap<>();
for (JobVertex vertex : jobGraph.getVertices()) {
vertexMap.put(vertex.getName(), vertex);
}
assertThat(vertexMap).hasSize(3);
assertHasOutputPartitionType(
vertexMap.get("Source: source"), ResultPartitionType.PIPELINED_BOUNDED);
assertHasOutputPartitionType(
vertexMap.get("transform -> Map"), ResultPartitionType.BLOCKING);
assertThat(vertexMap.get("Source: source").isAnyOutputBlocking()).isFalse();
assertThat(vertexMap.get("transform -> Map").isAnyOutputBlocking()).isTrue();
assertThat(vertexMap.get("sink: Writer").isAnyOutputBlocking()).isFalse();
}
@Test
void testOutputOnlyAfterEndOfStreamDisableChain() {
final StreamExecutionEnvironment env =
StreamExecutionEnvironment.getExecutionEnvironment(new Configuration());
final DataStream<Integer> source = env.fromData(1, 2, 3).name("source");
source.keyBy(x -> x)
.transform(
"transform",
Types.INT,
new StreamOperatorWithConfigurableOperatorAttributes<>(
x -> x,
new OperatorAttributesBuilder()
.setOutputOnlyAfterEndOfStream(true)
.build()))
.map(x -> x)
.sinkTo(new DiscardingSink<>())
.disableChaining()
.name("sink");
final StreamGraph streamGraph = env.getStreamGraph(false);
Map<String, StreamNode> nodeMap = new HashMap<>();
for (StreamNode node : streamGraph.getStreamNodes()) {
nodeMap.put(node.getOperatorName(), node);
}
assertThat(nodeMap).hasSize(4);
assertThat(nodeMap.get("Source: source").isOutputOnlyAfterEndOfStream()).isFalse();
assertThat(nodeMap.get("transform").isOutputOnlyAfterEndOfStream()).isTrue();
assertThat(nodeMap.get("Map").isOutputOnlyAfterEndOfStream()).isFalse();
assertThat(nodeMap.get("sink: Writer").isOutputOnlyAfterEndOfStream()).isFalse();
assertManagedMemoryWeightsSize(nodeMap.get("Source: source"), 0);
assertManagedMemoryWeightsSize(nodeMap.get("transform"), 1);
assertManagedMemoryWeightsSize(nodeMap.get("Map"), 0);
assertManagedMemoryWeightsSize(nodeMap.get("sink: Writer"), 0);
env.disableOperatorChaining();
JobGraph jobGraph = StreamingJobGraphGenerator.createJobGraph(env.getStreamGraph(false));
Map<String, JobVertex> vertexMap = new HashMap<>();
for (JobVertex vertex : jobGraph.getVertices()) {
vertexMap.put(vertex.getName(), vertex);
}
assertThat(vertexMap).hasSize(4);
assertHasOutputPartitionType(
vertexMap.get("Source: source"), ResultPartitionType.PIPELINED_BOUNDED);
assertHasOutputPartitionType(vertexMap.get("transform"), ResultPartitionType.BLOCKING);
assertHasOutputPartitionType(vertexMap.get("Map"), ResultPartitionType.PIPELINED_BOUNDED);
assertThat(vertexMap.get("Source: source").isAnyOutputBlocking()).isFalse();
assertThat(vertexMap.get("transform").isAnyOutputBlocking()).isTrue();
assertThat(vertexMap.get("Map").isAnyOutputBlocking()).isFalse();
assertThat(vertexMap.get("sink: Writer").isAnyOutputBlocking()).isFalse();
}
@Test
void testOutputOnlyAfterEndOfStreamPropagateToUpstreamWithinChain() {
final StreamExecutionEnvironment env =
StreamExecutionEnvironment.getExecutionEnvironment(new Configuration());
final DataStream<Integer> source = env.fromData(1, 2, 3).name("source");
source.keyBy(x -> x)
.map(x -> x)
.transform(
"transform",
Types.INT,
new StreamOperatorWithConfigurableOperatorAttributes<>(
x -> x,
new OperatorAttributesBuilder()
.setOutputOnlyAfterEndOfStream(true)
.build()))
.sinkTo(new DiscardingSink<>())
.disableChaining()
.name("sink");
final StreamGraph streamGraph = env.getStreamGraph(false);
Map<String, StreamNode> nodeMap = new HashMap<>();
for (StreamNode node : streamGraph.getStreamNodes()) {
nodeMap.put(node.getOperatorName(), node);
}
assertThat(nodeMap).hasSize(4);
assertThat(nodeMap.get("Source: source").isOutputOnlyAfterEndOfStream()).isFalse();
assertThat(nodeMap.get("transform").isOutputOnlyAfterEndOfStream()).isTrue();
assertThat(nodeMap.get("Map").isOutputOnlyAfterEndOfStream()).isFalse();
assertThat(nodeMap.get("sink: Writer").isOutputOnlyAfterEndOfStream()).isFalse();
assertManagedMemoryWeightsSize(nodeMap.get("Source: source"), 0);
assertManagedMemoryWeightsSize(nodeMap.get("Map"), 0);
assertManagedMemoryWeightsSize(nodeMap.get("transform"), 0);
assertManagedMemoryWeightsSize(nodeMap.get("sink: Writer"), 0);
JobGraph jobGraph = StreamingJobGraphGenerator.createJobGraph(streamGraph);
Map<String, JobVertex> vertexMap = new HashMap<>();
for (JobVertex vertex : jobGraph.getVertices()) {
vertexMap.put(vertex.getName(), vertex);
}
assertThat(vertexMap).hasSize(3);
assertHasOutputPartitionType(
vertexMap.get("Source: source"), ResultPartitionType.PIPELINED_BOUNDED);
assertHasOutputPartitionType(
vertexMap.get("Map -> transform"), ResultPartitionType.BLOCKING);
assertThat(vertexMap.get("Source: source").isAnyOutputBlocking()).isFalse();
assertThat(vertexMap.get("Map -> transform").isAnyOutputBlocking()).isTrue();
assertThat(vertexMap.get("sink: Writer").isAnyOutputBlocking()).isFalse();
}
@Test
void testApplyBatchExecutionSettingsOnTwoInputOperator() {
final StreamExecutionEnvironment env =
StreamExecutionEnvironment.getExecutionEnvironment(new Configuration());
final DataStream<Integer> source1 = env.fromData(1, 2, 3).name("source1");
final DataStream<Integer> source2 = env.fromData(1, 2, 3).name("source2");
source1.keyBy(x -> x)
.connect(source2.keyBy(x -> x))
.transform(
"transform",
Types.INT,
new TwoInputStreamOperatorWithConfigurableOperatorAttributes<>(
new OperatorAttributesBuilder()
.setOutputOnlyAfterEndOfStream(true)
.build()))
.sinkTo(new DiscardingSink<>())
.name("sink");
final StreamGraph streamGraph = env.getStreamGraph(false);
Map<String, StreamNode> nodeMap = new HashMap<>();
for (StreamNode node : streamGraph.getStreamNodes()) {
nodeMap.put(node.getOperatorName(), node);
}
assertThat(nodeMap).hasSize(4);
assertManagedMemoryWeightsSize(nodeMap.get("Source: source1"), 0);
assertManagedMemoryWeightsSize(nodeMap.get("Source: source2"), 0);
assertManagedMemoryWeightsSize(nodeMap.get("transform"), 1);
assertManagedMemoryWeightsSize(nodeMap.get("sink: Writer"), 0);
}
@Test
void testOneInputOperatorWithInternalSorterSupported() {
final StreamExecutionEnvironment env =
StreamExecutionEnvironment.getExecutionEnvironment(new Configuration());
final DataStream<Integer> source1 = env.fromData(1, 2, 3).name("source1");
source1.keyBy(x -> x)
.transform(
"internalSorter",
Types.INT,
new StreamOperatorWithConfigurableOperatorAttributes<>(
(MapFunction<Integer, Integer>) value -> value,
new OperatorAttributesBuilder()
.setOutputOnlyAfterEndOfStream(true)
.setInternalSorterSupported(true)
.build()))
.keyBy(x -> x)
.transform(
"noInternalSorter",
Types.INT,
new StreamOperatorWithConfigurableOperatorAttributes<>(
(MapFunction<Integer, Integer>) value -> value,
new OperatorAttributesBuilder()
.setOutputOnlyAfterEndOfStream(true)
.build()))
.sinkTo(new DiscardingSink<>())
.name("sink");
final StreamGraph streamGraph = env.getStreamGraph(false);
Map<String, StreamNode> nodeMap = new HashMap<>();
for (StreamNode node : streamGraph.getStreamNodes()) {
nodeMap.put(node.getOperatorName(), node);
}
assertThat(nodeMap.get("internalSorter").getInputRequirements()).isEmpty();
assertThat(nodeMap.get("noInternalSorter").getInputRequirements().get(0))
.isEqualTo(StreamConfig.InputRequirement.SORTED);
}
@Test
void testTwoInputOperatorWithInternalSorterSupported() {
final StreamExecutionEnvironment env =
StreamExecutionEnvironment.getExecutionEnvironment(new Configuration());
final DataStream<Integer> source1 = env.fromData(1, 2, 3).name("source1");
final DataStream<Integer> source2 = env.fromData(1, 2, 3).name("source2");
source1.keyBy(x -> x)
.connect(source2.keyBy(x -> x))
.transform(
"internalSorter",
Types.INT,
new TwoInputStreamOperatorWithConfigurableOperatorAttributes<>(
new OperatorAttributesBuilder()
.setOutputOnlyAfterEndOfStream(true)
.setInternalSorterSupported(true)
.build()))
.keyBy(x -> x)
.connect(source2.keyBy(x -> x))
.transform(
"noInternalSorter",
Types.INT,
new TwoInputStreamOperatorWithConfigurableOperatorAttributes<>(
new OperatorAttributesBuilder()
.setOutputOnlyAfterEndOfStream(true)
.build()))
.sinkTo(new DiscardingSink<>())
.name("sink");
final StreamGraph streamGraph = env.getStreamGraph(false);
Map<String, StreamNode> nodeMap = new HashMap<>();
for (StreamNode node : streamGraph.getStreamNodes()) {
nodeMap.put(node.getOperatorName(), node);
}
assertThat(nodeMap.get("internalSorter").getInputRequirements()).isEmpty();
assertThat(nodeMap.get("noInternalSorter").getInputRequirements().get(0))
.isEqualTo(StreamConfig.InputRequirement.SORTED);
assertThat(nodeMap.get("noInternalSorter").getInputRequirements().get(1))
.isEqualTo(StreamConfig.InputRequirement.SORTED);
}
@Test
void testMultipleInputOperatorWithInternalSorterSupported() {
final StreamExecutionEnvironment env =
StreamExecutionEnvironment.getExecutionEnvironment(new Configuration());
final DataStream<Integer> source1 = env.fromData(1, 2, 3).name("source1");
final DataStream<Integer> source2 = env.fromData(1, 2, 3).name("source2");
KeyedMultipleInputTransformation<Integer> transform =
new KeyedMultipleInputTransformation<>(
"internalSorter",
new OperatorAttributesConfigurableOperatorFactory<>(
new OperatorAttributesBuilder()
.setOutputOnlyAfterEndOfStream(true)
.setInternalSorterSupported(true)
.build()),
BasicTypeInfo.INT_TYPE_INFO,
3,
BasicTypeInfo.INT_TYPE_INFO);
transform.addInput(source1.keyBy(x -> x).getTransformation(), x -> x);
transform.addInput(source2.getTransformation(), null);
KeyedMultipleInputTransformation<Integer> transform2 =
new KeyedMultipleInputTransformation<>(
"noInternalSorter",
new OperatorAttributesConfigurableOperatorFactory<>(
new OperatorAttributesBuilder()
.setOutputOnlyAfterEndOfStream(true)
.build()),
BasicTypeInfo.INT_TYPE_INFO,
3,
BasicTypeInfo.INT_TYPE_INFO);
transform2.addInput(transform, null);
transform2.addInput(source2.keyBy(x -> x).getTransformation(), x -> x);
new DataStream<>(env, transform2).sinkTo(new DiscardingSink<>());
final StreamGraph streamGraph = env.getStreamGraph(false);
Map<String, StreamNode> nodeMap = new HashMap<>();
for (StreamNode node : streamGraph.getStreamNodes()) {
nodeMap.put(node.getOperatorName(), node);
}
assertThat(nodeMap.get("internalSorter").getInputRequirements()).isEmpty();
assertThat(nodeMap.get("noInternalSorter").getInputRequirements().get(0))
.isEqualTo(StreamConfig.InputRequirement.PASS_THROUGH);
assertThat(nodeMap.get("noInternalSorter").getInputRequirements().get(1))
.isEqualTo(StreamConfig.InputRequirement.SORTED);
}
private static | StreamingJobGraphGeneratorWithOperatorAttributesTest |
java | mapstruct__mapstruct | processor/src/test/java/org/mapstruct/ap/test/oneway/Source.java | {
"start": 194,
"end": 529
} | class ____ {
private final int foo = 42;
private int bar;
private final long qax = 23L;
public int getFoo() {
return foo;
}
public void setBar(int bar) {
this.bar = bar;
}
public int retrieveBar() {
return bar;
}
public long getQax() {
return qax;
}
}
| Source |
java | apache__spark | common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/protocol/BlockPushReturnCode.java | {
"start": 1378,
"end": 2895
} | class ____ extends BlockTransferMessage {
public final byte returnCode;
// Block ID of the block that experiences a non-fatal block push failure.
// Will be an empty string for any successfully pushed block.
public final String failureBlockId;
public BlockPushReturnCode(byte returnCode, String failureBlockId) {
Objects.requireNonNull(BlockPushNonFatalFailure.getReturnCode(returnCode));
this.returnCode = returnCode;
this.failureBlockId = failureBlockId;
}
@Override
protected Type type() {
return Type.PUSH_BLOCK_RETURN_CODE;
}
@Override
public int hashCode() {
return Objects.hash(returnCode, failureBlockId);
}
@Override
public String toString() {
return "BlockPushReturnCode[returnCode=" + returnCode +
",failureBlockId=" + failureBlockId + "]";
}
@Override
public boolean equals(Object other) {
if (other instanceof BlockPushReturnCode o) {
return returnCode == o.returnCode && Objects.equals(failureBlockId, o.failureBlockId);
}
return false;
}
@Override
public int encodedLength() {
return 1 + Encoders.Strings.encodedLength(failureBlockId);
}
@Override
public void encode(ByteBuf buf) {
buf.writeByte(returnCode);
Encoders.Strings.encode(buf, failureBlockId);
}
public static BlockPushReturnCode decode(ByteBuf buf) {
byte type = buf.readByte();
String failureBlockId = Encoders.Strings.decode(buf);
return new BlockPushReturnCode(type, failureBlockId);
}
}
| BlockPushReturnCode |
java | apache__kafka | generator/src/main/java/org/apache/kafka/message/ClauseGenerator.java | {
"start": 873,
"end": 941
} | interface ____ {
void generate(Versions versions);
}
| ClauseGenerator |
java | spring-projects__spring-security | oauth2/oauth2-authorization-server/src/main/java/org/springframework/security/oauth2/server/authorization/OAuth2AuthorizationServerMetadata.java | {
"start": 1371,
"end": 2231
} | class ____ extends AbstractOAuth2AuthorizationServerMetadata {
@Serial
private static final long serialVersionUID = 3993358339217009284L;
private OAuth2AuthorizationServerMetadata(Map<String, Object> claims) {
super(claims);
}
/**
* Constructs a new {@link Builder} with empty claims.
* @return the {@link Builder}
*/
public static Builder builder() {
return new Builder();
}
/**
* Constructs a new {@link Builder} with the provided claims.
* @param claims the claims to initialize the builder
* @return the {@link Builder}
*/
public static Builder withClaims(Map<String, Object> claims) {
Assert.notEmpty(claims, "claims cannot be empty");
return new Builder().claims((c) -> c.putAll(claims));
}
/**
* Helps configure an {@link OAuth2AuthorizationServerMetadata}.
*/
public static final | OAuth2AuthorizationServerMetadata |
java | quarkusio__quarkus | extensions/smallrye-graphql-client/deployment/src/test/java/io/quarkus/smallrye/graphql/client/deployment/TypesafeGraphQLClientInjectionTest.java | {
"start": 715,
"end": 2105
} | class ____ {
static String url = "http://" + System.getProperty("quarkus.http.host", "localhost") + ":" +
System.getProperty("quarkus.http.test-port", "8081") + "/graphql";
@RegisterExtension
static QuarkusUnitTest test = new QuarkusUnitTest()
.withApplicationRoot((jar) -> jar
.addClasses(TestingGraphQLApi.class, TestingGraphQLClientApi.class, Person.class, PersonDto.class)
.addAsResource(new StringAsset("typesafeclient/mp-graphql/url=" + url + "\n" +
"typesafeclient/mp-graphql/header/My-Header=My-Value"),
"application.properties")
.addAsManifestResource(EmptyAsset.INSTANCE, "beans.xml"));
@Inject
TestingGraphQLClientApi client;
@Test
public void performQuery() {
List<Person> people = client.people();
assertEquals("John", people.get(0).getFirstName());
assertEquals("Arthur", people.get(1).getFirstName());
}
/**
* Verify that configured HTTP headers are applied by the client.
* We do this by asking the server side to read the header received from the client and send
* its value back to the client.
*/
@Test
public void checkHeaders() {
assertEquals("My-Value", client.returnHeader("My-Header"));
}
}
| TypesafeGraphQLClientInjectionTest |
java | hibernate__hibernate-orm | hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/HANALegacyDialect.java | {
"start": 48032,
"end": 50178
} | class ____ implements NClob {
private String data;
public MaterializedNClob(String data) {
this.data = data;
}
@Override
public void truncate(long len) throws SQLException {
this.data = "";
}
@Override
public int setString(long pos, String str, int offset, int len) throws SQLException {
this.data = this.data.substring( 0, (int) ( pos - 1 ) ) + str.substring( offset, offset + len )
+ this.data.substring( (int) ( pos - 1 + len ) );
return len;
}
@Override
public int setString(long pos, String str) throws SQLException {
this.data = this.data.substring( 0, (int) ( pos - 1 ) ) + str + this.data.substring( (int) ( pos - 1 + str.length() ) );
return str.length();
}
@Override
public Writer setCharacterStream(long pos) throws SQLException {
throw new SQLFeatureNotSupportedException();
}
@Override
public OutputStream setAsciiStream(long pos) throws SQLException {
throw new SQLFeatureNotSupportedException();
}
@Override
public long position(Clob searchstr, long start) throws SQLException {
return this.data.indexOf( extractString( searchstr ), (int) ( start - 1 ) );
}
@Override
public long position(String searchstr, long start) throws SQLException {
return this.data.indexOf( searchstr, (int) ( start - 1 ) );
}
@Override
public long length() throws SQLException {
return this.data.length();
}
@Override
public String getSubString(long pos, int length) throws SQLException {
return this.data.substring( (int) ( pos - 1 ), (int) ( pos - 1 + length ) );
}
@Override
public Reader getCharacterStream(long pos, long length) throws SQLException {
return new StringReader( this.data.substring( (int) ( pos - 1 ), (int) ( pos - 1 + length ) ) );
}
@Override
public Reader getCharacterStream() throws SQLException {
return new StringReader( this.data );
}
@Override
public InputStream getAsciiStream() {
return new ByteArrayInputStream( this.data.getBytes( StandardCharsets.ISO_8859_1 ) );
}
@Override
public void free() throws SQLException {
this.data = null;
}
}
private static | MaterializedNClob |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/length/WithLongTypeStrings.java | {
"start": 342,
"end": 535
} | class ____ {
@Id
@GeneratedValue
public int id;
@JdbcTypeCode(SqlTypes.LONGVARCHAR)
public String longish;
@JdbcTypeCode(SqlTypes.LONG32VARCHAR)
public String long32;
}
| WithLongTypeStrings |
java | google__auto | value/src/test/java/com/google/auto/value/processor/AutoValueCompilationTest.java | {
"start": 26196,
"end": 26650
} | class ____ {",
" public abstract String buh();",
" public Nested create(String buh) {",
" return new AutoValue_Baz_Private_Nested(buh);",
" }",
" }",
" }",
"}");
Compilation compilation =
javac().withProcessors(new AutoValueProcessor()).compile(javaFileObject);
assertThat(compilation)
.hadErrorContaining("@AutoValue | Nested |
java | spring-projects__spring-framework | spring-core/src/test/java/org/springframework/core/annotation/AnnotationUtilsTests.java | {
"start": 56489,
"end": 56617
} | interface ____ {
ContextConfig[] value();
}
@Hierarchy({@ContextConfig("A"), @ContextConfig(location = "B")})
static | Hierarchy |
java | spring-projects__spring-boot | core/spring-boot-test/src/main/java/org/springframework/boot/test/json/JacksonTester.java | {
"start": 6971,
"end": 7365
} | class ____ extends FieldInitializer<JsonMapper> {
protected JacksonFieldInitializer() {
super(JacksonTester.class);
}
@Override
protected AbstractJsonMarshalTester<Object> createTester(Class<?> resourceLoadClass, ResolvableType type,
JsonMapper marshaller) {
return new JacksonTester<>(resourceLoadClass, type, marshaller);
}
}
private static final | JacksonFieldInitializer |
java | apache__camel | core/camel-core/src/test/java/org/apache/camel/processor/RouteContextProcessorManualTest.java | {
"start": 3372,
"end": 3785
} | class ____ implements Processor {
public static final long MIN_PROCESS_TIME = 5;
public static final long MAX_PROCESS_TIME = 50;
@Override
public void process(Exchange arg0) throws Exception {
long processTime = (long) (MIN_PROCESS_TIME + Math.random() * (MAX_PROCESS_TIME - MIN_PROCESS_TIME));
Thread.sleep(processTime);
}
}
}
| RandomSleepProcessor |
java | apache__camel | core/camel-core/src/test/java/org/apache/camel/processor/ToDynamicVariableErrorTest.java | {
"start": 1048,
"end": 11957
} | class ____ extends ContextTestSupport {
@Override
public boolean isUseRouteBuilder() {
return false;
}
@Test
public void testThrowException() throws Exception {
context.addRoutes(new RouteBuilder() {
@Override
public void configure() {
from("direct:receive")
.toD("direct:${header.where}", null, "bye")
.to("mock:result");
from("direct:foo")
.transform().simple("Bye ${body}")
.throwException(new IllegalArgumentException("Forced"));
}
});
context.start();
getMockEndpoint("mock:result").expectedMessageCount(0);
Exchange out = template.request("direct:receive", e -> {
e.getMessage().setHeader("where", "foo");
e.getMessage().setBody("World");
});
Assertions.assertTrue(out.isFailed());
Assertions.assertFalse(out.hasVariables());
// TODO: should this be World or Bye World?
Assertions.assertEquals("Bye World", out.getMessage().getBody());
assertMockEndpointsSatisfied();
}
@Test
public void testTryCatch() throws Exception {
context.addRoutes(new RouteBuilder() {
@Override
public void configure() {
from("direct:receive")
.toD("direct:${header.where}", null, "bye")
.to("mock:result");
from("direct:foo")
.transform().simple("Bye ${body}")
.doTry()
.throwException(new IllegalArgumentException("Forced"))
.doCatch(Exception.class)
.setBody(simple("Catch: ${body}"))
.end();
}
});
context.start();
getMockEndpoint("mock:result").expectedMessageCount(1);
Exchange out = template.request("direct:receive", e -> {
e.getMessage().setHeader("where", "foo");
e.getMessage().setBody("World");
});
Assertions.assertFalse(out.isFailed());
Assertions.assertTrue(out.hasVariables());
Assertions.assertEquals("World", out.getMessage().getBody());
Assertions.assertEquals("Catch: Bye World", out.getVariable("bye"));
assertMockEndpointsSatisfied();
}
@Test
public void testOnExceptionHandled() throws Exception {
context.addRoutes(new RouteBuilder() {
@Override
public void configure() {
onException(Exception.class)
.handled(true)
.setBody(simple("Error: ${body}"));
from("direct:receive")
.toD("direct:${header.where}", null, "bye")
.to("mock:result");
from("direct:foo")
.transform().simple("Bye ${body}")
.throwException(new IllegalArgumentException("Forced"));
}
});
context.start();
getMockEndpoint("mock:result").expectedMessageCount(0);
Exchange out = template.request("direct:receive", e -> {
e.getMessage().setHeader("where", "foo");
e.getMessage().setBody("World");
});
Assertions.assertFalse(out.isFailed());
Assertions.assertFalse(out.hasVariables());
Assertions.assertEquals("Error: Bye World", out.getMessage().getBody());
assertMockEndpointsSatisfied();
}
@Test
public void testOnExceptionNotHandled() throws Exception {
context.addRoutes(new RouteBuilder() {
@Override
public void configure() {
onException(Exception.class)
.handled(false)
.setBody(simple("Error: ${body}"));
from("direct:receive")
.toD("direct:${header.where}", null, "bye")
.to("mock:result");
from("direct:foo")
.transform().simple("Bye ${body}")
.throwException(new IllegalArgumentException("Forced"));
}
});
context.start();
getMockEndpoint("mock:result").expectedMessageCount(0);
Exchange out = template.request("direct:receive", e -> {
e.getMessage().setHeader("where", "foo");
e.getMessage().setBody("World");
});
Assertions.assertTrue(out.isFailed());
Assertions.assertFalse(out.hasVariables());
Assertions.assertEquals("Error: Bye World", out.getMessage().getBody());
assertMockEndpointsSatisfied();
}
@Test
public void testDeadLetterChannel() throws Exception {
context.addRoutes(new RouteBuilder() {
@Override
public void configure() {
errorHandler(deadLetterChannel("mock:dead"));
from("direct:receive")
.toD("direct:${header.where}", null, "bye")
.to("mock:result");
from("direct:foo")
.transform().simple("Bye ${body}")
.throwException(new IllegalArgumentException("Forced"));
}
});
context.start();
getMockEndpoint("mock:result").expectedMessageCount(0);
getMockEndpoint("mock:dead").expectedMessageCount(1);
Exchange out = template.request("direct:receive", e -> {
e.getMessage().setHeader("where", "foo");
e.getMessage().setBody("World");
});
Assertions.assertFalse(out.isFailed());
Assertions.assertFalse(out.hasVariables());
Assertions.assertEquals("Bye World", out.getMessage().getBody());
assertMockEndpointsSatisfied();
}
@Test
public void testDefaultErrorHandler() throws Exception {
context.addRoutes(new RouteBuilder() {
@Override
public void configure() {
errorHandler(defaultErrorHandler());
from("direct:receive")
.toD("direct:${header.where}", null, "bye")
.to("mock:result");
from("direct:foo")
.transform().simple("Bye ${body}")
.throwException(new IllegalArgumentException("Forced"));
}
});
context.start();
getMockEndpoint("mock:result").expectedMessageCount(0);
Exchange out = template.request("direct:receive", e -> {
e.getMessage().setHeader("where", "foo");
e.getMessage().setBody("World");
});
Assertions.assertTrue(out.isFailed());
Assertions.assertFalse(out.hasVariables());
Assertions.assertEquals("Bye World", out.getMessage().getBody());
assertMockEndpointsSatisfied();
}
@Test
public void testStop() throws Exception {
context.addRoutes(new RouteBuilder() {
@Override
public void configure() {
from("direct:receive")
.toD("direct:${header.where}", null, "bye")
.to("mock:result");
from("direct:foo")
.transform().simple("Bye ${body}")
.stop();
}
});
context.start();
getMockEndpoint("mock:result").expectedMessageCount(0);
Exchange out = template.request("direct:receive", e -> {
e.getMessage().setHeader("where", "foo");
e.getMessage().setBody("World");
});
Assertions.assertFalse(out.isFailed());
Assertions.assertFalse(out.hasVariables());
Assertions.assertEquals("Bye World", out.getMessage().getBody());
assertMockEndpointsSatisfied();
}
@Test
public void testRollback() throws Exception {
context.addRoutes(new RouteBuilder() {
@Override
public void configure() {
from("direct:receive")
.toD("direct:${header.where}", null, "bye")
.to("mock:result");
from("direct:foo")
.transform().simple("Bye ${body}")
.rollback();
}
});
context.start();
getMockEndpoint("mock:result").expectedMessageCount(0);
Exchange out = template.request("direct:receive", e -> {
e.getMessage().setHeader("where", "foo");
e.getMessage().setBody("World");
});
Assertions.assertTrue(out.isFailed());
Assertions.assertFalse(out.hasVariables());
Assertions.assertEquals("Bye World", out.getMessage().getBody());
assertMockEndpointsSatisfied();
}
@Test
public void testMarkRollbackLast() throws Exception {
context.addRoutes(new RouteBuilder() {
@Override
public void configure() {
from("direct:receive")
.toD("direct:${header.where}", null, "bye")
.to("mock:result");
from("direct:foo")
.transform().simple("Bye ${body}")
.markRollbackOnly();
}
});
context.start();
getMockEndpoint("mock:result").expectedMessageCount(0);
Exchange out = template.request("direct:receive", e -> {
e.getMessage().setHeader("where", "foo");
e.getMessage().setBody("World");
});
Assertions.assertFalse(out.isFailed());
Assertions.assertFalse(out.hasVariables());
Assertions.assertEquals("Bye World", out.getMessage().getBody());
assertMockEndpointsSatisfied();
}
@Test
public void testMarkRollbackOnlyLast() throws Exception {
context.addRoutes(new RouteBuilder() {
@Override
public void configure() {
from("direct:receive")
.toD("direct:${header.where}", null, "bye")
.to("mock:result");
from("direct:foo")
.transform().simple("Bye ${body}")
.markRollbackOnlyLast();
}
});
context.start();
getMockEndpoint("mock:result").expectedMessageCount(0);
Exchange out = template.request("direct:receive", e -> {
e.getMessage().setHeader("where", "foo");
e.getMessage().setBody("World");
});
Assertions.assertFalse(out.isFailed());
Assertions.assertFalse(out.hasVariables());
Assertions.assertEquals("Bye World", out.getMessage().getBody());
assertMockEndpointsSatisfied();
}
}
| ToDynamicVariableErrorTest |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/boot/spi/InFlightMetadataCollector.java | {
"start": 13752,
"end": 14438
} | interface ____ extends Serializable {
void process(InFlightMetadataCollector metadataCollector);
}
void addDelayedPropertyReferenceHandler(DelayedPropertyReferenceHandler handler);
void addPropertyReference(String entityName, String propertyName);
void addUniquePropertyReference(String entityName, String propertyName);
void addPropertyReferencedAssociation(String entityName, String propertyName, String syntheticPropertyName);
String getPropertyReferencedAssociation(String entityName, String mappedBy);
void addMappedBy(String name, String mappedBy, String propertyName);
String getFromMappedBy(String ownerEntityName, String propertyName);
| DelayedPropertyReferenceHandler |
java | apache__flink | flink-core/src/main/java/org/apache/flink/api/java/typeutils/TypeExtractor.java | {
"start": 80077,
"end": 88174
} | class
____ {
return curVarType;
}
}
}
}
}
// can not be materialized, most likely due to type erasure
// return the type variable of the deepest level
return inTypeTypeVar;
}
/**
* Creates type information from a given Class such as Integer, String[] or POJOs.
*
* <p>This method does not support ParameterizedTypes such as Tuples or complex type
* hierarchies. In most cases {@link TypeExtractor#createTypeInfo(Type)} is the recommended
* method for type extraction (a Class is a child of Type).
*
* @param clazz a Class to create TypeInformation for
* @return TypeInformation that describes the passed Class
*/
public static <X> TypeInformation<X> getForClass(Class<X> clazz) {
final List<Type> typeHierarchy = new ArrayList<>();
typeHierarchy.add(clazz);
return new TypeExtractor().privateGetForClass(clazz, typeHierarchy);
}
private <X> TypeInformation<X> privateGetForClass(Class<X> clazz, List<Type> typeHierarchy) {
return privateGetForClass(clazz, typeHierarchy, null, null, null);
}
@SuppressWarnings({"unchecked", "rawtypes"})
private <OUT, IN1, IN2> TypeInformation<OUT> privateGetForClass(
Class<OUT> clazz,
List<Type> typeHierarchy,
ParameterizedType parameterizedType,
TypeInformation<IN1> in1Type,
TypeInformation<IN2> in2Type) {
checkNotNull(clazz);
// check if type information can be produced using a factory
final TypeInformation<OUT> typeFromFactory =
createTypeInfoFromFactory(clazz, typeHierarchy, in1Type, in2Type);
if (typeFromFactory != null) {
return typeFromFactory;
}
// Object is handled as generic type info
if (clazz.equals(Object.class)) {
return new GenericTypeInfo<>(clazz);
}
// Class is handled as generic type info
if (clazz.equals(Class.class)) {
return new GenericTypeInfo<>(clazz);
}
// recursive types are handled as generic type info
if (countTypeInHierarchy(typeHierarchy, clazz) > 1) {
return new GenericTypeInfo<>(clazz);
}
// check for arrays
if (clazz.isArray()) {
// primitive arrays: int[], byte[], ...
PrimitiveArrayTypeInfo<OUT> primitiveArrayInfo =
PrimitiveArrayTypeInfo.getInfoFor(clazz);
if (primitiveArrayInfo != null) {
return primitiveArrayInfo;
}
// basic type arrays: String[], Integer[], Double[]
BasicArrayTypeInfo<OUT, ?> basicArrayInfo = BasicArrayTypeInfo.getInfoFor(clazz);
if (basicArrayInfo != null) {
return basicArrayInfo;
}
// object arrays
else {
TypeInformation<?> componentTypeInfo =
createTypeInfoWithTypeHierarchy(
typeHierarchy, clazz.getComponentType(), in1Type, in2Type);
return ObjectArrayTypeInfo.getInfoFor(clazz, componentTypeInfo);
}
}
// check for writable types
if (isHadoopWritable(clazz)) {
return createHadoopWritableTypeInfo(clazz);
}
// check for basic types
TypeInformation<OUT> basicTypeInfo = BasicTypeInfo.getInfoFor(clazz);
if (basicTypeInfo != null) {
return basicTypeInfo;
}
// check for SQL time types
TypeInformation<OUT> timeTypeInfo = SqlTimeTypeInfo.getInfoFor(clazz);
if (timeTypeInfo != null) {
return timeTypeInfo;
}
// check for subclasses of Value
if (Value.class.isAssignableFrom(clazz)) {
Class<? extends Value> valueClass = clazz.asSubclass(Value.class);
return (TypeInformation<OUT>) ValueTypeInfo.getValueTypeInfo(valueClass);
}
// check for subclasses of Tuple
if (Tuple.class.isAssignableFrom(clazz)) {
if (clazz == Tuple0.class) {
return new TupleTypeInfo(Tuple0.class);
}
throw new InvalidTypesException(
"Type information extraction for tuples (except Tuple0) cannot be done based on the class.");
}
// check for Enums
if (Enum.class.isAssignableFrom(clazz)) {
return new EnumTypeInfo(clazz);
}
// check for Variant
if (Variant.class.isAssignableFrom(clazz)) {
return (TypeInformation<OUT>) VariantTypeInfo.INSTANCE;
}
// check for parameterized Collections, requirement:
// 1. Interface types: the underlying implementation types are not preserved across
// serialization
// 2. Concrete type arguments: Flink needs them to dispatch serialization of element types
// Example:
// - OK: List<String>, Collection<String>
// - not OK: LinkedList<String> (implementation type), List (raw type), List<T> (generic
// type argument), or List<?> (wildcard type argument)
if (parameterizedType != null) {
Type[] actualTypeArguments = parameterizedType.getActualTypeArguments();
boolean allTypeArgumentsConcrete =
Arrays.stream(actualTypeArguments).allMatch(arg -> arg instanceof Class<?>);
if (allTypeArgumentsConcrete) {
if (clazz.isAssignableFrom(Map.class)) {
Class<?> keyClass = (Class<?>) actualTypeArguments[0];
Class<?> valueClass = (Class<?>) actualTypeArguments[1];
TypeInformation<?> keyTypeInfo = createTypeInfo(keyClass);
TypeInformation<?> valueTypeInfo = createTypeInfo(valueClass);
return (TypeInformation<OUT>)
new NullableMapTypeInfo<>(keyTypeInfo, valueTypeInfo);
} else if (clazz.isAssignableFrom(List.class)) {
Class<?> elementClass = (Class<?>) actualTypeArguments[0];
TypeInformation<?> elementTypeInfo = createTypeInfo(elementClass);
return (TypeInformation<OUT>) new NullableListTypeInfo<>(elementTypeInfo);
} else if (clazz.isAssignableFrom(Set.class)) {
Class<?> elementClass = (Class<?>) actualTypeArguments[0];
TypeInformation<?> elementTypeInfo = createTypeInfo(elementClass);
return (TypeInformation<OUT>) new NullableSetTypeInfo<>(elementTypeInfo);
}
}
}
// special case for POJOs generated by Avro.
if (AvroUtils.isAvroSpecificRecord(clazz)) {
return AvroUtils.getAvroUtils().createAvroTypeInfo(clazz);
}
if (Modifier.isInterface(clazz.getModifiers())) {
// Interface has no members and is therefore not handled as POJO
return new GenericTypeInfo<>(clazz);
}
try {
Type t = parameterizedType != null ? parameterizedType : clazz;
TypeInformation<OUT> pojoType =
analyzePojo(t, new ArrayList<>(typeHierarchy), in1Type, in2Type);
if (pojoType != null) {
return pojoType;
}
} catch (InvalidTypesException e) {
if (LOG.isDebugEnabled()) {
LOG.debug(
"Unable to handle type " + clazz + " as POJO. Message: " + e.getMessage(),
e);
}
// ignore and create generic type info
}
// return a generic type
return new GenericTypeInfo<>(clazz);
}
/**
* Checks if the given field is a valid pojo field: - it is public OR - there are getter and
* setter methods for the field.
*
* @param f field to check
* @param clazz | else |
java | quarkusio__quarkus | extensions/smallrye-fault-tolerance/deployment/src/test/java/io/quarkus/smallrye/faulttolerance/test/hotreload/HotReloadRoute.java | {
"start": 187,
"end": 405
} | class ____ {
public void route(@Observes Router router, HotReloadBean bean) {
router.get("/").handler(ctx -> {
ctx.response().setStatusCode(200).end(bean.hello());
});
}
}
| HotReloadRoute |
java | apache__logging-log4j2 | log4j-core-test/src/main/java/org/apache/logging/log4j/core/test/junit/JdbcRule.java | {
"start": 1708,
"end": 3889
} | class ____ implements TestRule {
private final ConnectionSource connectionSource;
private final String createTableStatement;
private final String dropTableStatement;
/**
* Creates a JdbcRule using a {@link ConnectionSource} and a table creation statement.
*
* @param connectionSource a required source for obtaining a Connection.
* @param createTableStatement an optional SQL DDL statement to create a table for use in a JUnit test.
* @param dropTableStatement an optional SQL DDL statement to drop the created table.
*/
public JdbcRule(
final ConnectionSource connectionSource,
final String createTableStatement,
final String dropTableStatement) {
this.connectionSource = Objects.requireNonNull(connectionSource, "connectionSource");
this.createTableStatement = createTableStatement;
this.dropTableStatement = dropTableStatement;
}
@Override
public org.junit.runners.model.Statement apply(
final org.junit.runners.model.Statement base, final Description description) {
return new org.junit.runners.model.Statement() {
@Override
public void evaluate() throws Throwable {
try (final Connection connection = getConnection();
final Statement statement = connection.createStatement()) {
try {
if (StringUtils.isNotEmpty(createTableStatement)) {
statement.executeUpdate(createTableStatement);
}
base.evaluate();
} finally {
if (StringUtils.isNotEmpty(dropTableStatement)) {
statement.executeUpdate(dropTableStatement);
}
statement.execute("SHUTDOWN");
}
}
}
};
}
public Connection getConnection() throws SQLException {
return connectionSource.getConnection();
}
public ConnectionSource getConnectionSource() {
return connectionSource;
}
}
| JdbcRule |
java | apache__camel | core/camel-core/src/test/java/org/apache/camel/processor/interceptor/AdviceWithTasksOnExceptionTest.java | {
"start": 1126,
"end": 2530
} | class ____ extends ContextTestSupport {
@Test
public void testBeforeWithOnException() throws Exception {
AdviceWith.adviceWith(context.getRouteDefinitions().get(0), context, new AdviceWithRouteBuilder() {
@Override
public void configure() throws Exception {
// weave the node in the route which has id = bar
// and insert the following route path before the adviced node
weaveById("bar").before().to("mock:a").transform(constant("Bye World"));
}
});
getMockEndpoint("mock:foo").expectedBodiesReceived("Hello World");
getMockEndpoint("mock:a").expectedBodiesReceived("Hello World");
getMockEndpoint("mock:bar").expectedBodiesReceived("Bye World");
getMockEndpoint("mock:result").expectedBodiesReceived("Bye World");
template.sendBody("direct:start", "Hello World");
assertMockEndpointsSatisfied();
}
@Override
protected RouteBuilder createRouteBuilder() throws Exception {
return new RouteBuilder() {
@Override
public void configure() throws Exception {
from("direct:start").onException(Exception.class).handled(true).to("mock:error").end().to("mock:foo")
.to("mock:bar").id("bar").to("mock:result");
}
};
}
}
| AdviceWithTasksOnExceptionTest |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-router/src/main/java/org/apache/hadoop/yarn/server/router/clientrm/FederationClientInterceptor.java | {
"start": 13314,
"end": 13582
} | class ____ provides an
* implementation for federation of YARN RM and scaling an application across
* multiple YARN SubClusters. All the federation specific implementation is
* encapsulated in this class. This is always the last interceptor in the chain.
*/
public | and |
java | apache__flink | flink-streaming-java/src/main/java/org/apache/flink/streaming/api/functions/windowing/delta/ExtractionAwareDeltaFunction.java | {
"start": 1627,
"end": 3590
} | class ____<DATA, TO> implements DeltaFunction<DATA> {
private static final long serialVersionUID = 6927486219702689554L;
private Extractor<DATA, TO> converter;
public ExtractionAwareDeltaFunction(Extractor<DATA, TO> converter) {
this.converter = converter;
}
/**
* This method takes the two data point and runs the set extractor on it. The delta function
* implemented at {@link #getNestedDelta} is then called with the extracted data. In case no
* extractor is set the input data gets passes to {@link #getNestedDelta} as-is. The return
* value is just forwarded from {@link #getNestedDelta}.
*
* @param oldDataPoint the older data point as raw data (before extraction).
* @param newDataPoint the new data point as raw data (before extraction).
* @return the delta between the two points.
*/
@SuppressWarnings("unchecked")
@Override
public double getDelta(DATA oldDataPoint, DATA newDataPoint) {
if (converter == null) {
// In case no conversion/extraction is required, we can cast DATA to
// TO
// => Therefore, "unchecked" warning is suppressed for this method.
return getNestedDelta((TO) oldDataPoint, (TO) newDataPoint);
} else {
return getNestedDelta(converter.extract(oldDataPoint), converter.extract(newDataPoint));
}
}
/**
* This method is exactly the same as {@link DeltaFunction#getDelta(Object, Object)} except that
* it gets the result of the previously done extractions as input. Therefore, this method only
* does the actual calculation of the delta but no data extraction or conversion.
*
* @param oldDataPoint the older data point.
* @param newDataPoint the new data point.
* @return the delta between the two points.
*/
public abstract double getNestedDelta(TO oldDataPoint, TO newDataPoint);
}
| ExtractionAwareDeltaFunction |
java | alibaba__fastjson | src/test/java/com/alibaba/json/bvt/util/FieldInfoTest.java | {
"start": 3741,
"end": 3830
} | class ____<T> {
public List<T> value;
}
public static | GenericListFieldEntity |
java | quarkusio__quarkus | independent-projects/arc/tests/src/test/java/io/quarkus/arc/test/interceptors/inheritance/Interceptor1.java | {
"start": 208,
"end": 406
} | class ____ extends OverridenInterceptor {
@AroundInvoke
@Override
public Object intercept(InvocationContext ctx) throws Exception {
return ctx.proceed() + "1";
}
}
| Interceptor1 |
java | alibaba__druid | core/src/test/java/com/alibaba/druid/bvt/sql/oracle/alter/OracleAlterProcedureTest2.java | {
"start": 936,
"end": 2403
} | class ____ extends OracleTest {
public void test_0() throws Exception {
String sql = //
"ALTER PROCEDURE \"ALIBABA1949\".\"FIX_PRODUCT_ADDITION_YZS_0210\" COMPILE REUSE SETTINGS";
OracleStatementParser parser = new OracleStatementParser(sql);
List<SQLStatement> statementList = parser.parseStatementList();
SQLStatement statemen = statementList.get(0);
print(statementList);
assertEquals(1, statementList.size());
OracleSchemaStatVisitor visitor = new OracleSchemaStatVisitor();
statemen.accept(visitor);
System.out.println("Tables : " + visitor.getTables());
System.out.println("fields : " + visitor.getColumns());
System.out.println("coditions : " + visitor.getConditions());
System.out.println("relationships : " + visitor.getRelationships());
System.out.println("orderBy : " + visitor.getOrderByColumns());
assertEquals(0, visitor.getTables().size());
// assertTrue(visitor.getTables().containsKey(new TableStat.Name("\"DUAL\"")));
assertEquals(0, visitor.getColumns().size());
// assertTrue(visitor.getColumns().contains(new TableStat.Column("pivot_table", "*")));
// assertTrue(visitor.getColumns().contains(new TableStat.Column("pivot_table", "YEAR")));
// assertTrue(visitor.getColumns().contains(new TableStat.Column("pivot_table", "order_mode")));
}
}
| OracleAlterProcedureTest2 |
java | apache__camel | components/camel-ai/camel-langchain4j-chat/src/generated/java/org/apache/camel/component/langchain4j/chat/LangChain4jChatEndpointConfigurer.java | {
"start": 743,
"end": 4126
} | class ____ extends PropertyConfigurerSupport implements GeneratedPropertyConfigurer, PropertyConfigurerGetter {
@Override
public boolean configure(CamelContext camelContext, Object obj, String name, Object value, boolean ignoreCase) {
LangChain4jChatEndpoint target = (LangChain4jChatEndpoint) obj;
switch (ignoreCase ? name.toLowerCase() : name) {
case "bridgeerrorhandler":
case "bridgeErrorHandler": target.setBridgeErrorHandler(property(camelContext, boolean.class, value)); return true;
case "chatmodel":
case "chatModel": target.getConfiguration().setChatModel(property(camelContext, dev.langchain4j.model.chat.ChatModel.class, value)); return true;
case "chatoperation":
case "chatOperation": target.getConfiguration().setChatOperation(property(camelContext, org.apache.camel.component.langchain4j.chat.LangChain4jChatOperations.class, value)); return true;
case "exceptionhandler":
case "exceptionHandler": target.setExceptionHandler(property(camelContext, org.apache.camel.spi.ExceptionHandler.class, value)); return true;
case "exchangepattern":
case "exchangePattern": target.setExchangePattern(property(camelContext, org.apache.camel.ExchangePattern.class, value)); return true;
case "lazystartproducer":
case "lazyStartProducer": target.setLazyStartProducer(property(camelContext, boolean.class, value)); return true;
default: return false;
}
}
@Override
public String[] getAutowiredNames() {
return new String[]{"chatModel"};
}
@Override
public Class<?> getOptionType(String name, boolean ignoreCase) {
switch (ignoreCase ? name.toLowerCase() : name) {
case "bridgeerrorhandler":
case "bridgeErrorHandler": return boolean.class;
case "chatmodel":
case "chatModel": return dev.langchain4j.model.chat.ChatModel.class;
case "chatoperation":
case "chatOperation": return org.apache.camel.component.langchain4j.chat.LangChain4jChatOperations.class;
case "exceptionhandler":
case "exceptionHandler": return org.apache.camel.spi.ExceptionHandler.class;
case "exchangepattern":
case "exchangePattern": return org.apache.camel.ExchangePattern.class;
case "lazystartproducer":
case "lazyStartProducer": return boolean.class;
default: return null;
}
}
@Override
public Object getOptionValue(Object obj, String name, boolean ignoreCase) {
LangChain4jChatEndpoint target = (LangChain4jChatEndpoint) obj;
switch (ignoreCase ? name.toLowerCase() : name) {
case "bridgeerrorhandler":
case "bridgeErrorHandler": return target.isBridgeErrorHandler();
case "chatmodel":
case "chatModel": return target.getConfiguration().getChatModel();
case "chatoperation":
case "chatOperation": return target.getConfiguration().getChatOperation();
case "exceptionhandler":
case "exceptionHandler": return target.getExceptionHandler();
case "exchangepattern":
case "exchangePattern": return target.getExchangePattern();
case "lazystartproducer":
case "lazyStartProducer": return target.isLazyStartProducer();
default: return null;
}
}
}
| LangChain4jChatEndpointConfigurer |
java | apache__camel | core/camel-main/src/main/java/org/apache/camel/main/MainLifecycleStrategy.java | {
"start": 1187,
"end": 1789
} | class ____ extends LifecycleStrategySupport {
private static final Logger LOG = LoggerFactory.getLogger(MainLifecycleStrategy.class);
private final MainShutdownStrategy shutdownStrategy;
public MainLifecycleStrategy(MainShutdownStrategy shutdownStrategy) {
this.shutdownStrategy = shutdownStrategy;
}
@Override
public void onContextStopping(CamelContext context) {
LOG.debug("CamelContext: {} is stopping, triggering shutdown of the JVM.", context.getName());
// trigger stopping the Main
shutdownStrategy.shutdown();
}
}
| MainLifecycleStrategy |
java | elastic__elasticsearch | x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AggregationDataExtractor.java | {
"start": 910,
"end": 1607
} | class ____ extends AbstractAggregationDataExtractor {
AggregationDataExtractor(
Client client,
AggregationDataExtractorContext dataExtractorContext,
DatafeedTimingStatsReporter timingStatsReporter
) {
super(client, dataExtractorContext, timingStatsReporter);
}
@Override
protected SearchRequestBuilder buildSearchRequest(SearchSourceBuilder searchSourceBuilder) {
return new SearchRequestBuilder(client).setSource(searchSourceBuilder)
.setIndicesOptions(context.queryContext.indicesOptions)
.setAllowPartialSearchResults(false)
.setIndices(context.queryContext.indices);
}
}
| AggregationDataExtractor |
java | spring-projects__spring-framework | spring-core/src/main/java/org/springframework/util/xml/AbstractStaxXMLReader.java | {
"start": 1529,
"end": 6280
} | class ____ extends AbstractXMLReader {
private static final String NAMESPACES_FEATURE_NAME = "http://xml.org/sax/features/namespaces";
private static final String NAMESPACE_PREFIXES_FEATURE_NAME = "http://xml.org/sax/features/namespace-prefixes";
private static final String IS_STANDALONE_FEATURE_NAME = "http://xml.org/sax/features/is-standalone";
private boolean namespacesFeature = true;
private boolean namespacePrefixesFeature = false;
private @Nullable Boolean isStandalone;
private final Map<String, String> namespaces = new LinkedHashMap<>();
@Override
public boolean getFeature(String name) throws SAXNotRecognizedException, SAXNotSupportedException {
return switch (name) {
case NAMESPACES_FEATURE_NAME -> this.namespacesFeature;
case NAMESPACE_PREFIXES_FEATURE_NAME -> this.namespacePrefixesFeature;
case IS_STANDALONE_FEATURE_NAME -> {
if (this.isStandalone != null) {
yield this.isStandalone;
}
else {
throw new SAXNotSupportedException("startDocument() callback not completed yet");
}
}
default -> super.getFeature(name);
};
}
@Override
public void setFeature(String name, boolean value) throws SAXNotRecognizedException, SAXNotSupportedException {
if (NAMESPACES_FEATURE_NAME.equals(name)) {
this.namespacesFeature = value;
}
else if (NAMESPACE_PREFIXES_FEATURE_NAME.equals(name)) {
this.namespacePrefixesFeature = value;
}
else {
super.setFeature(name, value);
}
}
protected void setStandalone(boolean standalone) {
this.isStandalone = standalone;
}
/**
* Indicates whether the SAX feature {@code http://xml.org/sax/features/namespaces} is turned on.
*/
protected boolean hasNamespacesFeature() {
return this.namespacesFeature;
}
/**
* Indicates whether the SAX feature {@code http://xml.org/sax/features/namespaces-prefixes} is turned on.
*/
protected boolean hasNamespacePrefixesFeature() {
return this.namespacePrefixesFeature;
}
/**
* Convert a {@code QName} to a qualified name, as used by DOM and SAX.
* The returned string has a format of {@code prefix:localName} if the
* prefix is set, or just {@code localName} if not.
* @param qName the {@code QName}
* @return the qualified name
*/
protected String toQualifiedName(QName qName) {
String prefix = qName.getPrefix();
if (!StringUtils.hasLength(prefix)) {
return qName.getLocalPart();
}
else {
return prefix + ":" + qName.getLocalPart();
}
}
/**
* Parse the StAX XML reader passed at construction-time.
* <p><b>NOTE:</b>: The given {@code InputSource} is not read, but ignored.
* @param ignored is ignored
* @throws SAXException a SAX exception, possibly wrapping a {@code XMLStreamException}
*/
@Override
public final void parse(InputSource ignored) throws SAXException {
parse();
}
/**
* Parse the StAX XML reader passed at construction-time.
* <p><b>NOTE:</b>: The given system identifier is not read, but ignored.
* @param ignored is ignored
* @throws SAXException a SAX exception, possibly wrapping a {@code XMLStreamException}
*/
@Override
public final void parse(String ignored) throws SAXException {
parse();
}
private void parse() throws SAXException {
try {
parseInternal();
}
catch (XMLStreamException ex) {
Locator locator = null;
if (ex.getLocation() != null) {
locator = new StaxLocator(ex.getLocation());
}
SAXParseException saxException = new SAXParseException(ex.getMessage(), locator, ex);
if (getErrorHandler() != null) {
getErrorHandler().fatalError(saxException);
}
else {
throw saxException;
}
}
}
/**
* Template method that parses the StAX reader passed at construction-time.
*/
protected abstract void parseInternal() throws SAXException, XMLStreamException;
/**
* Start the prefix mapping for the given prefix.
* @see org.xml.sax.ContentHandler#startPrefixMapping(String, String)
*/
protected void startPrefixMapping(@Nullable String prefix, String namespace) throws SAXException {
if (getContentHandler() != null && StringUtils.hasLength(namespace)) {
if (prefix == null) {
prefix = "";
}
if (!namespace.equals(this.namespaces.get(prefix))) {
getContentHandler().startPrefixMapping(prefix, namespace);
this.namespaces.put(prefix, namespace);
}
}
}
/**
* End the prefix mapping for the given prefix.
* @see org.xml.sax.ContentHandler#endPrefixMapping(String)
*/
protected void endPrefixMapping(String prefix) throws SAXException {
if (getContentHandler() != null && this.namespaces.containsKey(prefix)) {
getContentHandler().endPrefixMapping(prefix);
this.namespaces.remove(prefix);
}
}
/**
* Implementation of the {@code Locator} | AbstractStaxXMLReader |
java | elastic__elasticsearch | x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/frequentitemsets/mr/AbstractItemSetMapReducer.java | {
"start": 1708,
"end": 2392
} | interface ____ several hooks to run initialization and finalization.
*
* As aggregations are not exactly a map-reduce framework some other moving parts are required:
*
* - code to serialize/deserialize data for sending it over the wire
* - data is stored inside of the map-reducer instance but not separately
* - result output as XContent
* - named writable magic
*
* @param <MapContext> context to be used for collecting
* @param <MapFinalContext> context after all data of one partition has been mapped
* @param <ReduceContext> context to be used for reducing data
* @param <Result> the result object that holds the result of this map-reducer
*
*/
public abstract | offers |
java | apache__flink | flink-state-backends/flink-statebackend-forst/src/main/java/org/apache/flink/state/forst/fs/filemapping/FileOwnership.java | {
"start": 917,
"end": 1560
} | enum ____ {
/**
* The file is privately owned by DB. "Owned by DB" means the file's lifecycle is managed by DB,
* i.e., it will be deleted when DB decides to dispose of it. "Privately" indicates that its
* ownership cannot be transferred to JM.
*/
PRIVATE_OWNED_BY_DB,
/**
* The file is owned by DB but is shareable. "Shareable" means its ownership can be transferred
* to JM in the future.
*/
SHAREABLE_OWNED_BY_DB,
/**
* The file is not owned by DB. That means its lifecycle is not managed by DB, and only JM can
* decide when to delete it.
*/
NOT_OWNED
}
| FileOwnership |
java | spring-projects__spring-security | config/src/test/java/org/springframework/security/config/web/server/OAuth2ResourceServerSpecTests.java | {
"start": 33698,
"end": 34234
} | class ____ {
@Bean
SecurityWebFilterChain springSecurity(ServerHttpSecurity http) {
// @formatter:off
http
.oauth2ResourceServer((oauth2) -> oauth2
.jwt((jwt) -> jwt
.authenticationManager(authenticationManager())
)
);
// @formatter:on
return http.build();
}
@Bean
ReactiveAuthenticationManager authenticationManager() {
return mock(ReactiveAuthenticationManager.class);
}
}
@Configuration
@EnableWebFlux
@EnableWebFluxSecurity
static | CustomAuthenticationManagerInLambdaConfig |
java | google__guava | android/guava-tests/test/com/google/common/io/LittleEndianDataOutputStreamTest.java | {
"start": 990,
"end": 1091
} | class ____ {@link LittleEndianDataOutputStream}.
*
* @author Keith Bottner
*/
@NullUnmarked
public | for |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/fs/loadGenerator/TestLoadGenerator.java | {
"start": 1514,
"end": 9282
} | class ____ extends Configured implements Tool {
private static final Configuration CONF = new HdfsConfiguration();
private static final int DEFAULT_BLOCK_SIZE = 10;
private static final File OUT_DIR = PathUtils.getTestDir(TestLoadGenerator.class);
private static final File DIR_STRUCTURE_FILE =
new File(OUT_DIR, StructureGenerator.DIR_STRUCTURE_FILE_NAME);
private static final File FILE_STRUCTURE_FILE =
new File(OUT_DIR, StructureGenerator.FILE_STRUCTURE_FILE_NAME);
private static final String DIR_STRUCTURE_FIRST_LINE = "/dir0";
private static final String DIR_STRUCTURE_SECOND_LINE = "/dir1";
private static final String FILE_STRUCTURE_FIRST_LINE =
"/dir0/_file_0 0.3754598635933768";
private static final String FILE_STRUCTURE_SECOND_LINE =
"/dir1/_file_1 1.4729310851145203";
static {
CONF.setLong(DFSConfigKeys.DFS_BLOCK_SIZE_KEY, DEFAULT_BLOCK_SIZE);
CONF.setInt(DFSConfigKeys.DFS_BYTES_PER_CHECKSUM_KEY, DEFAULT_BLOCK_SIZE);
CONF.setLong(DFSConfigKeys.DFS_HEARTBEAT_INTERVAL_KEY, 1L);
}
/** Test if the structure generator works fine */
@Test
public void testStructureGenerator() throws Exception {
StructureGenerator sg = new StructureGenerator();
String[] args = new String[]{"-maxDepth", "2", "-minWidth", "1",
"-maxWidth", "2", "-numOfFiles", "2",
"-avgFileSize", "1", "-outDir", OUT_DIR.getAbsolutePath(), "-seed", "1"};
final int MAX_DEPTH = 1;
final int MIN_WIDTH = 3;
final int MAX_WIDTH = 5;
final int NUM_OF_FILES = 7;
final int AVG_FILE_SIZE = 9;
final int SEED = 13;
try {
// successful case
assertEquals(0, sg.run(args));
BufferedReader in = new BufferedReader(new FileReader(DIR_STRUCTURE_FILE));
assertEquals(DIR_STRUCTURE_FIRST_LINE, in.readLine());
assertEquals(DIR_STRUCTURE_SECOND_LINE, in.readLine());
assertEquals(null, in.readLine());
in.close();
in = new BufferedReader(new FileReader(FILE_STRUCTURE_FILE));
assertEquals(FILE_STRUCTURE_FIRST_LINE, in.readLine());
assertEquals(FILE_STRUCTURE_SECOND_LINE, in.readLine());
assertEquals(null, in.readLine());
in.close();
String oldArg = args[MAX_DEPTH];
args[MAX_DEPTH] = "0";
assertEquals(-1, sg.run(args));
args[MAX_DEPTH] = oldArg;
oldArg = args[MIN_WIDTH];
args[MIN_WIDTH] = "-1";
assertEquals(-1, sg.run(args));
args[MIN_WIDTH] = oldArg;
oldArg = args[MAX_WIDTH];
args[MAX_WIDTH] = "-1";
assertEquals(-1, sg.run(args));
args[MAX_WIDTH] = oldArg;
oldArg = args[NUM_OF_FILES];
args[NUM_OF_FILES] = "-1";
assertEquals(-1, sg.run(args));
args[NUM_OF_FILES] = oldArg;
oldArg = args[NUM_OF_FILES];
args[NUM_OF_FILES] = "-1";
assertEquals(-1, sg.run(args));
args[NUM_OF_FILES] = oldArg;
oldArg = args[AVG_FILE_SIZE];
args[AVG_FILE_SIZE] = "-1";
assertEquals(-1, sg.run(args));
args[AVG_FILE_SIZE] = oldArg;
oldArg = args[SEED];
args[SEED] = "34.d4";
assertEquals(-1, sg.run(args));
args[SEED] = oldArg;
} finally {
DIR_STRUCTURE_FILE.delete();
FILE_STRUCTURE_FILE.delete();
}
}
/** Test if the load generator works fine */
@Test
public void testLoadGenerator() throws Exception {
final String TEST_SPACE_ROOT = "/test";
final String SCRIPT_TEST_DIR = OUT_DIR.getAbsolutePath();
String script = SCRIPT_TEST_DIR + "/" + "loadgenscript";
String script2 = SCRIPT_TEST_DIR + "/" + "loadgenscript2";
File scriptFile1 = new File(script);
File scriptFile2 = new File(script2);
FileWriter writer = new FileWriter(DIR_STRUCTURE_FILE);
writer.write(DIR_STRUCTURE_FIRST_LINE+"\n");
writer.write(DIR_STRUCTURE_SECOND_LINE+"\n");
writer.close();
writer = new FileWriter(FILE_STRUCTURE_FILE);
writer.write(FILE_STRUCTURE_FIRST_LINE+"\n");
writer.write(FILE_STRUCTURE_SECOND_LINE+"\n");
writer.close();
MiniDFSCluster cluster = new MiniDFSCluster.Builder(CONF).numDataNodes(3).build();
cluster.waitActive();
try {
DataGenerator dg = new DataGenerator();
dg.setConf(CONF);
String [] args = new String[] {"-inDir", OUT_DIR.getAbsolutePath(), "-root", TEST_SPACE_ROOT};
assertEquals(0, dg.run(args));
final int READ_PROBABILITY = 1;
final int WRITE_PROBABILITY = 3;
final int MAX_DELAY_BETWEEN_OPS = 7;
final int NUM_OF_THREADS = 9;
final int START_TIME = 11;
final int ELAPSED_TIME = 13;
LoadGenerator lg = new LoadGenerator();
lg.setConf(CONF);
args = new String[] {"-readProbability", "0.3", "-writeProbability", "0.3",
"-root", TEST_SPACE_ROOT, "-maxDelayBetweenOps", "0",
"-numOfThreads", "1", "-startTime",
Long.toString(Time.now()), "-elapsedTime", "10"};
assertEquals(0, lg.run(args));
String oldArg = args[READ_PROBABILITY];
args[READ_PROBABILITY] = "1.1";
assertEquals(-1, lg.run(args));
args[READ_PROBABILITY] = "-1.1";
assertEquals(-1, lg.run(args));
args[READ_PROBABILITY] = oldArg;
oldArg = args[WRITE_PROBABILITY];
args[WRITE_PROBABILITY] = "1.1";
assertEquals(-1, lg.run(args));
args[WRITE_PROBABILITY] = "-1.1";
assertEquals(-1, lg.run(args));
args[WRITE_PROBABILITY] = "0.9";
assertEquals(-1, lg.run(args));
args[READ_PROBABILITY] = oldArg;
oldArg = args[MAX_DELAY_BETWEEN_OPS];
args[MAX_DELAY_BETWEEN_OPS] = "1.x1";
assertEquals(-1, lg.run(args));
args[MAX_DELAY_BETWEEN_OPS] = oldArg;
oldArg = args[MAX_DELAY_BETWEEN_OPS];
args[MAX_DELAY_BETWEEN_OPS] = "1.x1";
assertEquals(-1, lg.run(args));
args[MAX_DELAY_BETWEEN_OPS] = oldArg;
oldArg = args[NUM_OF_THREADS];
args[NUM_OF_THREADS] = "-1";
assertEquals(-1, lg.run(args));
args[NUM_OF_THREADS] = oldArg;
oldArg = args[START_TIME];
args[START_TIME] = "-1";
assertEquals(-1, lg.run(args));
args[START_TIME] = oldArg;
oldArg = args[ELAPSED_TIME];
args[ELAPSED_TIME] = "-1";
assertEquals(-1, lg.run(args));
args[ELAPSED_TIME] = oldArg;
// test scripted operation
// Test with good script
FileWriter fw = new FileWriter(scriptFile1);
fw.write("2 .22 .33\n");
fw.write("3 .10 .6\n");
fw.write("6 0 .7\n");
fw.close();
String[] scriptArgs = new String[] {
"-root", TEST_SPACE_ROOT, "-maxDelayBetweenOps", "0",
"-numOfThreads", "10", "-startTime",
Long.toString(Time.now()), "-scriptFile", script};
assertEquals(0, lg.run(scriptArgs));
// Test with bad script
fw = new FileWriter(scriptFile2);
fw.write("2 .22 .33\n");
fw.write("3 blah blah blah .6\n");
fw.write("6 0 .7\n");
fw.close();
scriptArgs[scriptArgs.length - 1] = script2;
assertEquals(-1, lg.run(scriptArgs));
} finally {
cluster.shutdown();
DIR_STRUCTURE_FILE.delete();
FILE_STRUCTURE_FILE.delete();
scriptFile1.delete();
scriptFile2.delete();
}
}
/**
* @param args
*/
public static void main(String[] args) throws Exception {
int res = ToolRunner.run(new TestLoadGenerator(), args);
System.exit(res);
}
@Override
public int run(String[] args) throws Exception {
TestLoadGenerator loadGeneratorTest = new TestLoadGenerator();
loadGeneratorTest.testStructureGenerator();
loadGeneratorTest.testLoadGenerator();
return 0;
}
}
| TestLoadGenerator |
java | apache__camel | components/camel-soap/src/test/java/org/apache/camel/dataformat/soap/SoapToSoapDontIgnoreTest.java | {
"start": 1470,
"end": 4506
} | class ____ extends CamelTestSupport {
private static SoapDataFormat soapjaxbModel;
private static SoapDataFormat soapjaxbModelDontIgnoreUnmarshalled;
private static Map<String, String> namespacePrefixMap;
@BeforeAll
public static void setup() {
namespacePrefixMap = new HashMap<>();
namespacePrefixMap.put("http://schemas.xmlsoap.org/soap/envelope/", "soap");
namespacePrefixMap.put("http://www.w3.org/2001/XMLSchema", "xsd");
namespacePrefixMap.put("http://www.w3.org/2001/XMLSchema-instance", "xsi");
namespacePrefixMap.put("http://www.example.com/contact", "cont");
namespacePrefixMap.put("http://www.example.com/soapheaders", "custom");
soapjaxbModel = new SoapDataFormat("com.example.contact:com.example.soapheaders");
soapjaxbModel.setNamespacePrefix(namespacePrefixMap);
soapjaxbModel.setPrettyPrint(true);
soapjaxbModel.setIgnoreUnmarshalledHeaders(false);
soapjaxbModel.setIgnoreJAXBElement(false);
soapjaxbModel.setElementNameStrategy(new TypeNameStrategy());
soapjaxbModelDontIgnoreUnmarshalled = new SoapDataFormat(
"com.example.contact:com.example.soapheaders");
soapjaxbModelDontIgnoreUnmarshalled.setNamespacePrefix(namespacePrefixMap);
soapjaxbModelDontIgnoreUnmarshalled.setPrettyPrint(true);
soapjaxbModelDontIgnoreUnmarshalled.setIgnoreUnmarshalledHeaders(false);
soapjaxbModelDontIgnoreUnmarshalled.setElementNameStrategy(new TypeNameStrategy());
}
@AfterAll
public static void teardown() {
soapjaxbModel = null;
namespacePrefixMap = null;
}
@Test
public void testSoapMarshal() throws Exception {
MockEndpoint endpoint = getMockEndpoint("mock:end");
endpoint.setExpectedMessageCount(1);
template.sendBody("direct:start", createRequest());
MockEndpoint.assertIsSatisfied(context);
Exchange result = endpoint.assertExchangeReceived(0);
byte[] body = result.getIn().getBody(byte[].class);
InputStream stream = new ByteArrayInputStream(body);
SOAPMessage request = MessageFactory.newInstance().createMessage(null, stream);
assertTrue(null != request.getSOAPHeader()
&& request.getSOAPHeader().extractAllHeaderElements().hasNext(),
"Expected headers");
}
private InputStream createRequest() throws Exception {
InputStream stream = this.getClass().getResourceAsStream("SoapMarshalHeadersTest.xml");
return stream;
}
@Override
protected RouteBuilder createRouteBuilder() {
context.getGlobalOptions().put(Exchange.LOG_DEBUG_BODY_MAX_CHARS, "0");
context.setTracing(true);
return new RouteBuilder() {
public void configure() {
from("direct:start").unmarshal(soapjaxbModel).marshal(soapjaxbModelDontIgnoreUnmarshalled)
.to("mock:end");
}
};
}
}
| SoapToSoapDontIgnoreTest |
java | apache__logging-log4j2 | log4j-core-test/src/test/java/org/apache/logging/log4j/core/util/datetime/FixedDateFormatTest.java | {
"start": 2189,
"end": 22193
} | class ____ {
private boolean containsNanos(final FixedFormat fixedFormat) {
final String pattern = fixedFormat.getPattern();
return pattern.endsWith("n") || pattern.matches(".+n+X*") || pattern.matches(".+n+Z*");
}
@Test
void testConstructorDisallowsNullFormat() {
assertThrows(NullPointerException.class, () -> new FixedDateFormat(null, TimeZone.getDefault()));
}
@Test
void testConstructorDisallowsNullTimeZone() {
assertThrows(NullPointerException.class, () -> new FixedDateFormat(FixedFormat.ABSOLUTE, null));
}
@Test
void testCreateIfSupported_customTimeZoneIfOptionsArrayWithTimeZoneElement() {
final FixedDateFormat fmt = FixedDateFormat.createIfSupported(DEFAULT.getPattern(), "GMT+08:00", "");
assertEquals(DEFAULT.getPattern(), fmt.getFormat());
assertEquals(TimeZone.getTimeZone("GMT+08:00"), fmt.getTimeZone());
}
@Test
void testCreateIfSupported_defaultIfOptionsArrayEmpty() {
final FixedDateFormat fmt = FixedDateFormat.createIfSupported(Strings.EMPTY_ARRAY);
assertEquals(DEFAULT.getPattern(), fmt.getFormat());
}
@Test
void testCreateIfSupported_defaultIfOptionsArrayNull() {
final FixedDateFormat fmt = FixedDateFormat.createIfSupported((String[]) null);
assertEquals(DEFAULT.getPattern(), fmt.getFormat());
}
@Test
void testCreateIfSupported_defaultIfOptionsArrayWithSingleNullElement() {
final FixedDateFormat fmt = FixedDateFormat.createIfSupported(new String[1]);
assertEquals(DEFAULT.getPattern(), fmt.getFormat());
assertEquals(TimeZone.getDefault(), fmt.getTimeZone());
}
@Test
void testCreateIfSupported_defaultTimeZoneIfOptionsArrayWithSecondNullElement() {
final FixedDateFormat fmt = FixedDateFormat.createIfSupported(DEFAULT.getPattern(), null, "");
assertEquals(DEFAULT.getPattern(), fmt.getFormat());
assertEquals(TimeZone.getDefault(), fmt.getTimeZone());
}
@Test
void testCreateIfSupported_nonNullIfNameMatches() {
for (final FixedDateFormat.FixedFormat format : FixedDateFormat.FixedFormat.values()) {
final String[] options = {format.name()};
assertNotNull(FixedDateFormat.createIfSupported(options), format.name());
}
}
@Test
void testCreateIfSupported_nonNullIfPatternMatches() {
for (final FixedDateFormat.FixedFormat format : FixedDateFormat.FixedFormat.values()) {
final String[] options = {format.getPattern()};
assertNotNull(FixedDateFormat.createIfSupported(options), format.name());
}
}
@Test
void testCreateIfSupported_nullIfNameDoesNotMatch() {
final String[] options = {"DEFAULT3"};
assertNull(FixedDateFormat.createIfSupported(options), "DEFAULT3");
}
@Test
void testCreateIfSupported_nullIfPatternDoesNotMatch() {
final String[] options = {"y M d H m s"};
assertNull(FixedDateFormat.createIfSupported(options), "y M d H m s");
}
@Test
void testDaylightSavingToSummerTime() throws Exception {
final Calendar calendar = Calendar.getInstance();
calendar.setTime(new SimpleDateFormat("yyyy-MM-dd HH:mm:ss Z").parse("2017-03-12 00:00:00 UTC"));
final SimpleDateFormat usCentral = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss,SSS", Locale.US);
usCentral.setTimeZone(TimeZone.getTimeZone("US/Central"));
final SimpleDateFormat utc = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss,SSS", Locale.US);
utc.setTimeZone(TimeZone.getTimeZone("UTC"));
final FixedDateFormat fixedUsCentral = FixedDateFormat.create(DEFAULT, TimeZone.getTimeZone("US/Central"));
final FixedDateFormat fixedUtc = FixedDateFormat.create(DEFAULT, TimeZone.getTimeZone("UTC"));
final String[][] expectedDstAndNoDst = {
// US/Central, UTC
{"2017-03-11 18:00:00,000", "2017-03-12 00:00:00,000"}, //
{"2017-03-11 19:00:00,000", "2017-03-12 01:00:00,000"}, //
{"2017-03-11 20:00:00,000", "2017-03-12 02:00:00,000"}, //
{"2017-03-11 21:00:00,000", "2017-03-12 03:00:00,000"}, //
{"2017-03-11 22:00:00,000", "2017-03-12 04:00:00,000"}, //
{"2017-03-11 23:00:00,000", "2017-03-12 05:00:00,000"}, //
{"2017-03-12 00:00:00,000", "2017-03-12 06:00:00,000"}, //
{"2017-03-12 01:00:00,000", "2017-03-12 07:00:00,000"}, //
{"2017-03-12 03:00:00,000", "2017-03-12 08:00:00,000"}, // DST jump at 2am US central time
{"2017-03-12 04:00:00,000", "2017-03-12 09:00:00,000"}, //
{"2017-03-12 05:00:00,000", "2017-03-12 10:00:00,000"}, //
{"2017-03-12 06:00:00,000", "2017-03-12 11:00:00,000"}, //
{"2017-03-12 07:00:00,000", "2017-03-12 12:00:00,000"}, //
{"2017-03-12 08:00:00,000", "2017-03-12 13:00:00,000"}, //
{"2017-03-12 09:00:00,000", "2017-03-12 14:00:00,000"}, //
{"2017-03-12 10:00:00,000", "2017-03-12 15:00:00,000"}, //
{"2017-03-12 11:00:00,000", "2017-03-12 16:00:00,000"}, //
{"2017-03-12 12:00:00,000", "2017-03-12 17:00:00,000"}, //
{"2017-03-12 13:00:00,000", "2017-03-12 18:00:00,000"}, //
{"2017-03-12 14:00:00,000", "2017-03-12 19:00:00,000"}, //
{"2017-03-12 15:00:00,000", "2017-03-12 20:00:00,000"}, //
{"2017-03-12 16:00:00,000", "2017-03-12 21:00:00,000"}, //
{"2017-03-12 17:00:00,000", "2017-03-12 22:00:00,000"}, //
{"2017-03-12 18:00:00,000", "2017-03-12 23:00:00,000"}, // 24
{"2017-03-12 19:00:00,000", "2017-03-13 00:00:00,000"}, //
{"2017-03-12 20:00:00,000", "2017-03-13 01:00:00,000"}, //
{"2017-03-12 21:00:00,000", "2017-03-13 02:00:00,000"}, //
{"2017-03-12 22:00:00,000", "2017-03-13 03:00:00,000"}, //
{"2017-03-12 23:00:00,000", "2017-03-13 04:00:00,000"}, //
{"2017-03-13 00:00:00,000", "2017-03-13 05:00:00,000"}, //
{"2017-03-13 01:00:00,000", "2017-03-13 06:00:00,000"}, //
{"2017-03-13 02:00:00,000", "2017-03-13 07:00:00,000"}, //
{"2017-03-13 03:00:00,000", "2017-03-13 08:00:00,000"}, //
{"2017-03-13 04:00:00,000", "2017-03-13 09:00:00,000"}, //
{"2017-03-13 05:00:00,000", "2017-03-13 10:00:00,000"}, //
{"2017-03-13 06:00:00,000", "2017-03-13 11:00:00,000"}, //
};
for (int i = 0; i < 36; i++) {
final Date date = calendar.getTime();
assertEquals(expectedDstAndNoDst[i][0], usCentral.format(date), "SimpleDateFormat TZ=US Central");
assertEquals(expectedDstAndNoDst[i][1], utc.format(date), "SimpleDateFormat TZ=UTC");
assertEquals(
expectedDstAndNoDst[i][0], fixedUsCentral.format(date.getTime()), "FixedDateFormat TZ=US Central");
assertEquals(expectedDstAndNoDst[i][1], fixedUtc.format(date.getTime()), "FixedDateFormat TZ=UTC");
calendar.add(Calendar.HOUR_OF_DAY, 1);
}
}
@Test
void testDaylightSavingToWinterTime() throws Exception {
final Calendar calendar = Calendar.getInstance();
calendar.setTime(new SimpleDateFormat("yyyy-MM-dd HH:mm:ss Z").parse("2017-11-05 00:00:00 UTC"));
final SimpleDateFormat usCentral = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss,SSS", Locale.US);
usCentral.setTimeZone(TimeZone.getTimeZone("US/Central"));
final SimpleDateFormat utc = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss,SSS", Locale.US);
utc.setTimeZone(TimeZone.getTimeZone("UTC"));
final FixedDateFormat fixedUsCentral = FixedDateFormat.create(DEFAULT, TimeZone.getTimeZone("US/Central"));
final FixedDateFormat fixedUtc = FixedDateFormat.create(DEFAULT, TimeZone.getTimeZone("UTC"));
final String[][] expectedDstAndNoDst = {
// US/Central, UTC
{"2017-11-04 19:00:00,000", "2017-11-05 00:00:00,000"}, //
{"2017-11-04 20:00:00,000", "2017-11-05 01:00:00,000"}, //
{"2017-11-04 21:00:00,000", "2017-11-05 02:00:00,000"}, //
{"2017-11-04 22:00:00,000", "2017-11-05 03:00:00,000"}, //
{"2017-11-04 23:00:00,000", "2017-11-05 04:00:00,000"}, //
{"2017-11-05 00:00:00,000", "2017-11-05 05:00:00,000"}, //
{"2017-11-05 01:00:00,000", "2017-11-05 06:00:00,000"}, // DST jump at 2am US central time
{"2017-11-05 01:00:00,000", "2017-11-05 07:00:00,000"}, //
{"2017-11-05 02:00:00,000", "2017-11-05 08:00:00,000"}, //
{"2017-11-05 03:00:00,000", "2017-11-05 09:00:00,000"}, //
{"2017-11-05 04:00:00,000", "2017-11-05 10:00:00,000"}, //
{"2017-11-05 05:00:00,000", "2017-11-05 11:00:00,000"}, //
{"2017-11-05 06:00:00,000", "2017-11-05 12:00:00,000"}, //
{"2017-11-05 07:00:00,000", "2017-11-05 13:00:00,000"}, //
{"2017-11-05 08:00:00,000", "2017-11-05 14:00:00,000"}, //
{"2017-11-05 09:00:00,000", "2017-11-05 15:00:00,000"}, //
{"2017-11-05 10:00:00,000", "2017-11-05 16:00:00,000"}, //
{"2017-11-05 11:00:00,000", "2017-11-05 17:00:00,000"}, //
{"2017-11-05 12:00:00,000", "2017-11-05 18:00:00,000"}, //
{"2017-11-05 13:00:00,000", "2017-11-05 19:00:00,000"}, //
{"2017-11-05 14:00:00,000", "2017-11-05 20:00:00,000"}, //
{"2017-11-05 15:00:00,000", "2017-11-05 21:00:00,000"}, //
{"2017-11-05 16:00:00,000", "2017-11-05 22:00:00,000"}, //
{"2017-11-05 17:00:00,000", "2017-11-05 23:00:00,000"}, // 24
{"2017-11-05 18:00:00,000", "2017-11-06 00:00:00,000"}, //
{"2017-11-05 19:00:00,000", "2017-11-06 01:00:00,000"}, //
{"2017-11-05 20:00:00,000", "2017-11-06 02:00:00,000"}, //
{"2017-11-05 21:00:00,000", "2017-11-06 03:00:00,000"}, //
{"2017-11-05 22:00:00,000", "2017-11-06 04:00:00,000"}, //
{"2017-11-05 23:00:00,000", "2017-11-06 05:00:00,000"}, //
{"2017-11-06 00:00:00,000", "2017-11-06 06:00:00,000"}, //
{"2017-11-06 01:00:00,000", "2017-11-06 07:00:00,000"}, //
{"2017-11-06 02:00:00,000", "2017-11-06 08:00:00,000"}, //
{"2017-11-06 03:00:00,000", "2017-11-06 09:00:00,000"}, //
{"2017-11-06 04:00:00,000", "2017-11-06 10:00:00,000"}, //
{"2017-11-06 05:00:00,000", "2017-11-06 11:00:00,000"}, //
};
for (int i = 0; i < 36; i++) {
final Date date = calendar.getTime();
// System.out.println(usCentral.format(date) + ", Fixed: " + fixedUsCentral.format(date.getTime()) + ", utc:
// " + utc.format(date));
assertEquals(expectedDstAndNoDst[i][0], usCentral.format(date), "SimpleDateFormat TZ=US Central");
assertEquals(expectedDstAndNoDst[i][1], utc.format(date), "SimpleDateFormat TZ=UTC");
assertEquals(
expectedDstAndNoDst[i][0], fixedUsCentral.format(date.getTime()), "FixedDateFormat TZ=US Central");
assertEquals(expectedDstAndNoDst[i][1], fixedUtc.format(date.getTime()), "FixedDateFormat TZ=US Central");
calendar.add(Calendar.HOUR_OF_DAY, 1);
}
}
@Test
void testFixedFormat_getDatePatternLengthReturnsDatePatternLength() {
assertEquals("yyyyMMdd".length(), FixedFormat.COMPACT.getDatePatternLength());
assertEquals("yyyy-MM-dd ".length(), DEFAULT.getDatePatternLength());
}
@Test
void testFixedFormat_getDatePatternLengthZeroIfNoDateInPattern() {
assertEquals(0, FixedFormat.ABSOLUTE.getDatePatternLength());
assertEquals(0, FixedFormat.ABSOLUTE_PERIOD.getDatePatternLength());
}
@Test
void testFixedFormat_getDatePatternNullIfNoDateInPattern() {
assertNull(FixedFormat.ABSOLUTE.getDatePattern());
assertNull(FixedFormat.ABSOLUTE_PERIOD.getDatePattern());
}
@Test
void testFixedFormat_getDatePatternReturnsDatePatternIfExists() {
assertEquals("yyyyMMdd", FixedFormat.COMPACT.getDatePattern());
assertEquals("yyyy-MM-dd ", DEFAULT.getDatePattern());
}
@Test
void testFixedFormat_getFastDateFormatNonNullIfDateInPattern() {
assertNotNull(FixedFormat.COMPACT.getFastDateFormat());
assertNotNull(DEFAULT.getFastDateFormat());
assertEquals("yyyyMMdd", FixedFormat.COMPACT.getFastDateFormat().getPattern());
assertEquals("yyyy-MM-dd ", DEFAULT.getFastDateFormat().getPattern());
}
@Test
void testFixedFormat_getFastDateFormatNullIfNoDateInPattern() {
assertNull(FixedFormat.ABSOLUTE.getFastDateFormat());
assertNull(FixedFormat.ABSOLUTE_PERIOD.getFastDateFormat());
}
@Test
void testFormatLong() {
final long now = System.currentTimeMillis();
final long start = now - TimeUnit.HOURS.toMillis(25);
final long end = now + TimeUnit.HOURS.toMillis(25);
for (final FixedFormat format : FixedFormat.values()) {
final String pattern = format.getPattern();
if (containsNanos(format) || format.getFixedTimeZoneFormat() != null) {
continue; // cannot compile precise timestamp formats with SimpleDateFormat
}
final SimpleDateFormat simpleDF = new SimpleDateFormat(pattern, Locale.getDefault());
final FixedDateFormat customTF = new FixedDateFormat(format, TimeZone.getDefault());
for (long time = start; time < end; time += 12345) {
final String actual = customTF.format(time);
final String expected = simpleDF.format(new Date(time));
assertEquals(expected, actual, format + "(" + pattern + ")" + "/" + time);
}
}
}
@Test
void testFormatLong_goingBackInTime() {
final long now = System.currentTimeMillis();
final long start = now - TimeUnit.HOURS.toMillis(25);
final long end = now + TimeUnit.HOURS.toMillis(25);
for (final FixedFormat format : FixedFormat.values()) {
final String pattern = format.getPattern();
if (containsNanos(format) || format.getFixedTimeZoneFormat() != null) {
continue; // cannot compile precise timestamp formats with SimpleDateFormat
}
final SimpleDateFormat simpleDF = new SimpleDateFormat(pattern, Locale.getDefault());
final FixedDateFormat customTF = new FixedDateFormat(format, TimeZone.getDefault());
for (long time = end; time > start; time -= 12345) {
final String actual = customTF.format(time);
final String expected = simpleDF.format(new Date(time));
assertEquals(expected, actual, format + "(" + pattern + ")" + "/" + time);
}
}
}
/**
* This test case validates date pattern before and after DST
* Base Date : 12 Mar 2017
* Daylight Savings started on : 02:00 AM
*/
@Test
void testFormatLong_goingBackInTime_DST() {
final Calendar instance = Calendar.getInstance(TimeZone.getTimeZone("EST"));
instance.set(2017, 2, 12, 2, 0);
final long now = instance.getTimeInMillis();
final long start = now - TimeUnit.HOURS.toMillis(1);
final long end = now + TimeUnit.HOURS.toMillis(1);
for (final FixedFormat format : FixedFormat.values()) {
final String pattern = format.getPattern();
if (containsNanos(format) || format.getFixedTimeZoneFormat() != null) {
continue; // cannot compile precise timestamp formats with SimpleDateFormat
}
final SimpleDateFormat simpleDF = new SimpleDateFormat(pattern, Locale.getDefault());
final FixedDateFormat customTF = new FixedDateFormat(format, TimeZone.getDefault());
for (long time = end; time > start; time -= 12345) {
final String actual = customTF.format(time);
final String expected = simpleDF.format(new Date(time));
assertEquals(expected, actual, format + "(" + pattern + ")" + "/" + time);
}
}
}
@Test
void testFormatLongCharArrayInt() {
final long now = System.currentTimeMillis();
final long start = now - TimeUnit.HOURS.toMillis(25);
final long end = now + TimeUnit.HOURS.toMillis(25);
final char[] buffer = new char[128];
for (final FixedFormat format : FixedFormat.values()) {
final String pattern = format.getPattern();
if (containsNanos(format) || format.getFixedTimeZoneFormat() != null) {
// cannot compile precise timestamp formats with SimpleDateFormat
// This format() API not include the TZ
continue;
}
final SimpleDateFormat simpleDF = new SimpleDateFormat(pattern, Locale.getDefault());
final FixedDateFormat customTF = new FixedDateFormat(format, TimeZone.getDefault());
for (long time = start; time < end; time += 12345) {
final int length = customTF.format(time, buffer, 23);
final String actual = new String(buffer, 23, length);
final String expected = simpleDF.format(new Date(time));
assertEquals(expected, actual, format + "(" + pattern + ")" + "/" + time);
}
}
}
@Test
void testFormatLongCharArrayInt_goingBackInTime() {
final long now = System.currentTimeMillis();
final long start = now - TimeUnit.HOURS.toMillis(25);
final long end = now + TimeUnit.HOURS.toMillis(25);
final char[] buffer = new char[128];
for (final FixedFormat format : FixedFormat.values()) {
final String pattern = format.getPattern();
if (pattern.endsWith("n")
|| pattern.matches(".+n+X*")
|| pattern.matches(".+n+Z*")
|| format.getFixedTimeZoneFormat() != null) {
continue; // cannot compile precise timestamp formats with SimpleDateFormat
}
final SimpleDateFormat simpleDF = new SimpleDateFormat(pattern, Locale.getDefault());
final FixedDateFormat customTF = new FixedDateFormat(format, TimeZone.getDefault());
for (long time = end; time > start; time -= 12345) {
final int length = customTF.format(time, buffer, 23);
final String actual = new String(buffer, 23, length);
final String expected = simpleDF.format(new Date(time));
assertEquals(expected, actual, format + "(" + pattern + ")" + "/" + time);
}
}
}
@Test
void testGetFormatReturnsConstructorFixedFormatPattern() {
final FixedDateFormat format = new FixedDateFormat(FixedDateFormat.FixedFormat.ABSOLUTE, TimeZone.getDefault());
assertSame(FixedDateFormat.FixedFormat.ABSOLUTE.getPattern(), format.getFormat());
}
@ParameterizedTest
@MethodSource("org.apache.logging.log4j.core.util.datetime.FixedDateFormat$FixedFormat#values")
@DefaultLocale(language = "en")
void testFixedFormatLength(FixedFormat format) {
LocalDate date = LocalDate.of(2023, 4, 8);
LocalTime time = LocalTime.of(19, 5, 14);
ZoneId zone = ZoneId.of("Europe/Warsaw");
long epochMillis = ZonedDateTime.of(date, time, zone).toInstant().toEpochMilli();
MutableInstant instant = new MutableInstant();
instant.initFromEpochMilli(epochMillis, 123_456);
FixedDateFormat formatter = FixedDateFormat.create(format);
String formatted = formatter.formatInstant(instant);
assertEquals(formatter.getLength(), formatted.length(), formatted);
}
}
| FixedDateFormatTest |
java | alibaba__druid | core/src/main/java/com/alibaba/druid/sql/dialect/oracle/ast/stmt/OracleCreateSynonymStatement.java | {
"start": 855,
"end": 1992
} | class ____ extends OracleStatementImpl implements SQLCreateStatement {
private boolean orReplace;
private SQLName name;
private boolean isPublic;
private SQLName object;
@Override
public void accept0(OracleASTVisitor visitor) {
if (visitor.visit(this)) {
acceptChild(visitor, name);
acceptChild(visitor, object);
}
}
public boolean isPublic() {
return isPublic;
}
public void setPublic(boolean value) {
isPublic = value;
}
public SQLName getName() {
return name;
}
public void setName(SQLName name) {
if (name != null) {
name.setParent(this);
}
this.name = name;
}
public SQLName getObject() {
return object;
}
public void setObject(SQLName object) {
if (object != null) {
object.setParent(this);
}
this.object = object;
}
public boolean isOrReplace() {
return orReplace;
}
public void setOrReplace(boolean orReplace) {
this.orReplace = orReplace;
}
}
| OracleCreateSynonymStatement |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/runtime/dispatcher/DispatcherCachedOperationsHandler.java | {
"start": 1586,
"end": 6629
} | class ____ {
private final CompletedOperationCache<AsynchronousJobOperationKey, String>
savepointTriggerCache;
private final CompletedOperationCache<AsynchronousJobOperationKey, Long> checkpointTriggerCache;
private final TriggerCheckpointFunction triggerCheckpointFunction;
private final TriggerSavepointFunction triggerSavepointFunction;
private final TriggerSavepointFunction stopWithSavepointFunction;
DispatcherCachedOperationsHandler(
DispatcherOperationCaches operationCaches,
TriggerCheckpointFunction triggerCheckpointFunction,
TriggerSavepointFunction triggerSavepointFunction,
TriggerSavepointFunction stopWithSavepointFunction) {
this(
triggerCheckpointFunction,
operationCaches.getCheckpointTriggerCache(),
triggerSavepointFunction,
stopWithSavepointFunction,
operationCaches.getSavepointTriggerCache());
}
@VisibleForTesting
DispatcherCachedOperationsHandler(
TriggerCheckpointFunction triggerCheckpointFunction,
CompletedOperationCache<AsynchronousJobOperationKey, Long> checkpointTriggerCache,
TriggerSavepointFunction triggerSavepointFunction,
TriggerSavepointFunction stopWithSavepointFunction,
CompletedOperationCache<AsynchronousJobOperationKey, String> savepointTriggerCache) {
this.triggerCheckpointFunction = triggerCheckpointFunction;
this.checkpointTriggerCache = checkpointTriggerCache;
this.triggerSavepointFunction = triggerSavepointFunction;
this.stopWithSavepointFunction = stopWithSavepointFunction;
this.savepointTriggerCache = savepointTriggerCache;
}
public CompletableFuture<Acknowledge> triggerCheckpoint(
AsynchronousJobOperationKey operationKey,
CheckpointType checkpointType,
Duration timeout) {
if (!checkpointTriggerCache.containsOperation(operationKey)) {
checkpointTriggerCache.registerOngoingOperation(
operationKey,
triggerCheckpointFunction.apply(
operationKey.getJobId(), checkpointType, timeout));
}
return CompletableFuture.completedFuture(Acknowledge.get());
}
public CompletableFuture<OperationResult<Long>> getCheckpointStatus(
AsynchronousJobOperationKey operationKey) {
return checkpointTriggerCache
.get(operationKey)
.map(CompletableFuture::completedFuture)
.orElse(
FutureUtils.completedExceptionally(
new UnknownOperationKeyException(operationKey)));
}
public CompletableFuture<Acknowledge> triggerSavepoint(
AsynchronousJobOperationKey operationKey,
String targetDirectory,
SavepointFormatType formatType,
TriggerSavepointMode savepointMode,
Duration timeout) {
return registerOperationIdempotently(
operationKey,
() ->
triggerSavepointFunction.apply(
operationKey.getJobId(),
targetDirectory,
formatType,
savepointMode,
timeout));
}
public CompletableFuture<Acknowledge> stopWithSavepoint(
AsynchronousJobOperationKey operationKey,
String targetDirectory,
SavepointFormatType formatType,
TriggerSavepointMode savepointMode,
Duration timeout) {
return registerOperationIdempotently(
operationKey,
() ->
stopWithSavepointFunction.apply(
operationKey.getJobId(),
targetDirectory,
formatType,
savepointMode,
timeout));
}
public CompletableFuture<OperationResult<String>> getSavepointStatus(
AsynchronousJobOperationKey operationKey) {
return savepointTriggerCache
.get(operationKey)
.map(CompletableFuture::completedFuture)
.orElse(
FutureUtils.completedExceptionally(
new UnknownOperationKeyException(operationKey)));
}
private CompletableFuture<Acknowledge> registerOperationIdempotently(
AsynchronousJobOperationKey operationKey,
Supplier<CompletableFuture<String>> operation) {
if (!savepointTriggerCache.containsOperation(operationKey)) {
savepointTriggerCache.registerOngoingOperation(operationKey, operation.get());
}
return CompletableFuture.completedFuture(Acknowledge.get());
}
}
| DispatcherCachedOperationsHandler |
java | apache__camel | core/camel-core/src/test/java/org/apache/camel/component/bean/issues/BeanRouteToDerivedClassTest.java | {
"start": 2545,
"end": 2770
} | class ____ have been invoked");
assertEquals("Hello World", out.toString());
out = template.requestBody("direct:other", new MyMessage("Hello World"));
assertNull(derived.getAndClearBody(), "Derived | should |
java | playframework__playframework | documentation/manual/working/javaGuide/main/json/code/javaguide/json/JavaJsonActions.java | {
"start": 3473,
"end": 4153
} | class ____ extends MockJavaAction {
JsonRequestAsAnyContentAction(JavaHandlerComponents javaHandlerComponents) {
super(javaHandlerComponents);
}
// #json-request-as-anycontent
public Result sayHello(Http.Request request) {
JsonNode json = request.body().asJson();
if (json == null) {
return badRequest("Expecting Json data");
} else {
String name = json.findPath("name").textValue();
if (name == null) {
return badRequest("Missing parameter [name]");
} else {
return ok("Hello " + name);
}
}
}
// #json-request-as-anycontent
}
static | JsonRequestAsAnyContentAction |
java | google__guice | extensions/testlib/src/com/google/inject/testing/fieldbinder/BoundFieldModule.java | {
"start": 7921,
"end": 12236
} | class ____ {
private final Object instance;
private final Field field;
private final TypeLiteral<?> fieldType;
private final Bind bindAnnotation;
/** @see #getBoundKey */
private final Key<?> boundKey;
private BoundFieldInfo(
Object instance, Field field, Bind bindAnnotation, TypeLiteral<?> fieldType)
throws BoundFieldException {
this.instance = instance;
this.field = field;
this.fieldType = fieldType;
this.bindAnnotation = bindAnnotation;
field.setAccessible(true);
Annotation bindingAnnotation = computeBindingAnnotation();
Optional<TypeLiteral<?>> naturalType = computeNaturalFieldType();
this.boundKey = computeKey(naturalType, bindingAnnotation);
checkBindingIsAssignable(field, naturalType);
}
private void checkBindingIsAssignable(Field field, Optional<TypeLiteral<?>> naturalType)
throws BoundFieldException {
if (naturalType.isPresent()) {
Class<?> boundRawType = boundKey.getTypeLiteral().getRawType();
Class<?> naturalRawType = MoreTypes.canonicalizeForKey(naturalType.get()).getRawType();
if (!boundRawType.isAssignableFrom(naturalRawType)) {
throw new BoundFieldException(
new Message(
field,
String.format(
"Requested binding type \"%s\" is not assignable "
+ "from field binding type \"%s\"",
boundRawType.getName(), naturalRawType.getName())));
}
}
}
/** The field itself. */
public Field getField() {
return field;
}
/**
* The actual type of the field.
*
* <p>For example, {@code @Bind(to = Object.class) Number one = new Integer(1);} will be {@code
* Number}. {@code @Bind Provider<Number>} will be {@code Provider<Number>}.
*/
public TypeLiteral<?> getFieldType() {
return fieldType;
}
/**
* The {@literal @}{@link Bind} annotation which is present on the field.
*
* <p>Note this is not the same as the binding annotation (or qualifier) for {@link
* #getBoundKey()}
*/
public Bind getBindAnnotation() {
return bindAnnotation;
}
/**
* The key this field will bind to.
*
* <ul>
* <li>{@code @Bind(to = Object.class) @MyQualifier Number one = new Integer(1);} will be
* {@code @MyQualifier Object}.
* <li>{@code @Bind @MyQualifier(2) Number one = new Integer(1);} will be
* {@code @MyQualifier(2) Number}.
* <li>{@code @Bind @MyQualifier Provider<String> three = "default"} will be
* {@code @MyQualfier String}
* </ul>
*/
public Key<?> getBoundKey() {
return boundKey;
}
/** Returns the current value of this field. */
public Object getValue() {
try {
return field.get(instance);
} catch (IllegalAccessException e) {
// Since we called setAccessible(true) on this field in the constructor, this is a
// programming error if it occurs.
throw new AssertionError(e);
}
}
private Annotation computeBindingAnnotation() throws BoundFieldException {
Annotation found = null;
for (Annotation annotation : InjectionPoint.getAnnotations(field)) {
Class<? extends Annotation> annotationType = annotation.annotationType();
if (Annotations.isBindingAnnotation(annotationType)) {
if (found != null) {
throw new BoundFieldException(
new Message(field, "More than one annotation is specified for this binding."));
}
found = annotation;
}
}
return found;
}
private Key<?> computeKey(Optional<TypeLiteral<?>> naturalType, Annotation bindingAnnotation)
throws BoundFieldException {
TypeLiteral<?> boundType = computeBoundType(naturalType);
if (bindingAnnotation == null) {
return Key.get(boundType);
} else {
return Key.get(boundType, bindingAnnotation);
}
}
private TypeLiteral<?> computeBoundType(Optional<TypeLiteral<?>> naturalType)
throws BoundFieldException {
Class<?> bindClass = bindAnnotation.to();
// Bind#to's default value is Bind. | BoundFieldInfo |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/type/descriptor/java/JdbcTimestampJavaType.java | {
"start": 8247,
"end": 8735
} | class ____ extends MutableMutabilityPlan<Date> {
public static final TimestampMutabilityPlan INSTANCE = new TimestampMutabilityPlan();
@Override
public Date deepCopyNotNull(Date value) {
if ( value instanceof Timestamp timestamp ) {
// make sure to get the nanos
final var copy = new Timestamp( timestamp.getTime() );
copy.setNanos( timestamp.getNanos() );
return copy;
}
else {
return new Timestamp( value.getTime() );
}
}
}
}
| TimestampMutabilityPlan |
java | apache__camel | components/camel-http/src/test/java/org/apache/camel/component/http/HttpServerTestSupport.java | {
"start": 1223,
"end": 1293
} | class ____ an empty configuration to be used.
*/
public abstract | contains |
java | apache__dubbo | dubbo-common/src/test/java/org/apache/dubbo/common/bytecode/MixinTest.java | {
"start": 1620,
"end": 1672
} | interface ____ {
void m3();
}
public | I3 |
java | apache__spark | examples/src/main/java/org/apache/spark/examples/mllib/JavaKMeansExample.java | {
"start": 1208,
"end": 2778
} | class ____ {
public static void main(String[] args) {
SparkConf conf = new SparkConf().setAppName("JavaKMeansExample");
JavaSparkContext jsc = new JavaSparkContext(conf);
// $example on$
// Load and parse data
String path = "data/mllib/kmeans_data.txt";
JavaRDD<String> data = jsc.textFile(path);
JavaRDD<Vector> parsedData = data.map(s -> {
String[] sarray = s.split(" ");
double[] values = new double[sarray.length];
for (int i = 0; i < sarray.length; i++) {
values[i] = Double.parseDouble(sarray[i]);
}
return Vectors.dense(values);
});
parsedData.cache();
// Cluster the data into two classes using KMeans
int numClusters = 2;
int numIterations = 20;
KMeansModel clusters = KMeans.train(parsedData.rdd(), numClusters, numIterations);
System.out.println("Cluster centers:");
for (Vector center: clusters.clusterCenters()) {
System.out.println(" " + center);
}
double cost = clusters.computeCost(parsedData.rdd());
System.out.println("Cost: " + cost);
// Evaluate clustering by computing Within Set Sum of Squared Errors
double WSSSE = clusters.computeCost(parsedData.rdd());
System.out.println("Within Set Sum of Squared Errors = " + WSSSE);
// Save and load model
clusters.save(jsc.sc(), "target/org/apache/spark/JavaKMeansExample/KMeansModel");
KMeansModel sameModel = KMeansModel.load(jsc.sc(),
"target/org/apache/spark/JavaKMeansExample/KMeansModel");
// $example off$
jsc.stop();
}
}
| JavaKMeansExample |
java | spring-projects__spring-security | config/src/test/java/org/springframework/security/config/annotation/method/configuration/EnableAuthorizationManagerReactiveMethodSecurityTests.java | {
"start": 2099,
"end": 2988
} | class ____ {
@Autowired
ReactiveMessageService messageService;
ReactiveMessageService delegate;
TestPublisher<String> result = TestPublisher.create();
Context withAdmin = ReactiveSecurityContextHolder
.withAuthentication(new TestingAuthenticationToken("admin", "password", "ROLE_USER", "ROLE_ADMIN"));
Context withUser = ReactiveSecurityContextHolder
.withAuthentication(new TestingAuthenticationToken("user", "password", "ROLE_USER"));
@AfterEach
public void cleanup() {
reset(this.delegate);
}
@Autowired
public void setConfig(Config config) {
this.delegate = config.delegate;
}
@Test
public void notPublisherPreAuthorizeFindByIdThenThrowsIllegalStateException() {
assertThatIllegalStateException().isThrownBy(() -> this.messageService.notPublisherPreAuthorizeFindById(1L))
.withMessage("The returnType | EnableAuthorizationManagerReactiveMethodSecurityTests |
java | elastic__elasticsearch | server/src/test/java/org/elasticsearch/index/codec/vectors/es816/ES816BinaryFlatVectorsScorerTests.java | {
"start": 1276,
"end": 50323
} | class ____ extends LuceneTestCase {
static {
LogConfigurator.loadLog4jPlugins();
LogConfigurator.configureESLogging(); // native access requires logging to be initialized
}
public void testScore() throws IOException {
int dimensions = random().nextInt(1, 4097);
int discretizedDimensions = BQVectorUtils.discretize(dimensions, 64);
int randIdx = random().nextInt(VectorSimilarityFunction.values().length);
VectorSimilarityFunction similarityFunction = VectorSimilarityFunction.values()[randIdx];
float[] centroid = new float[dimensions];
for (int j = 0; j < dimensions; j++) {
centroid[j] = random().nextFloat(-50f, 50f);
}
if (similarityFunction == VectorSimilarityFunction.COSINE) {
VectorUtil.l2normalize(centroid);
}
byte[] vector = new byte[discretizedDimensions / 8 * BinaryQuantizer.B_QUERY];
random().nextBytes(vector);
float distanceToCentroid = random().nextFloat(0f, 10_000.0f);
float vl = random().nextFloat(-1000f, 1000f);
float width = random().nextFloat(0f, 1000f);
short quantizedSum = (short) random().nextInt(0, 4097);
float normVmC = random().nextFloat(-1000f, 1000f);
float vDotC = random().nextFloat(-1000f, 1000f);
ES816BinaryFlatRWVectorsScorer.BinaryQueryVector queryVector = new ES816BinaryFlatRWVectorsScorer.BinaryQueryVector(
vector,
new BinaryQuantizer.QueryFactors(quantizedSum, distanceToCentroid, vl, width, normVmC, vDotC)
);
BinarizedByteVectorValues targetVectors = new BinarizedByteVectorValues() {
@Override
public float getCentroidDistance(int vectorOrd) throws IOException {
return random().nextFloat(0f, 1000f);
}
@Override
public float getVectorMagnitude(int vectorOrd) throws IOException {
return random().nextFloat(0f, 100f);
}
@Override
public float getOOQ(int targetOrd) throws IOException {
return random().nextFloat(-1000f, 1000f);
}
@Override
public float getNormOC(int targetOrd) throws IOException {
return random().nextFloat(-1000f, 1000f);
}
@Override
public float getODotC(int targetOrd) throws IOException {
return random().nextFloat(-1000f, 1000f);
}
@Override
public BinaryQuantizer getQuantizer() {
int dimensions = 128;
return new BinaryQuantizer(dimensions, dimensions, VectorSimilarityFunction.EUCLIDEAN);
}
@Override
public float[] getCentroid() throws IOException {
return centroid;
}
@Override
public BinarizedByteVectorValues copy() throws IOException {
return null;
}
@Override
public byte[] vectorValue(int targetOrd) throws IOException {
byte[] vectorBytes = new byte[discretizedDimensions / 8];
random().nextBytes(vectorBytes);
return vectorBytes;
}
@Override
public int size() {
return 1;
}
@Override
public VectorScorer scorer(float[] query) throws IOException {
return null;
}
@Override
public float[] getCorrectiveTerms(int vectorOrd) throws IOException {
return new float[0];
}
@Override
public int dimension() {
return dimensions;
}
};
ES816BinaryFlatRWVectorsScorer.BinarizedRandomVectorScorer scorer = new ES816BinaryFlatRWVectorsScorer.BinarizedRandomVectorScorer(
queryVector,
targetVectors,
similarityFunction
);
float score = scorer.score(0);
assertTrue(score >= 0f);
}
public void testScoreEuclidean() throws IOException {
int dimensions = 128;
byte[] vector = new byte[] {
-8,
10,
-27,
112,
-83,
36,
-36,
-122,
-114,
82,
55,
33,
-33,
120,
55,
-99,
-93,
-86,
-55,
21,
-121,
30,
111,
30,
0,
82,
21,
38,
-120,
-127,
40,
-32,
78,
-37,
42,
-43,
122,
115,
30,
115,
123,
108,
-13,
-65,
123,
124,
-33,
-68,
49,
5,
20,
58,
0,
12,
30,
30,
4,
97,
10,
66,
4,
35,
1,
67 };
float distanceToCentroid = 157799.12f;
float vl = -57.883f;
float width = 9.972266f;
short quantizedSum = 795;
ES816BinaryFlatRWVectorsScorer.BinaryQueryVector queryVector = new ES816BinaryFlatRWVectorsScorer.BinaryQueryVector(
vector,
new BinaryQuantizer.QueryFactors(quantizedSum, distanceToCentroid, vl, width, 0f, 0f)
);
BinarizedByteVectorValues targetVectors = new BinarizedByteVectorValues() {
@Override
public float getCentroidDistance(int vectorOrd) {
return 355.78073f;
}
@Override
public float getVectorMagnitude(int vectorOrd) {
return 0.7636705f;
}
@Override
public float getOOQ(int targetOrd) {
return 0;
}
@Override
public float getNormOC(int targetOrd) {
return 0;
}
@Override
public float getODotC(int targetOrd) {
return 0;
}
@Override
public BinaryQuantizer getQuantizer() {
int dimensions = 128;
return new BinaryQuantizer(dimensions, dimensions, VectorSimilarityFunction.EUCLIDEAN);
}
@Override
public float[] getCentroid() {
return new float[] {
26.7f,
16.2f,
10.913f,
10.314f,
12.12f,
14.045f,
15.887f,
16.864f,
32.232f,
31.567f,
34.922f,
21.624f,
16.349f,
29.625f,
31.994f,
22.044f,
37.847f,
24.622f,
36.299f,
27.966f,
14.368f,
19.248f,
30.778f,
35.927f,
27.019f,
16.381f,
17.325f,
16.517f,
13.272f,
9.154f,
9.242f,
17.995f,
53.777f,
23.011f,
12.929f,
16.128f,
22.16f,
28.643f,
25.861f,
27.197f,
59.883f,
40.878f,
34.153f,
22.795f,
24.402f,
37.427f,
34.19f,
29.288f,
61.812f,
26.355f,
39.071f,
37.789f,
23.33f,
22.299f,
28.64f,
47.828f,
52.457f,
21.442f,
24.039f,
29.781f,
27.707f,
19.484f,
14.642f,
28.757f,
54.567f,
20.936f,
25.112f,
25.521f,
22.077f,
18.272f,
14.526f,
29.054f,
61.803f,
24.509f,
37.517f,
35.906f,
24.106f,
22.64f,
32.1f,
48.788f,
60.102f,
39.625f,
34.766f,
22.497f,
24.397f,
41.599f,
38.419f,
30.99f,
55.647f,
25.115f,
14.96f,
18.882f,
26.918f,
32.442f,
26.231f,
27.107f,
26.828f,
15.968f,
18.668f,
14.071f,
10.906f,
8.989f,
9.721f,
17.294f,
36.32f,
21.854f,
35.509f,
27.106f,
14.067f,
19.82f,
33.582f,
35.997f,
33.528f,
30.369f,
36.955f,
21.23f,
15.2f,
30.252f,
34.56f,
22.295f,
29.413f,
16.576f,
11.226f,
10.754f,
12.936f,
15.525f,
15.868f,
16.43f };
}
@Override
public BinarizedByteVectorValues copy() {
return null;
}
@Override
public byte[] vectorValue(int targetOrd) {
return new byte[] { 44, 108, 120, -15, -61, -32, 124, 25, -63, -57, 6, 24, 1, -61, 1, 14 };
}
@Override
public int size() {
return 1;
}
@Override
public VectorScorer scorer(float[] query) throws IOException {
return null;
}
@Override
public float[] getCorrectiveTerms(int vectorOrd) throws IOException {
return new float[0];
}
@Override
public int dimension() {
return dimensions;
}
};
VectorSimilarityFunction similarityFunction = VectorSimilarityFunction.EUCLIDEAN;
ES816BinaryFlatRWVectorsScorer.BinarizedRandomVectorScorer scorer = new ES816BinaryFlatRWVectorsScorer.BinarizedRandomVectorScorer(
queryVector,
targetVectors,
similarityFunction
);
assertEquals(1f / (1f + 245482.47f), scorer.score(0), 0.1f);
}
public void testScoreMIP() throws IOException {
int dimensions = 768;
byte[] vector = new byte[] {
-76,
44,
81,
31,
30,
-59,
56,
-118,
-36,
45,
-11,
8,
-61,
95,
-100,
18,
-91,
-98,
-46,
31,
-8,
82,
-42,
121,
75,
-61,
125,
-21,
-82,
16,
21,
40,
-1,
12,
-92,
-22,
-49,
-92,
-19,
-32,
-56,
-34,
60,
-100,
69,
13,
60,
-51,
90,
4,
-77,
63,
124,
69,
88,
73,
-72,
29,
-96,
44,
69,
-123,
-59,
-94,
84,
80,
-61,
27,
-37,
-92,
-51,
-86,
19,
-55,
-36,
-2,
68,
-37,
-128,
59,
-47,
119,
-53,
56,
-12,
37,
27,
119,
-37,
125,
78,
19,
15,
-9,
94,
100,
-72,
55,
86,
-48,
26,
10,
-112,
28,
-15,
-64,
-34,
55,
-42,
-31,
-96,
-18,
60,
-44,
69,
106,
-20,
15,
47,
49,
-122,
-45,
119,
101,
22,
77,
108,
-15,
-71,
-28,
-43,
-68,
-127,
-86,
-118,
-51,
121,
-65,
-10,
-49,
115,
-6,
-61,
-98,
21,
41,
56,
29,
-16,
-82,
4,
72,
-77,
23,
23,
-32,
-98,
112,
27,
-4,
91,
-69,
102,
-114,
16,
-20,
-76,
-124,
43,
12,
3,
-30,
42,
-44,
-88,
-72,
-76,
-94,
-73,
46,
-17,
4,
-74,
-44,
53,
-11,
-117,
-105,
-113,
-37,
-43,
-128,
-70,
56,
-68,
-100,
56,
-20,
77,
12,
17,
-119,
-17,
59,
-10,
-26,
29,
42,
-59,
-28,
-28,
60,
-34,
60,
-24,
80,
-81,
24,
122,
127,
62,
124,
-5,
-11,
59,
-52,
74,
-29,
-116,
3,
-40,
-99,
-24,
11,
-10,
95,
21,
-38,
59,
-52,
29,
58,
112,
100,
-106,
-90,
71,
72,
57,
95,
98,
96,
-41,
-16,
50,
-18,
123,
-36,
74,
-101,
17,
50,
48,
96,
57,
7,
81,
-16,
-32,
-102,
-24,
-71,
-10,
37,
-22,
94,
-36,
-52,
-71,
-47,
47,
-1,
-31,
-10,
-126,
-15,
-123,
-59,
71,
-49,
67,
99,
-57,
21,
-93,
-13,
-18,
54,
-112,
-60,
9,
25,
-30,
-47,
26,
27,
26,
-63,
1,
-63,
18,
-114,
80,
110,
-123,
0,
-63,
-126,
-128,
10,
-60,
51,
-71,
28,
114,
-4,
53,
10,
23,
-96,
9,
32,
-22,
5,
-108,
33,
98,
-59,
-106,
-126,
73,
72,
-72,
-73,
-60,
-96,
-99,
31,
40,
15,
-19,
17,
-128,
33,
-75,
96,
-18,
-47,
75,
27,
-60,
-16,
-82,
13,
21,
37,
23,
70,
9,
-39,
16,
-127,
35,
-78,
64,
99,
-46,
1,
28,
65,
125,
14,
42,
26 };
float distanceToCentroid = 95.39032f;
float vl = -0.10079563f;
float width = 0.014609014f;
short quantizedSum = 5306;
float normVmC = 9.766797f;
float vDotC = 133.56123f;
float cDotC = 132.20227f;
ES816BinaryFlatRWVectorsScorer.BinaryQueryVector queryVector = new ES816BinaryFlatRWVectorsScorer.BinaryQueryVector(
vector,
new BinaryQuantizer.QueryFactors(quantizedSum, distanceToCentroid, vl, width, normVmC, vDotC)
);
BinarizedByteVectorValues targetVectors = new BinarizedByteVectorValues() {
@Override
public float getCentroidDistance(int vectorOrd) {
return 0f;
}
@Override
public float getCentroidDP() {
return cDotC;
}
@Override
public float getVectorMagnitude(int vectorOrd) {
return 0f;
}
@Override
public float getOOQ(int targetOrd) {
return 0.7882396f;
}
@Override
public float getNormOC(int targetOrd) {
return 5.0889387f;
}
@Override
public float getODotC(int targetOrd) {
return 131.485660f;
}
@Override
public BinaryQuantizer getQuantizer() {
int dimensions = 768;
return new BinaryQuantizer(dimensions, dimensions, VectorSimilarityFunction.MAXIMUM_INNER_PRODUCT);
}
@Override
public float[] getCentroid() {
return new float[] {
0.16672021f,
0.11700719f,
0.013227397f,
0.09305186f,
-0.029422699f,
0.17622353f,
0.4267106f,
-0.297038f,
0.13915674f,
0.38441318f,
-0.486725f,
-0.15987667f,
-0.19712289f,
0.1349074f,
-0.19016947f,
-0.026179956f,
0.4129807f,
0.14325741f,
-0.09106042f,
0.06876218f,
-0.19389102f,
0.4467732f,
0.03169017f,
-0.066950575f,
-0.044301506f,
-0.0059755715f,
-0.33196586f,
0.18213534f,
-0.25065416f,
0.30251458f,
0.3448419f,
-0.14900115f,
-0.07782894f,
0.3568707f,
-0.46595258f,
0.37295088f,
-0.088741764f,
0.17248306f,
-0.0072736046f,
0.32928637f,
0.13216197f,
0.032092985f,
0.21553043f,
0.016091486f,
0.31958902f,
0.0133126f,
0.1579258f,
0.018537233f,
0.046248164f,
-0.0048194043f,
-0.2184672f,
-0.26273906f,
-0.110678785f,
-0.04542999f,
-0.41625032f,
0.46025568f,
-0.16116948f,
0.4091706f,
0.18427321f,
0.004736977f,
0.16289745f,
-0.05330932f,
-0.2694863f,
-0.14762327f,
0.17744702f,
0.2445075f,
0.14377175f,
0.37390858f,
0.16165806f,
0.17177118f,
0.097307935f,
0.36326465f,
0.23221572f,
0.15579978f,
-0.065486655f,
-0.29006517f,
-0.009194494f,
0.009019374f,
0.32154799f,
-0.23186184f,
0.46485493f,
-0.110756285f,
-0.18604982f,
0.35027295f,
0.19815539f,
0.47386464f,
-0.031379268f,
0.124035835f,
0.11556784f,
0.4304302f,
-0.24455063f,
0.1816723f,
0.034300473f,
-0.034347706f,
0.040140998f,
0.1389901f,
0.22840638f,
-0.19911191f,
0.07563166f,
-0.2744902f,
0.13114859f,
-0.23862572f,
-0.31404558f,
0.41355187f,
0.12970817f,
-0.35403475f,
-0.2714075f,
0.07231573f,
0.043893218f,
0.30324167f,
0.38928393f,
-0.1567055f,
-0.0083288215f,
0.0487653f,
0.12073729f,
-0.01582117f,
0.13381198f,
-0.084824145f,
-0.15329859f,
-1.120622f,
0.3972598f,
0.36022213f,
-0.29826534f,
-0.09468781f,
0.03550699f,
-0.21630692f,
0.55655843f,
-0.14842057f,
0.5924833f,
0.38791573f,
0.1502777f,
0.111737385f,
0.1926823f,
0.66021144f,
0.25601995f,
0.28220543f,
0.10194068f,
0.013066262f,
-0.09348819f,
-0.24085014f,
-0.17843121f,
-0.012598432f,
0.18757571f,
0.48543528f,
-0.059388146f,
0.1548026f,
0.041945867f,
0.3322589f,
0.012830887f,
0.16621992f,
0.22606649f,
0.13959105f,
-0.16688728f,
0.47194278f,
-0.12767595f,
0.037815034f,
0.441938f,
0.07875027f,
0.08625042f,
0.053454693f,
0.74093896f,
0.34662113f,
0.009829135f,
-0.033400282f,
0.030965377f,
0.17645596f,
0.083803624f,
0.32578796f,
0.49538168f,
-0.13212465f,
-0.39596975f,
0.109529115f,
0.2815771f,
-0.051440604f,
0.21889819f,
0.25598505f,
0.012208843f,
-0.012405662f,
0.3248759f,
0.00997502f,
0.05999008f,
0.03562817f,
0.19007418f,
0.24805716f,
0.5926766f,
0.26937613f,
0.25856f,
-0.05798439f,
-0.29168302f,
0.14050555f,
0.084851265f,
-0.03763504f,
0.8265359f,
-0.23383066f,
-0.042164285f,
0.19120507f,
-0.12189065f,
0.3864055f,
-0.19823311f,
0.30280992f,
0.10814344f,
-0.164514f,
-0.22905481f,
0.13680641f,
0.4513772f,
-0.514546f,
-0.061746247f,
0.11598224f,
-0.23093395f,
-0.09735358f,
0.02767051f,
0.11594536f,
0.17106244f,
0.21301728f,
-0.048222974f,
0.2212131f,
-0.018857865f,
-0.09783516f,
0.42156664f,
-0.14032331f,
-0.103861615f,
0.4190284f,
0.068923555f,
-0.015083771f,
0.083590426f,
-0.15759592f,
-0.19096768f,
-0.4275228f,
0.12626286f,
0.12192557f,
0.4157616f,
0.048780657f,
0.008426048f,
-0.0869124f,
0.054927208f,
0.28417027f,
0.29765493f,
0.09203619f,
-0.14446871f,
-0.117514975f,
0.30662632f,
0.24904715f,
-0.19551662f,
-0.0045785015f,
0.4217626f,
-0.31457824f,
0.23381722f,
0.089111514f,
-0.27170828f,
-0.06662652f,
0.10011391f,
-0.090274535f,
0.101849966f,
0.26554734f,
-0.1722843f,
0.23296228f,
0.25112453f,
-0.16790418f,
0.010348314f,
0.05061285f,
0.38003662f,
0.0804625f,
0.3450673f,
0.364368f,
-0.2529952f,
-0.034065288f,
0.22796603f,
0.5457553f,
0.11120353f,
0.24596325f,
0.42822433f,
-0.19215727f,
-0.06974534f,
0.19388479f,
-0.17598474f,
-0.08769705f,
0.12769659f,
0.1371616f,
-0.4636819f,
0.16870509f,
0.14217548f,
0.04412187f,
-0.20930687f,
0.0075530168f,
0.10065227f,
0.45334083f,
-0.1097471f,
-0.11139921f,
-0.31835595f,
-0.057386875f,
0.16285825f,
0.5088513f,
-0.06318843f,
-0.34759882f,
0.21132466f,
0.33609292f,
0.04858872f,
-0.058759f,
0.22845529f,
-0.07641319f,
0.5452827f,
-0.5050389f,
0.1788054f,
0.37428045f,
0.066334985f,
-0.28162515f,
-0.15629752f,
0.33783385f,
-0.0832242f,
0.29144394f,
0.47892854f,
-0.47006592f,
-0.07867588f,
0.3872869f,
0.28053126f,
0.52399015f,
0.21979983f,
0.076880336f,
0.47866163f,
0.252952f,
-0.1323851f,
-0.22225754f,
-0.38585815f,
0.12967427f,
0.20340872f,
-0.326928f,
0.09636557f,
-0.35929212f,
0.5413311f,
0.019960884f,
0.33512768f,
0.15133342f,
-0.14124066f,
-0.1868793f,
-0.07862198f,
0.22739467f,
0.19598985f,
0.34314656f,
-0.05071516f,
-0.21107961f,
0.19934991f,
0.04822684f,
0.15060754f,
0.26586458f,
-0.15528078f,
0.123646654f,
0.14450715f,
-0.12574252f,
0.30608323f,
0.018549249f,
0.36323825f,
0.06762097f,
0.08562406f,
-0.07863075f,
0.15975896f,
0.008347004f,
0.37931192f,
0.22957338f,
0.33606857f,
-0.25204057f,
0.18126069f,
0.41903302f,
0.20244692f,
-0.053850617f,
0.23088565f,
0.16085246f,
0.1077502f,
-0.12445943f,
0.115779735f,
0.124704875f,
0.13076028f,
-0.11628619f,
-0.12580182f,
0.065204754f,
-0.26290357f,
-0.23539798f,
-0.1855292f,
0.39872098f,
0.44495568f,
0.05491784f,
0.05135692f,
0.624011f,
0.22839564f,
0.0022447354f,
-0.27169296f,
-0.1694988f,
-0.19106841f,
0.0110123325f,
0.15464798f,
-0.16269256f,
0.04033836f,
-0.11792753f,
0.17172396f,
-0.08912173f,
-0.30929542f,
-0.03446989f,
-0.21738084f,
0.39657044f,
0.33550346f,
-0.06839139f,
0.053675443f,
0.33783767f,
0.22576828f,
0.38280004f,
4.1448855f,
0.14225426f,
0.24038498f,
0.072373435f,
-0.09465926f,
-0.016144043f,
0.40864578f,
-0.2583055f,
0.031816103f,
0.062555805f,
0.06068663f,
0.25858644f,
-0.10598804f,
0.18201788f,
-0.00090025424f,
0.085680895f,
0.4304161f,
0.028686283f,
0.027298616f,
0.27473378f,
-0.3888415f,
0.44825438f,
0.3600378f,
0.038944595f,
0.49292335f,
0.18556066f,
0.15779617f,
0.29989767f,
0.39233804f,
0.39759228f,
0.3850708f,
-0.0526475f,
0.18572918f,
0.09667526f,
-0.36111078f,
0.3439669f,
0.1724522f,
0.14074509f,
0.26097745f,
0.16626832f,
-0.3062964f,
-0.054877423f,
0.21702516f,
0.4736452f,
0.2298038f,
-0.2983771f,
0.118479654f,
0.35940516f,
0.12212727f,
0.17234904f,
0.30632678f,
0.09207966f,
-0.14084268f,
-0.19737118f,
0.12442629f,
0.52454203f,
0.1266684f,
0.3062802f,
0.121598125f,
-0.09156268f,
0.11491686f,
-0.105715364f,
0.19831072f,
0.061421417f,
-0.41778997f,
0.14488487f,
0.023310646f,
0.27257463f,
0.16821945f,
-0.16702746f,
0.263203f,
0.33512688f,
0.35117313f,
-0.31740817f,
-0.14203706f,
0.061256267f,
-0.19764185f,
0.04822579f,
-0.0016218472f,
-0.025792575f,
0.4885193f,
-0.16942391f,
-0.04156327f,
0.15908112f,
-0.06998626f,
0.53907114f,
0.10317832f,
-0.365468f,
0.4729886f,
0.14291425f,
0.32812154f,
-0.0273262f,
0.31760117f,
0.16925456f,
0.21820979f,
0.085142255f,
0.16118735f,
-3.7089362f,
0.251577f,
0.18394576f,
0.027926167f,
0.15720351f,
0.13084261f,
0.16240814f,
0.23045056f,
-0.3966458f,
0.22822891f,
-0.061541352f,
0.028320132f,
-0.14736478f,
0.184569f,
0.084853746f,
0.15172474f,
0.08277542f,
0.27751622f,
0.23450488f,
-0.15349835f,
0.29665688f,
0.32045734f,
0.20012043f,
-0.2749372f,
0.011832386f,
0.05976605f,
0.018300122f,
-0.07855043f,
-0.075900674f,
0.0384252f,
-0.15101928f,
0.10922137f,
0.47396383f,
-0.1771141f,
0.2203417f,
0.33174303f,
0.36640546f,
0.10906258f,
0.13765177f,
0.2488032f,
-0.061588854f,
0.20347528f,
0.2574979f,
0.22369152f,
0.18777567f,
-0.0772263f,
-0.1353299f,
0.087077625f,
-0.05409276f,
0.027534787f,
0.08053508f,
0.3403908f,
-0.15362988f,
0.07499862f,
0.54367846f,
-0.045938436f,
0.12206868f,
0.031069376f,
0.2972343f,
0.3235321f,
-0.053970363f,
-0.0042564687f,
0.21447177f,
0.023565233f,
-0.1286087f,
-0.047359955f,
0.23021339f,
0.059837278f,
0.19709614f,
-0.17340347f,
0.11572943f,
0.21720429f,
0.29375625f,
-0.045433592f,
0.033339307f,
0.24594454f,
-0.021661613f,
-0.12823369f,
0.41809165f,
0.093840264f,
-0.007481906f,
0.22441079f,
-0.45719734f,
0.2292629f,
2.675806f,
0.3690025f,
2.1311781f,
0.07818368f,
-0.17055893f,
0.3162922f,
-0.2983149f,
0.21211359f,
0.037087034f,
0.021580033f,
0.086415835f,
0.13541797f,
-0.12453424f,
0.04563163f,
-0.082379065f,
-0.15938349f,
0.38595748f,
-0.8796574f,
-0.080991246f,
0.078572094f,
0.20274459f,
0.009252143f,
-0.12719384f,
0.105845824f,
0.1592398f,
-0.08656061f,
-0.053054806f,
0.090986334f,
-0.02223379f,
-0.18215932f,
-0.018316114f,
0.1806707f,
0.24788831f,
-0.041049056f,
0.01839475f,
0.19160001f,
-0.04827654f,
4.4070687f,
0.12640671f,
-0.11171499f,
-0.015480781f,
0.14313947f,
0.10024215f,
0.4129662f,
0.038836367f,
-0.030228542f,
0.2948598f,
0.32946473f,
0.2237934f,
0.14260699f,
-0.044821896f,
0.23791742f,
0.079720296f,
0.27059034f,
0.32129505f,
0.2725177f,
0.06883333f,
0.1478041f,
0.07598411f,
0.27230525f,
-0.04704308f,
0.045167264f,
0.215413f,
0.20359069f,
-0.092178136f,
-0.09523752f,
0.21427691f,
0.10512272f,
5.1295033f,
0.040909242f,
0.007160441f,
-0.192866f,
-0.102640584f,
0.21103396f,
-0.006780398f,
-0.049653083f,
-0.29426834f,
-0.0038102255f,
-0.13842082f,
0.06620181f,
-0.3196518f,
0.33279592f,
0.13845938f,
0.16162738f,
-0.24798508f,
-0.06672485f,
0.195944f,
-0.11957207f,
0.44237947f,
-0.07617347f,
0.13575341f,
-0.35074243f,
-0.093798876f,
0.072853446f,
-0.20490398f,
0.26504788f,
-0.046076056f,
0.16488416f,
0.36007464f,
0.20955376f,
-0.3082038f,
0.46533757f,
-0.27326992f,
-0.14167665f,
0.25017953f,
0.062622115f,
0.14057694f,
-0.102370486f,
0.33898357f,
0.36456722f,
-0.10120469f,
-0.27838466f,
-0.11779602f,
0.18517569f,
-0.05942488f,
0.076405466f,
0.007960496f,
0.0443746f,
0.098998964f,
-0.01897129f,
0.8059487f,
0.06991939f,
0.26562217f,
0.26942885f,
0.11432197f,
-0.0055776504f,
0.054493718f,
-0.13086213f,
0.6841702f,
0.121975765f,
0.02787146f,
0.29039973f,
0.30943078f,
0.21762547f,
0.28751117f,
0.027524523f,
0.5315654f,
-0.22451901f,
-0.13782433f,
0.08228316f,
0.07808882f,
0.17445615f,
-0.042489477f,
0.13232234f,
0.2756272f,
-0.18824948f,
0.14326479f,
-0.119312495f,
0.011788091f,
-0.22103515f,
-0.2477118f,
-0.10513839f,
0.034028634f,
0.10693818f,
0.03057979f,
0.04634646f,
0.2289361f,
0.09981585f,
0.26901972f,
0.1561221f,
-0.10639886f,
0.36466748f,
0.06350991f,
0.027927283f,
0.11919768f,
0.23290513f,
-0.03417105f,
0.16698854f,
-0.19243467f,
0.28430334f,
0.03754995f,
-0.08697018f,
0.20413163f,
-0.27218238f,
0.13707504f,
-0.082289375f,
0.03479585f,
0.2298305f,
0.4983682f,
0.34522808f,
-0.05711886f,
-0.10568684f,
-0.07771385f };
}
@Override
public BinarizedByteVectorValues copy() {
return null;
}
@Override
public byte[] vectorValue(int targetOrd) {
return new byte[] {
-88,
-3,
60,
-75,
-38,
79,
84,
-53,
-116,
-126,
19,
-19,
-21,
-80,
69,
101,
-71,
53,
101,
-124,
-24,
-76,
92,
-45,
108,
-107,
-18,
102,
23,
-80,
-47,
116,
87,
-50,
27,
-31,
-10,
-13,
117,
-88,
-27,
-93,
-98,
-39,
30,
-109,
-114,
5,
-15,
98,
-82,
81,
83,
118,
30,
-118,
-12,
-95,
121,
125,
-13,
-88,
75,
-85,
-56,
-126,
82,
-59,
48,
-81,
67,
-63,
81,
24,
-83,
95,
-44,
103,
3,
-40,
-13,
-41,
-29,
-60,
1,
65,
-4,
-110,
-40,
34,
118,
51,
-76,
75,
70,
-51 };
}
@Override
public int size() {
return 1;
}
@Override
public VectorScorer scorer(float[] query) throws IOException {
return null;
}
@Override
public float[] getCorrectiveTerms(int vectorOrd) throws IOException {
return new float[0];
}
@Override
public int dimension() {
return dimensions;
}
};
VectorSimilarityFunction similarityFunction = VectorSimilarityFunction.MAXIMUM_INNER_PRODUCT;
ES816BinaryFlatRWVectorsScorer.BinarizedRandomVectorScorer scorer = new ES816BinaryFlatRWVectorsScorer.BinarizedRandomVectorScorer(
queryVector,
targetVectors,
similarityFunction
);
assertEquals(129.64046f, scorer.score(0), 0.0001f);
}
}
| ES816BinaryFlatVectorsScorerTests |
java | netty__netty | codec-compression/src/main/java/io/netty/handler/codec/compression/FastLz.java | {
"start": 770,
"end": 1071
} | class ____ methods for compression and decompression of buffers and saves
* constants which use by {@link FastLzFrameEncoder} and {@link FastLzFrameDecoder}.
*
* This is refactored code of <a href="https://code.google.com/p/jfastlz/">jfastlz</a>
* library written by William Kinney.
*/
final | provides |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.