language stringclasses 1 value | repo stringclasses 60 values | path stringlengths 22 294 | class_span dict | source stringlengths 13 1.16M | target stringlengths 1 113 |
|---|---|---|---|---|---|
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/UnusedVariableTest.java | {
"start": 48815,
"end": 49302
} | class ____ {
private int foo(int b) {
return b;
}
void test() {
foo(2);
}
}
""")
.doTest();
}
@Test
public void unusedFunctionalInterfaceParameter() {
helper
.addSourceLines(
"Test.java",
"""
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
| Test |
java | elastic__elasticsearch | x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/saml/SamlLogoutRequestHandler.java | {
"start": 998,
"end": 5913
} | class ____ extends SamlObjectHandler {
private static final String REQUEST_TAG_NAME = "LogoutRequest";
SamlLogoutRequestHandler(Clock clock, IdpConfiguration idp, SpConfiguration sp, TimeValue maxSkew) {
super(clock, idp, sp, maxSkew);
}
/**
* Processes the provided LogoutRequest and extracts the NameID and SessionIndex.
* Returns these in a {@link SamlAttributes} object with an empty attributes list.
* <p>
* The recommended binding for Logout (for maximum interoperability) is HTTP-Redirect.
* Under this binding the signature is applied to the query-string (including parameter
* names and url-encoded/base64-encoded/deflated values). Therefore in order to properly
* validate the signature, this method operates on a raw query- string.
*
* @throws ElasticsearchSecurityException If the SAML is invalid for this realm/configuration
*/
public Result parseFromQueryString(String queryString) {
final ParsedQueryString parsed = parseQueryStringAndValidateSignature(queryString, "SAMLRequest");
final Element root = parseSamlMessage(inflate(decodeBase64(parsed.samlMessage)));
if (REQUEST_TAG_NAME.equals(root.getLocalName()) && SAML_NAMESPACE.equals(root.getNamespaceURI())) {
try {
final LogoutRequest logoutRequest = buildXmlObject(root, LogoutRequest.class);
return parseLogout(logoutRequest, parsed.hasSignature == false, parsed.relayState);
} catch (ElasticsearchSecurityException e) {
logger.trace("Rejecting SAML logout request {} because {}", SamlUtils.toString(root), e.getMessage());
throw e;
}
} else {
throw samlException(
"SAML content [{}] should have a root element of Namespace=[{}] Tag=[{}]",
root,
SAML_NAMESPACE,
REQUEST_TAG_NAME
);
}
}
private Result parseLogout(LogoutRequest logoutRequest, boolean requireSignature, String relayState) {
final Signature signature = logoutRequest.getSignature();
if (signature == null) {
if (requireSignature) {
throw samlException("Logout request is not signed");
}
} else {
validateSignature(signature, logoutRequest.getIssuer());
}
checkIssuer(logoutRequest.getIssuer(), logoutRequest);
checkDestination(logoutRequest);
validateNotOnOrAfter(logoutRequest.getNotOnOrAfter());
return new Result(logoutRequest.getID(), SamlNameId.fromXml(getNameID(logoutRequest)), getSessionIndex(logoutRequest), relayState);
}
private NameID getNameID(LogoutRequest logoutRequest) {
final NameID nameID = logoutRequest.getNameID();
if (nameID == null) {
final EncryptedID encryptedID = logoutRequest.getEncryptedID();
if (encryptedID != null) {
final SAMLObject samlObject = decrypt(encryptedID);
if (samlObject instanceof NameID) {
return (NameID) samlObject;
}
}
}
return nameID;
}
private SAMLObject decrypt(EncryptedID encrypted) {
if (decrypter == null) {
throw samlException("SAML EncryptedID [" + text(encrypted, 32) + "] is encrypted, but no decryption key is available");
}
try {
return decrypter.decrypt(encrypted);
} catch (DecryptionException e) {
logger.debug(
() -> format(
"Failed to decrypt SAML EncryptedID [%s] with [%s]",
text(encrypted, 512),
describe(getSpConfiguration().getEncryptionCredentials())
),
e
);
throw samlException("Failed to decrypt SAML EncryptedID " + text(encrypted, 32), e);
}
}
private static String getSessionIndex(LogoutRequest logoutRequest) {
return logoutRequest.getSessionIndexes().stream().map(as -> as.getValue()).filter(Objects::nonNull).findFirst().orElse(null);
}
private void checkDestination(LogoutRequest request) {
final String url = getSpConfiguration().getLogoutUrl();
if (url == null) {
throw samlException(
"SAML request "
+ request.getID()
+ " is for destination "
+ request.getDestination()
+ " but this realm is not configured for logout"
);
}
if (url.equals(request.getDestination()) == false) {
throw samlException(
"SAML request " + request.getID() + " is for destination " + request.getDestination() + " but this realm uses " + url
);
}
}
public static | SamlLogoutRequestHandler |
java | junit-team__junit5 | junit-jupiter-api/src/main/java/org/junit/jupiter/api/extension/RegisterExtension.java | {
"start": 4380,
"end": 4792
} | class ____
* initialized programmatically by supplying a custom {@code lookUpDocsDir()}
* method to a {@code static} factory method in the {@code DocumentationExtension}.
* The configured {@code DocumentationExtension} will be automatically registered
* as an extension. In addition, test methods can access the instance of the
* extension via the {@code docs} field if necessary.
*
* <pre style="code">
* | is |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/DelegationTokenManager.java | {
"start": 1811,
"end": 2693
} | class ____ {
private static final Logger LOG =
LoggerFactory.getLogger(DelegationTokenManager.class);
public static final String ENABLE_ZK_KEY = "zk-dt-secret-manager.enable";
public static final String PREFIX = "delegation-token.";
public static final String UPDATE_INTERVAL = PREFIX + "update-interval.sec";
public static final long UPDATE_INTERVAL_DEFAULT = 24 * 60 * 60;
public static final String MAX_LIFETIME = PREFIX + "max-lifetime.sec";
public static final long MAX_LIFETIME_DEFAULT = 7 * 24 * 60 * 60;
public static final String RENEW_INTERVAL = PREFIX + "renew-interval.sec";
public static final long RENEW_INTERVAL_DEFAULT = 24 * 60 * 60;
public static final String REMOVAL_SCAN_INTERVAL = PREFIX +
"removal-scan-interval.sec";
public static final long REMOVAL_SCAN_INTERVAL_DEFAULT = 60 * 60;
private static | DelegationTokenManager |
java | elastic__elasticsearch | modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/DateIndexNameProcessor.java | {
"start": 1256,
"end": 5174
} | class ____ extends AbstractProcessor {
public static final String TYPE = "date_index_name";
private final String field;
private final TemplateScript.Factory indexNamePrefixTemplate;
private final TemplateScript.Factory dateRoundingTemplate;
private final TemplateScript.Factory indexNameFormatTemplate;
private final ZoneId timezone;
private final List<Function<String, ZonedDateTime>> dateFormats;
DateIndexNameProcessor(
String tag,
String description,
String field,
List<Function<String, ZonedDateTime>> dateFormats,
ZoneId timezone,
TemplateScript.Factory indexNamePrefixTemplate,
TemplateScript.Factory dateRoundingTemplate,
TemplateScript.Factory indexNameFormatTemplate
) {
super(tag, description);
this.field = field;
this.timezone = timezone;
this.dateFormats = dateFormats;
this.indexNamePrefixTemplate = indexNamePrefixTemplate;
this.dateRoundingTemplate = dateRoundingTemplate;
this.indexNameFormatTemplate = indexNameFormatTemplate;
}
@Override
public IngestDocument execute(IngestDocument ingestDocument) throws Exception {
// Date can be specified as a string or long:
Object obj = ingestDocument.getFieldValue(field, Object.class);
String date = null;
if (obj != null) {
// Not use Objects.toString(...) here, because null gets changed to "null" which may confuse some date parsers
date = obj.toString();
}
ZonedDateTime dateTime = null;
Exception lastException = null;
for (Function<String, ZonedDateTime> dateParser : dateFormats) {
try {
dateTime = dateParser.apply(date);
} catch (Exception e) {
// try the next parser and keep track of the exceptions
lastException = ExceptionsHelper.useOrSuppress(lastException, e);
}
}
if (dateTime == null) {
throw new IllegalArgumentException("unable to parse date [" + date + "]", lastException);
}
String indexNamePrefix = ingestDocument.renderTemplate(indexNamePrefixTemplate);
String indexNameFormat = ingestDocument.renderTemplate(indexNameFormatTemplate);
String dateRounding = ingestDocument.renderTemplate(dateRoundingTemplate);
DateFormatter formatter = DateFormatter.forPattern(indexNameFormat);
// use UTC instead of Z is string representation of UTC, so behaviour is the same between 6.x and 7
String zone = timezone.equals(ZoneOffset.UTC) ? "UTC" : timezone.getId();
StringBuilder builder = new StringBuilder().append('<')
.append(indexNamePrefix)
.append('{')
.append(formatter.format(dateTime))
.append("||/")
.append(dateRounding)
.append('{')
.append(indexNameFormat)
.append('|')
.append(zone)
.append('}')
.append('}')
.append('>');
String dynamicIndexName = builder.toString();
ingestDocument.setFieldValue(IngestDocument.Metadata.INDEX.getFieldName(), dynamicIndexName);
return ingestDocument;
}
@Override
public String getType() {
return TYPE;
}
String getField() {
return field;
}
TemplateScript.Factory getIndexNamePrefixTemplate() {
return indexNamePrefixTemplate;
}
TemplateScript.Factory getDateRoundingTemplate() {
return dateRoundingTemplate;
}
TemplateScript.Factory getIndexNameFormatTemplate() {
return indexNameFormatTemplate;
}
ZoneId getTimezone() {
return timezone;
}
List<Function<String, ZonedDateTime>> getDateFormats() {
return dateFormats;
}
public static final | DateIndexNameProcessor |
java | apache__rocketmq | test/src/main/java/org/apache/rocketmq/test/util/VerifyUtils.java | {
"start": 1110,
"end": 4749
} | class ____ {
private static Logger logger = LoggerFactory.getLogger(VerifyUtils.class);
public static int verify(Collection<Object> sendMsgs, Collection<Object> recvMsgs) {
int miss = 0;
for (Object msg : sendMsgs) {
if (!recvMsgs.contains(msg)) {
miss++;
}
}
return miss;
}
public static Collection<Object> getFilterdMessage(Collection<Object> sendMsgs,
Collection<Object> recvMsgs) {
Collection<Object> recvMsgsSync = Collections.synchronizedCollection(recvMsgs);
Collection<Object> filteredMsgs = new ArrayList<Object>();
int filterNum = 0;
for (Object msg : recvMsgsSync) {
if (sendMsgs.contains(msg)) {
filteredMsgs.add(msg);
} else {
filterNum++;
}
}
logger.info(String.format("[%s] messages is filtered!", filterNum));
return filteredMsgs;
}
public static int verifyUserProperty(Collection<Object> sendMsgs, Collection<Object> recvMsgs) {
return 0;
}
public static void verifyMessageQueueId(int expectId, Collection<Object> msgs) {
for (Object msg : msgs) {
MessageExt msgEx = (MessageExt) msg;
assert expectId == msgEx.getQueueId();
}
}
public static boolean verifyBalance(int msgSize, float error, int... recvSize) {
boolean balance = true;
int evenSize = msgSize / recvSize.length;
for (int size : recvSize) {
if (Math.abs(size - evenSize) > error * evenSize) {
balance = false;
break;
}
}
return balance;
}
public static boolean verifyBalance(int msgSize, int... recvSize) {
return verifyBalance(msgSize, 0.1f, recvSize);
}
public static boolean verifyDelay(long delayTimeMills, long nextLevelDelayTimeMills,
Collection<Object> recvMsgTimes) {
boolean delay = true;
for (Object timeObj : recvMsgTimes) {
long time = (Long) timeObj;
if (time < delayTimeMills || time > nextLevelDelayTimeMills) {
delay = false;
logger.info(String.format("delay error:%s", Math.abs(time - delayTimeMills)));
break;
}
}
return delay;
}
public static boolean verifyOrder(Collection<Collection<Object>> queueMsgs) {
for (Collection<Object> msgs : queueMsgs) {
if (!verifyOrderMsg(msgs)) {
return false;
}
}
return true;
}
public static boolean verifyOrderMsg(Collection<Object> msgs) {
int min = Integer.MIN_VALUE;
int curr;
if (msgs.size() == 0 || msgs.size() == 1) {
return true;
} else {
for (Object msg : msgs) {
curr = Integer.parseInt((String) msg);
if (curr < min) {
return false;
} else {
min = curr;
}
}
}
return true;
}
public static boolean verifyRT(Collection<Object> rts, long maxRTMills) {
boolean rtExpect = true;
for (Object obj : rts) {
long rt = (Long) obj;
if (rt > maxRTMills) {
rtExpect = false;
logger.info(String.format("%s greater thran maxtRT:%s!", rt, maxRTMills));
}
}
return rtExpect;
}
public static void main(String[] args) {
verifyBalance(400, 0.1f, 230, 190);
}
}
| VerifyUtils |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/action/admin/indices/template/reservedstate/ReservedComposableIndexTemplateAction.java | {
"start": 2582,
"end": 12874
} | class ____
implements
ReservedProjectStateHandler<ReservedComposableIndexTemplateAction.ComponentsAndComposables> {
public static final String NAME = "index_templates";
public static final String COMPONENTS = "component_templates";
private static final String COMPONENT_PREFIX = "component_template:";
public static final String COMPOSABLES = "composable_index_templates";
public static final String COMPOSABLE_PREFIX = "composable_index_template:";
private final MetadataIndexTemplateService indexTemplateService;
public ReservedComposableIndexTemplateAction(MetadataIndexTemplateService indexTemplateService) {
this.indexTemplateService = indexTemplateService;
}
@Override
public String name() {
return NAME;
}
// Since we can't split the reserved state handler into two separate handlers, because of the
// circular dependency on create and delete, we must store both the component template keys and
// the composable index template keys in the same reserved state handler. To be able to correctly
// distinguish between the component names and the composable names, we prefix the reserved keys
// when they are stored in the cluster state. Similarly, we remove the prefix when we need to perform
// the REST API validation in the corresponding transport actions.
/**
* Prefixes the component template name with a prefix for storage in the cluster state
* @param name component template name
* @return prefixed component template name for storage in the reserved cluster state
*/
public static String reservedComponentName(String name) {
return COMPONENT_PREFIX + name;
}
/**
* Removes the reserved cluster state prefix from the component template name
* <p>
* Once the prefix is removed we can use the name for conflict validation in {@link TransportPutComponentTemplateAction} and
* {@link org.elasticsearch.action.admin.indices.template.delete.TransportDeleteComponentTemplateAction}
* @param name the prefixed reserved component template name
* @return the un-prefixed component template name used for conflict validation at REST
*/
public static String componentNameFromReservedName(String name) {
assert name.startsWith(COMPONENT_PREFIX);
return name.substring(COMPONENT_PREFIX.length());
}
/**
* Prefixes the composable index template name with a prefix for storage in the cluster state
* @param name composable index template name
* @return prefixed composable index template name for storage in the reserved cluster state
*/
public static String reservedComposableIndexName(String name) {
return COMPOSABLE_PREFIX + name;
}
/**
* Removes the reserved cluster state prefix from the composable index template name
* <p>
* Once the prefix is removed we can use the name for conflict validation in
* {@link org.elasticsearch.action.admin.indices.template.put.TransportPutComposableIndexTemplateAction} and
* {@link org.elasticsearch.action.admin.indices.template.delete.TransportDeleteComposableIndexTemplateAction}
* @param name the prefixed reserved composable index template name
* @return the un-prefixed composable index template name used for conflict validation at REST
*/
public static String composableIndexNameFromReservedName(String name) {
assert name.startsWith(COMPOSABLE_PREFIX);
return name.substring(COMPOSABLE_PREFIX.length());
}
private ComponentsAndComposables prepare(ComponentsAndComposables componentsAndComposables) {
for (var request : componentsAndComposables.componentTemplates) {
validate(request);
}
for (var request : componentsAndComposables.composableTemplates) {
validate(request);
}
return componentsAndComposables;
}
@Override
public TransformState transform(ProjectId projectId, ComponentsAndComposables source, TransformState prevState) throws Exception {
var requests = prepare(source);
ClusterState clusterState = prevState.state();
ProjectMetadata project = clusterState.getMetadata().getProject(projectId);
// We transform in the following order:
// 1. create or update component templates (composable templates depend on them)
// 2. create or update composable index templates (with disabled v2 overlap validation, we might delete some at step 3,
// while, 2 and 3 cannot be reversed because of data streams)
// 3. delete composable index templates (this will fail on attached data streams, unless we added higher priority one)
// 4. validate for v2 composable template overlaps
// 5. delete component templates (this will check if there are any related composable index templates and fail)
var components = requests.componentTemplates;
var composables = requests.composableTemplates;
// 1. create or update component templates (composable templates depend on them)
for (var request : components) {
ComponentTemplate template = indexTemplateService.normalizeComponentTemplate(request.componentTemplate());
project = indexTemplateService.addComponentTemplate(project, false, request.name(), template);
}
// 2. create or update composable index templates, no overlap validation
for (var request : composables) {
MetadataIndexTemplateService.validateV2TemplateRequest(project, request.name(), request.indexTemplate());
project = indexTemplateService.addIndexTemplateV2(project, false, request.name(), request.indexTemplate(), false);
}
Set<String> composableEntities = composables.stream().map(r -> reservedComposableIndexName(r.name())).collect(Collectors.toSet());
Set<String> composablesToDelete = prevState.keys()
.stream()
.filter(k -> k.startsWith(COMPOSABLE_PREFIX) && composableEntities.contains(k) == false)
.collect(Collectors.toSet());
// 3. delete composable index templates (this will fail on attached data streams, unless we added a higher priority one)
if (composablesToDelete.isEmpty() == false) {
var composableNames = composablesToDelete.stream().map(c -> composableIndexNameFromReservedName(c)).toArray(String[]::new);
project = MetadataIndexTemplateService.innerRemoveIndexTemplateV2(project, composableNames);
}
// 4. validate for v2 composable template overlaps
for (var request : composables) {
MetadataIndexTemplateService.v2TemplateOverlaps(project.templatesV2(), request.name(), request.indexTemplate(), true);
}
Set<String> componentEntities = components.stream().map(r -> reservedComponentName(r.name())).collect(Collectors.toSet());
Set<String> componentsToDelete = prevState.keys().stream().filter(k -> k.startsWith(COMPONENT_PREFIX)).collect(Collectors.toSet());
componentsToDelete.removeAll(componentEntities);
// 5. delete component templates (this will check if there are any related composable index templates and fail)
if (componentsToDelete.isEmpty() == false) {
var componentNames = componentsToDelete.stream().map(c -> componentNameFromReservedName(c)).toArray(String[]::new);
project = MetadataIndexTemplateService.innerRemoveComponentTemplate(project, componentNames);
}
return new TransformState(
ClusterState.builder(clusterState).putProjectMetadata(project).build(),
Sets.union(componentEntities, composableEntities)
);
}
@Override
public ClusterState remove(ProjectId projectId, TransformState prevState) throws Exception {
return transform(projectId, ComponentsAndComposables.EMPTY, prevState).state();
}
@Override
public ComponentsAndComposables fromXContent(XContentParser parser) throws IOException {
List<PutComponentTemplateAction.Request> componentTemplates = new ArrayList<>();
List<TransportPutComposableIndexTemplateAction.Request> composableTemplates = new ArrayList<>();
Map<String, ?> source = parser.map();
@SuppressWarnings("unchecked")
Map<String, ?> components = (Map<String, ?>) source.get(COMPONENTS);
if (components != null) {
for (var entry : components.entrySet()) {
@SuppressWarnings("unchecked")
Map<String, ?> content = (Map<String, ?>) entry.getValue();
try (XContentParser componentParser = mapToXContentParser(XContentParserConfiguration.EMPTY, content)) {
var componentTemplate = new PutComponentTemplateAction.Request(entry.getKey());
componentTemplate.componentTemplate(ComponentTemplate.parse(componentParser));
componentTemplates.add(componentTemplate);
}
}
}
@SuppressWarnings("unchecked")
Map<String, ?> composables = (Map<String, ?>) source.get(COMPOSABLES);
if (composables != null) {
for (var entry : composables.entrySet()) {
@SuppressWarnings("unchecked")
Map<String, ?> content = (Map<String, ?>) entry.getValue();
try (XContentParser componentParser = mapToXContentParser(XContentParserConfiguration.EMPTY, content)) {
var composableTemplate = new TransportPutComposableIndexTemplateAction.Request(entry.getKey());
composableTemplate.indexTemplate(ComposableIndexTemplate.parse(componentParser));
composableTemplates.add(composableTemplate);
}
}
}
return new ComponentsAndComposables(componentTemplates, composableTemplates);
}
record ComponentsAndComposables(
List<PutComponentTemplateAction.Request> componentTemplates,
List<TransportPutComposableIndexTemplateAction.Request> composableTemplates
) {
static final ComponentsAndComposables EMPTY = new ComponentsAndComposables(List.of(), List.of());
}
}
| ReservedComposableIndexTemplateAction |
java | mockito__mockito | mockito-extensions/mockito-junit-jupiter/src/test/java/org/mockitousage/GenericTypeMockTest.java | {
"start": 13160,
"end": 13657
} | class ____ extends BaseService<One, Two> {
private OneRepository oneRepository;
private TwoRepository twoRepository;
}
@Mock OneRepository oneRepository;
@Mock TwoRepository twoRepository;
@InjectMocks UnderTest underTest = new UnderTest();
@Test
public void testNoAioobe() {
assertNotNull(oneRepository);
assertNotNull(twoRepository);
assertNotNull(underTest);
}
}
}
| UnderTest |
java | junit-team__junit5 | jupiter-tests/src/test/java/org/junit/jupiter/api/condition/OSTests.java | {
"start": 1030,
"end": 2110
} | class ____ {
@ParameterizedTest
@ValueSource(strings = { "AIX", "Aix", "LaIxOS" })
void aix(String name) {
assertEquals(OS.AIX, OS.parse(name));
}
@ParameterizedTest
@ValueSource(strings = { "FREEBSD", "FreeBSD" })
void freebsd(String name) {
assertEquals(OS.FREEBSD, OS.parse(name));
}
@ParameterizedTest
@ValueSource(strings = { "LINUX", "Linux" })
void linux(String name) {
assertEquals(OS.LINUX, OS.parse(name));
}
@ParameterizedTest
@ValueSource(strings = { "MAC", "mac" })
void mac(String name) {
assertEquals(OS.MAC, OS.parse(name));
}
@ParameterizedTest
@ValueSource(strings = { "OPENBSD", "OpenBSD" })
void openbsd(String name) {
assertEquals(OS.OPENBSD, OS.parse(name));
}
@ParameterizedTest
@ValueSource(strings = { "SOLARIS", "SunOS" })
void solaris(String name) {
assertEquals(OS.SOLARIS, OS.parse(name));
}
@ParameterizedTest
@ValueSource(strings = { "WINDOW", "Microsoft Windows [Version 10.?]" })
void windows(String name) {
assertEquals(OS.WINDOWS, OS.parse(name));
}
}
}
| ValidNames |
java | apache__camel | dsl/camel-endpointdsl/src/test/java/org/apache/camel/builder/endpoint/LanguageHeaderRouteTest.java | {
"start": 989,
"end": 1646
} | class ____ extends BaseEndpointDslTest {
@Test
public void testLanguage() throws Exception {
getMockEndpoint("mock:result").expectedBodiesReceived("Hello World");
template.sendBodyAndHeader("direct:start", "Hi", "foo", "Hello World");
MockEndpoint.assertIsSatisfied(context);
}
@Override
protected RouteBuilder createRouteBuilder() throws Exception {
return new EndpointRouteBuilder() {
@Override
public void configure() throws Exception {
from(direct("start")).to(language("header:foo")).to(mock("result"));
}
};
}
}
| LanguageHeaderRouteTest |
java | spring-projects__spring-security | oauth2/oauth2-jose/src/main/java/org/springframework/security/oauth2/jwt/DPoPProofContext.java | {
"start": 2954,
"end": 5407
} | class ____ {
private String dPoPProof;
private String method;
private String targetUri;
private OAuth2Token accessToken;
private Builder(String dPoPProof) {
Assert.hasText(dPoPProof, "dPoPProof cannot be empty");
this.dPoPProof = dPoPProof;
}
/**
* Sets the value of the HTTP method of the request to which the DPoP Proof
* {@link Jwt} is attached.
* @param method the value of the HTTP method of the request to which the DPoP
* Proof {@link Jwt} is attached
* @return the {@link Builder}
*/
public Builder method(String method) {
this.method = method;
return this;
}
/**
* Sets the value of the HTTP target URI of the request to which the DPoP Proof
* {@link Jwt} is attached, without query and fragment parts.
* @param targetUri the value of the HTTP target URI of the request to which the
* DPoP Proof {@link Jwt} is attached
* @return the {@link Builder}
*/
public Builder targetUri(String targetUri) {
this.targetUri = targetUri;
return this;
}
/**
* Sets the access token if the request is a Protected Resource request.
* @param accessToken the access token if the request is a Protected Resource
* request
* @return the {@link Builder}
*/
public Builder accessToken(OAuth2Token accessToken) {
this.accessToken = accessToken;
return this;
}
/**
* Builds a new {@link DPoPProofContext}.
* @return a {@link DPoPProofContext}
*/
public DPoPProofContext build() {
validate();
return new DPoPProofContext(this.dPoPProof, this.method, this.targetUri, this.accessToken);
}
private void validate() {
Assert.hasText(this.method, "method cannot be empty");
Assert.hasText(this.targetUri, "targetUri cannot be empty");
if (!"GET".equals(this.method) && !"HEAD".equals(this.method) && !"POST".equals(this.method)
&& !"PUT".equals(this.method) && !"PATCH".equals(this.method) && !"DELETE".equals(this.method)
&& !"OPTIONS".equals(this.method) && !"TRACE".equals(this.method)) {
throw new IllegalArgumentException("method is invalid");
}
URI uri;
try {
uri = new URI(this.targetUri);
uri.toURL();
}
catch (Exception ex) {
throw new IllegalArgumentException("targetUri must be a valid URL", ex);
}
if (uri.getQuery() != null || uri.getFragment() != null) {
throw new IllegalArgumentException("targetUri cannot contain query or fragment parts");
}
}
}
}
| Builder |
java | apache__hadoop | hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/services/AbfsManagedApacheHttpConnection.java | {
"start": 1501,
"end": 1618
} | class ____ the {@link ManagedHttpClientConnection} and provides
* insights onto the connection level activity.
*/
| wraps |
java | grpc__grpc-java | grpclb/src/test/java/io/grpc/grpclb/GrpclbNameResolverTest.java | {
"start": 2391,
"end": 3023
} | class ____ {
@Rule
public final MockitoRule mocks = MockitoJUnit.rule();
private static final String NAME = "foo.googleapis.com";
private static final int DEFAULT_PORT = 887;
private final SynchronizationContext syncContext = new SynchronizationContext(
new Thread.UncaughtExceptionHandler() {
@Override
public void uncaughtException(Thread t, Throwable e) {
throw new AssertionError(e);
}
});
private final FakeClock fakeClock = new FakeClock();
private final FakeExecutorResource fakeExecutorResource = new FakeExecutorResource();
private final | GrpclbNameResolverTest |
java | apache__flink | flink-python/src/main/java/org/apache/flink/formats/json/JsonRowSerializationSchema.java | {
"start": 7389,
"end": 17320
} | interface ____ extends Serializable {
JsonNode convert(ObjectMapper mapper, JsonNode reuse, Object object);
}
private SerializationRuntimeConverter createConverter(TypeInformation<?> typeInfo) {
SerializationRuntimeConverter baseConverter =
createConverterForSimpleType(typeInfo)
.orElseGet(
() ->
createContainerConverter(typeInfo)
.orElseGet(this::createFallbackConverter));
return wrapIntoNullableConverter(baseConverter);
}
private SerializationRuntimeConverter wrapIntoNullableConverter(
SerializationRuntimeConverter converter) {
return (mapper, reuse, object) -> {
if (object == null) {
return mapper.getNodeFactory().nullNode();
}
return converter.convert(mapper, reuse, object);
};
}
private Optional<SerializationRuntimeConverter> createContainerConverter(
TypeInformation<?> typeInfo) {
if (typeInfo instanceof RowTypeInfo) {
return Optional.of(createRowConverter((RowTypeInfo) typeInfo));
} else if (typeInfo instanceof ObjectArrayTypeInfo) {
return Optional.of(
createObjectArrayConverter(
((ObjectArrayTypeInfo) typeInfo).getComponentInfo()));
} else if (typeInfo instanceof BasicArrayTypeInfo) {
return Optional.of(
createObjectArrayConverter(((BasicArrayTypeInfo) typeInfo).getComponentInfo()));
} else if (isPrimitiveByteArray(typeInfo)) {
return Optional.of(
(mapper, reuse, object) -> mapper.getNodeFactory().binaryNode((byte[]) object));
} else {
return Optional.empty();
}
}
private boolean isPrimitiveByteArray(TypeInformation<?> typeInfo) {
return typeInfo instanceof PrimitiveArrayTypeInfo
&& ((PrimitiveArrayTypeInfo) typeInfo).getComponentType() == Types.BYTE;
}
private SerializationRuntimeConverter createObjectArrayConverter(
TypeInformation elementTypeInfo) {
SerializationRuntimeConverter elementConverter = createConverter(elementTypeInfo);
return assembleArrayConverter(elementConverter);
}
private SerializationRuntimeConverter createRowConverter(RowTypeInfo typeInfo) {
List<SerializationRuntimeConverter> fieldConverters =
Arrays.stream(typeInfo.getFieldTypes())
.map(this::createConverter)
.collect(Collectors.toList());
return assembleRowConverter(typeInfo.getFieldNames(), fieldConverters);
}
private SerializationRuntimeConverter createFallbackConverter() {
return (mapper, reuse, object) -> {
// for types that were specified without JSON schema
// e.g. POJOs
try {
return mapper.valueToTree(object);
} catch (IllegalArgumentException e) {
throw new WrappingRuntimeException(
format("Could not convert object: %s", object), e);
}
};
}
private Optional<SerializationRuntimeConverter> createConverterForSimpleType(
TypeInformation<?> simpleTypeInfo) {
if (simpleTypeInfo == Types.VOID) {
return Optional.of((mapper, reuse, object) -> mapper.getNodeFactory().nullNode());
} else if (simpleTypeInfo == Types.BOOLEAN) {
return Optional.of(
(mapper, reuse, object) ->
mapper.getNodeFactory().booleanNode((Boolean) object));
} else if (simpleTypeInfo == Types.STRING) {
return Optional.of(
(mapper, reuse, object) -> mapper.getNodeFactory().textNode((String) object));
} else if (simpleTypeInfo == Types.INT) {
return Optional.of(
(mapper, reuse, object) ->
mapper.getNodeFactory().numberNode((Integer) object));
} else if (simpleTypeInfo == Types.LONG) {
return Optional.of(
(mapper, reuse, object) -> mapper.getNodeFactory().numberNode((Long) object));
} else if (simpleTypeInfo == Types.DOUBLE) {
return Optional.of(
(mapper, reuse, object) -> mapper.getNodeFactory().numberNode((Double) object));
} else if (simpleTypeInfo == Types.FLOAT) {
return Optional.of(
(mapper, reuse, object) -> mapper.getNodeFactory().numberNode((Float) object));
} else if (simpleTypeInfo == Types.SHORT) {
return Optional.of(
(mapper, reuse, object) -> mapper.getNodeFactory().numberNode((Short) object));
} else if (simpleTypeInfo == Types.BYTE) {
return Optional.of(
(mapper, reuse, object) -> mapper.getNodeFactory().numberNode((Byte) object));
} else if (simpleTypeInfo == Types.BIG_DEC) {
return Optional.of(createBigDecimalConverter());
} else if (simpleTypeInfo == Types.BIG_INT) {
return Optional.of(createBigIntegerConverter());
} else if (simpleTypeInfo == Types.SQL_DATE) {
return Optional.of(this::convertDate);
} else if (simpleTypeInfo == Types.SQL_TIME) {
return Optional.of(this::convertTime);
} else if (simpleTypeInfo == Types.SQL_TIMESTAMP) {
return Optional.of(this::convertTimestamp);
} else if (simpleTypeInfo == Types.LOCAL_DATE) {
return Optional.of(this::convertLocalDate);
} else if (simpleTypeInfo == Types.LOCAL_TIME) {
return Optional.of(this::convertLocalTime);
} else if (simpleTypeInfo == Types.LOCAL_DATE_TIME) {
return Optional.of(this::convertLocalDateTime);
} else {
return Optional.empty();
}
}
private JsonNode convertLocalDate(ObjectMapper mapper, JsonNode reuse, Object object) {
return mapper.getNodeFactory().textNode(ISO_LOCAL_DATE.format((LocalDate) object));
}
private JsonNode convertDate(ObjectMapper mapper, JsonNode reuse, Object object) {
Date date = (Date) object;
return convertLocalDate(mapper, reuse, date.toLocalDate());
}
private JsonNode convertLocalDateTime(ObjectMapper mapper, JsonNode reuse, Object object) {
return mapper.getNodeFactory()
.textNode(RFC3339_TIMESTAMP_FORMAT.format((LocalDateTime) object));
}
private JsonNode convertTimestamp(ObjectMapper mapper, JsonNode reuse, Object object) {
Timestamp timestamp = (Timestamp) object;
return convertLocalDateTime(mapper, reuse, timestamp.toLocalDateTime());
}
private JsonNode convertLocalTime(ObjectMapper mapper, JsonNode reuse, Object object) {
JsonNodeFactory nodeFactory = mapper.getNodeFactory();
return nodeFactory.textNode(RFC3339_TIME_FORMAT.format((LocalTime) object));
}
private JsonNode convertTime(ObjectMapper mapper, JsonNode reuse, Object object) {
final Time time = (Time) object;
return convertLocalTime(mapper, reuse, time.toLocalTime());
}
private SerializationRuntimeConverter createBigDecimalConverter() {
return (mapper, reuse, object) -> {
// convert decimal if necessary
JsonNodeFactory nodeFactory = mapper.getNodeFactory();
if (object instanceof BigDecimal) {
return nodeFactory.numberNode((BigDecimal) object);
}
return nodeFactory.numberNode(BigDecimal.valueOf(((Number) object).doubleValue()));
};
}
private SerializationRuntimeConverter createBigIntegerConverter() {
return (mapper, reuse, object) -> {
// convert decimal if necessary
JsonNodeFactory nodeFactory = mapper.getNodeFactory();
if (object instanceof BigInteger) {
return nodeFactory.numberNode((BigInteger) object);
}
return nodeFactory.numberNode(BigInteger.valueOf(((Number) object).longValue()));
};
}
private SerializationRuntimeConverter assembleRowConverter(
String[] fieldNames, List<SerializationRuntimeConverter> fieldConverters) {
return (mapper, reuse, object) -> {
ObjectNode node;
// reuse could be a NullNode if last record is null.
if (reuse == null || reuse.isNull()) {
node = mapper.createObjectNode();
} else {
node = (ObjectNode) reuse;
}
Row row = (Row) object;
for (int i = 0; i < fieldNames.length; i++) {
String fieldName = fieldNames[i];
node.set(
fieldName,
fieldConverters
.get(i)
.convert(mapper, node.get(fieldNames[i]), row.getField(i)));
}
return node;
};
}
private SerializationRuntimeConverter assembleArrayConverter(
SerializationRuntimeConverter elementConverter) {
return (mapper, reuse, object) -> {
ArrayNode node;
// reuse could be a NullNode if last record is null.
if (reuse == null || reuse.isNull()) {
node = mapper.createArrayNode();
} else {
node = (ArrayNode) reuse;
node.removeAll();
}
Object[] array = (Object[]) object;
for (Object element : array) {
node.add(elementConverter.convert(mapper, null, element));
}
return node;
};
}
}
| SerializationRuntimeConverter |
java | spring-projects__spring-boot | module/spring-boot-web-server/src/test/java/org/springframework/boot/web/server/SpringApplicationWebServerTests.java | {
"start": 7397,
"end": 7579
} | class ____ {
@Bean
MockServletWebServerFactory webServer() {
return new MockServletWebServerFactory();
}
}
@Configuration(proxyBeanMethods = false)
static | ExampleWebConfig |
java | spring-projects__spring-framework | spring-core/src/main/java/org/springframework/core/convert/TypeDescriptor.java | {
"start": 18551,
"end": 31363
} | class ____ the provided map value. For example, if this describes a
* {@code java.util.Map<java.lang.String, java.lang.Number>} and the value
* argument is a {@code java.lang.Integer}, the returned TypeDescriptor will be
* {@code java.lang.Integer}. If this describes a {@code java.util.Map<?, ?>}
* and the value argument is a {@code java.lang.Integer}, the returned
* TypeDescriptor will be {@code java.lang.Integer} as well.
* <p>Annotation and nested type context will be preserved in the narrowed
* TypeDescriptor that is returned.
* @param mapValue the map value
* @return the map value type descriptor
* @throws IllegalStateException if this type is not a {@code java.util.Map}
* @see #narrow(Object)
*/
public @Nullable TypeDescriptor getMapValueTypeDescriptor(@Nullable Object mapValue) {
return narrow(mapValue, getMapValueTypeDescriptor());
}
private @Nullable TypeDescriptor getRelatedIfResolvable(ResolvableType type) {
if (type.resolve() == null) {
return null;
}
return new TypeDescriptor(type, null, getAnnotations());
}
private @Nullable TypeDescriptor narrow(@Nullable Object value, @Nullable TypeDescriptor typeDescriptor) {
if (typeDescriptor != null) {
return typeDescriptor.narrow(value);
}
if (value != null) {
return narrow(value);
}
return null;
}
@Override
public boolean equals(@Nullable Object other) {
if (this == other) {
return true;
}
if (!(other instanceof TypeDescriptor otherDesc)) {
return false;
}
if (getType() != otherDesc.getType()) {
return false;
}
if (!annotationsMatch(otherDesc)) {
return false;
}
return Arrays.equals(getResolvableType().getGenerics(), otherDesc.getResolvableType().getGenerics());
}
private boolean annotationsMatch(TypeDescriptor otherDesc) {
Annotation[] anns = getAnnotations();
Annotation[] otherAnns = otherDesc.getAnnotations();
if (anns == otherAnns) {
return true;
}
if (anns.length != otherAnns.length) {
return false;
}
if (anns.length > 0) {
for (int i = 0; i < anns.length; i++) {
if (!annotationEquals(anns[i], otherAnns[i])) {
return false;
}
}
}
return true;
}
private boolean annotationEquals(Annotation ann, Annotation otherAnn) {
// Annotation.equals is reflective and pretty slow, so let's check identity and proxy type first.
return (ann == otherAnn || (ann.getClass() == otherAnn.getClass() && ann.equals(otherAnn)));
}
@Override
public int hashCode() {
return getType().hashCode();
}
@Override
public String toString() {
StringBuilder builder = new StringBuilder();
for (Annotation ann : getAnnotations()) {
builder.append('@').append(getName(ann.annotationType())).append(' ');
}
builder.append(getResolvableType());
return builder.toString();
}
/**
* Create a new type descriptor for an object.
* <p>Use this factory method to introspect a source object before asking the
* conversion system to convert it to some other type.
* <p>If the provided object is {@code null}, returns {@code null}, else calls
* {@link #valueOf(Class)} to build a TypeDescriptor from the object's class.
* @param source the source object
* @return the type descriptor
*/
@Contract("!null -> !null; null -> null")
public static @Nullable TypeDescriptor forObject(@Nullable Object source) {
return (source != null ? valueOf(source.getClass()) : null);
}
/**
* Create a new type descriptor from the given type.
* <p>Use this to instruct the conversion system to convert an object to a
* specific target type, when no type location such as a method parameter or
* field is available to provide additional conversion context.
* <p>Generally prefer use of {@link #forObject(Object)} for constructing type
* descriptors from source objects, as it handles the {@code null} object case.
* @param type the class (may be {@code null} to indicate {@code Object.class})
* @return the corresponding type descriptor
*/
public static TypeDescriptor valueOf(@Nullable Class<?> type) {
if (type == null) {
type = Object.class;
}
TypeDescriptor desc = commonTypesCache.get(type);
return (desc != null ? desc : new TypeDescriptor(ResolvableType.forClass(type), null, null));
}
/**
* Create a new type descriptor from a {@link java.util.Collection} type.
* <p>Useful for converting to typed Collections.
* <p>For example, a {@code List<String>} could be converted to a
* {@code List<EmailAddress>} by converting to a targetType built with this method.
* The method call to construct such a {@code TypeDescriptor} would look something
* like: {@code collection(List.class, TypeDescriptor.valueOf(EmailAddress.class));}
* @param collectionType the collection type, which must implement {@link Collection}.
* @param elementTypeDescriptor a descriptor for the collection's element type,
* used to convert collection elements
* @return the collection type descriptor
*/
public static TypeDescriptor collection(Class<?> collectionType, @Nullable TypeDescriptor elementTypeDescriptor) {
Assert.notNull(collectionType, "Collection type must not be null");
if (!Collection.class.isAssignableFrom(collectionType)) {
throw new IllegalArgumentException("Collection type must be a [java.util.Collection]");
}
ResolvableType element = (elementTypeDescriptor != null ? elementTypeDescriptor.resolvableType : null);
return new TypeDescriptor(ResolvableType.forClassWithGenerics(collectionType, element), null, null);
}
/**
* Create a new type descriptor from a {@link java.util.Map} type.
* <p>Useful for converting to typed Maps.
* <p>For example, a {@code Map<String, String>} could be converted to a {@code Map<Id, EmailAddress>}
* by converting to a targetType built with this method:
* The method call to construct such a TypeDescriptor would look something like:
* <pre class="code">
* map(Map.class, TypeDescriptor.valueOf(Id.class), TypeDescriptor.valueOf(EmailAddress.class));
* </pre>
* @param mapType the map type, which must implement {@link Map}
* @param keyTypeDescriptor a descriptor for the map's key type, used to convert map keys
* @param valueTypeDescriptor the map's value type, used to convert map values
* @return the map type descriptor
*/
public static TypeDescriptor map(Class<?> mapType, @Nullable TypeDescriptor keyTypeDescriptor,
@Nullable TypeDescriptor valueTypeDescriptor) {
Assert.notNull(mapType, "Map type must not be null");
if (!Map.class.isAssignableFrom(mapType)) {
throw new IllegalArgumentException("Map type must be a [java.util.Map]");
}
ResolvableType key = (keyTypeDescriptor != null ? keyTypeDescriptor.resolvableType : null);
ResolvableType value = (valueTypeDescriptor != null ? valueTypeDescriptor.resolvableType : null);
return new TypeDescriptor(ResolvableType.forClassWithGenerics(mapType, key, value), null, null);
}
/**
* Create a new type descriptor as an array of the specified type.
* <p>For example to create a {@code Map<String,String>[]} use:
* <pre class="code">
* TypeDescriptor.array(TypeDescriptor.map(Map.class, TypeDescriptor.value(String.class), TypeDescriptor.value(String.class)));
* </pre>
* @param elementTypeDescriptor the {@link TypeDescriptor} of the array element or {@code null}
* @return an array {@link TypeDescriptor} or {@code null} if {@code elementTypeDescriptor} is {@code null}
* @since 3.2.1
*/
@Contract("!null -> !null; null -> null")
public static @Nullable TypeDescriptor array(@Nullable TypeDescriptor elementTypeDescriptor) {
if (elementTypeDescriptor == null) {
return null;
}
return new TypeDescriptor(ResolvableType.forArrayComponent(elementTypeDescriptor.resolvableType),
null, elementTypeDescriptor.getAnnotations());
}
/**
* Create a type descriptor for a nested type declared within the method parameter.
* <p>For example, if the methodParameter is a {@code List<String>} and the
* nesting level is 1, the nested type descriptor will be String.class.
* <p>If the methodParameter is a {@code List<List<String>>} and the nesting
* level is 2, the nested type descriptor will also be a String.class.
* <p>If the methodParameter is a {@code Map<Integer, String>} and the nesting
* level is 1, the nested type descriptor will be String, derived from the map value.
* <p>If the methodParameter is a {@code List<Map<Integer, String>>} and the
* nesting level is 2, the nested type descriptor will be String, derived from the map value.
* <p>Returns {@code null} if a nested type cannot be obtained because it was not declared.
* For example, if the method parameter is a {@code List<?>}, the nested type
* descriptor returned will be {@code null}.
* @param methodParameter the method parameter with a nestingLevel of 1
* @param nestingLevel the nesting level of the collection/array element or
* map key/value declaration within the method parameter
* @return the nested type descriptor at the specified nesting level,
* or {@code null} if it could not be obtained
* @throws IllegalArgumentException if the nesting level of the input
* {@link MethodParameter} argument is not 1, or if the types up to the
* specified nesting level are not of collection, array, or map types
*/
public static @Nullable TypeDescriptor nested(MethodParameter methodParameter, int nestingLevel) {
if (methodParameter.getNestingLevel() != 1) {
throw new IllegalArgumentException("MethodParameter nesting level must be 1: " +
"use the nestingLevel parameter to specify the desired nestingLevel for nested type traversal");
}
return new TypeDescriptor(methodParameter).nested(nestingLevel);
}
/**
* Create a type descriptor for a nested type declared within the field.
* <p>For example, if the field is a {@code List<String>} and the nesting
* level is 1, the nested type descriptor will be {@code String.class}.
* <p>If the field is a {@code List<List<String>>} and the nesting level is
* 2, the nested type descriptor will also be a {@code String.class}.
* <p>If the field is a {@code Map<Integer, String>} and the nesting level
* is 1, the nested type descriptor will be String, derived from the map value.
* <p>If the field is a {@code List<Map<Integer, String>>} and the nesting
* level is 2, the nested type descriptor will be String, derived from the map value.
* <p>Returns {@code null} if a nested type cannot be obtained because it was not
* declared. For example, if the field is a {@code List<?>}, the nested type
* descriptor returned will be {@code null}.
* @param field the field
* @param nestingLevel the nesting level of the collection/array element or
* map key/value declaration within the field
* @return the nested type descriptor at the specified nesting level,
* or {@code null} if it could not be obtained
* @throws IllegalArgumentException if the types up to the specified nesting
* level are not of collection, array, or map types
*/
public static @Nullable TypeDescriptor nested(Field field, int nestingLevel) {
return new TypeDescriptor(field).nested(nestingLevel);
}
/**
* Create a type descriptor for a nested type declared within the property.
* <p>For example, if the property is a {@code List<String>} and the nesting
* level is 1, the nested type descriptor will be {@code String.class}.
* <p>If the property is a {@code List<List<String>>} and the nesting level
* is 2, the nested type descriptor will also be a {@code String.class}.
* <p>If the property is a {@code Map<Integer, String>} and the nesting level
* is 1, the nested type descriptor will be String, derived from the map value.
* <p>If the property is a {@code List<Map<Integer, String>>} and the nesting
* level is 2, the nested type descriptor will be String, derived from the map value.
* <p>Returns {@code null} if a nested type cannot be obtained because it was not
* declared. For example, if the property is a {@code List<?>}, the nested type
* descriptor returned will be {@code null}.
* @param property the property
* @param nestingLevel the nesting level of the collection/array element or
* map key/value declaration within the property
* @return the nested type descriptor at the specified nesting level, or
* {@code null} if it could not be obtained
* @throws IllegalArgumentException if the types up to the specified nesting
* level are not of collection, array, or map types
*/
public static @Nullable TypeDescriptor nested(Property property, int nestingLevel) {
return new TypeDescriptor(property).nested(nestingLevel);
}
private static String getName(Class<?> clazz) {
String canonicalName = clazz.getCanonicalName();
return (canonicalName != null ? canonicalName : clazz.getName());
}
private | of |
java | assertj__assertj-core | assertj-core/src/test/java/org/assertj/core/internal/double2darrays/Double2DArrays_assertContains_at_Index_Test.java | {
"start": 1128,
"end": 1510
} | class ____ extends Double2DArraysBaseTest {
@Test
void should_delegate_to_Arrays2D() {
// GIVEN
double[] doubles = new double[] { 6.0, 8.0, 10.0 };
// WHEN
double2dArrays.assertContains(info, actual, doubles, atIndex(1));
// THEN
verify(arrays2d).assertContains(info, failures, actual, doubles, atIndex(1));
}
}
| Double2DArrays_assertContains_at_Index_Test |
java | hibernate__hibernate-orm | hibernate-testing/src/main/java/org/hibernate/testing/cleaner/H2DatabaseCleaner.java | {
"start": 490,
"end": 4356
} | class ____ implements DatabaseCleaner {
private static final Logger LOG = Logger.getLogger( H2DatabaseCleaner.class.getName() );
private static final String SYSTEM_SCHEMAS = "'INFORMATION_SCHEMA'";
private final List<String> ignoredTables = new ArrayList<>();
private final Map<String, List<String>> cachedTableNamesPerSchema = new HashMap<>();
@Override
public boolean isApplicable(Connection connection) {
try {
return connection.getMetaData().getDatabaseProductName().startsWith( "H2" );
}
catch (SQLException e) {
throw new RuntimeException( "Could not resolve the database metadata!", e );
}
}
@Override
public void addIgnoredTable(String tableName) {
ignoredTables.add( tableName );
}
@Override
public void clearAllSchemas(Connection c) {
cachedTableNamesPerSchema.clear();
try (Statement s = c.createStatement()) {
LOG.log( Level.FINEST, "Dropping schema objects: START" );
s.execute( "DROP ALL OBJECTS" );
LOG.log( Level.FINEST, "Dropping schema objects: END" );
LOG.log( Level.FINEST, "Committing: START" );
c.commit();
LOG.log( Level.FINEST, "Committing: END" );
}
catch (SQLException e) {
try {
c.rollback();
}
catch (SQLException e1) {
e.addSuppressed( e1 );
}
throw new RuntimeException( e );
}
}
@Override
public void clearSchema(Connection c, String schemaName) {
throw new UnsupportedOperationException();
}
@Override
public void clearAllData(Connection connection) {
clearData0(
connection,
null,
statement -> {
try {
return statement.executeQuery(
"SELECT TABLE_SCHEMA, TABLE_NAME FROM INFORMATION_SCHEMA.TABLES WHERE TABLE_SCHEMA NOT IN (" + SYSTEM_SCHEMAS + ")" );
}
catch (SQLException sqlException) {
throw new RuntimeException( sqlException );
}
}
);
}
@Override
public void clearData(Connection connection, String schemaName) {
clearData0(
connection,
schemaName,
statement -> {
try {
return statement.executeQuery(
"SELECT TABLE_SCHEMA, TABLE_NAME FROM INFORMATION_SCHEMA.TABLES WHERE TABLE_SCHEMA = '" + schemaName + "'" );
}
catch (SQLException sqlException) {
throw new RuntimeException( sqlException );
}
}
);
}
private void clearData0(Connection connection, String schemaName, Function<Statement, ResultSet> tablesProvider) {
try (Statement s = connection.createStatement()) {
// Disable foreign keys
LOG.log( Level.FINEST, "Disable foreign keys: START" );
s.execute( "SET REFERENTIAL_INTEGRITY FALSE" );
LOG.log( Level.FINEST, "Disable foreign keys: END" );
// Delete data
LOG.log( Level.FINEST, "Deleting data: START" );
List<String> cachedTableNames = cachedTableNamesPerSchema.get( schemaName );
if ( cachedTableNames == null ) {
cachedTableNames = new ArrayList<>();
ResultSet rs = tablesProvider.apply( s );
while ( rs.next() ) {
String tableSchema = rs.getString( 1 );
String tableName = rs.getString( 2 );
if ( !ignoredTables.contains( tableName ) ) {
cachedTableNames.add( tableSchema + "." + tableName );
}
}
cachedTableNamesPerSchema.put( schemaName, cachedTableNames );
}
for ( String table : cachedTableNames ) {
s.execute( "TRUNCATE TABLE " + table );
}
LOG.log( Level.FINEST, "Deleting data: END" );
// Enable foreign keys
LOG.log( Level.FINEST, "Enabling foreign keys: START" );
s.execute( "SET REFERENTIAL_INTEGRITY TRUE" );
LOG.log( Level.FINEST, "Enabling foreign keys: END" );
LOG.log( Level.FINEST, "Committing: START" );
connection.commit();
LOG.log( Level.FINEST, "Committing: END" );
}
catch (SQLException e) {
try {
connection.rollback();
}
catch (SQLException e1) {
e.addSuppressed( e1 );
}
throw new RuntimeException( e );
}
}
}
| H2DatabaseCleaner |
java | quarkusio__quarkus | extensions/smallrye-openapi/deployment/src/test/java/io/quarkus/smallrye/openapi/test/jaxrs/AutoBadRequestResource.java | {
"start": 2627,
"end": 2695
} | class ____ {
public String bar;
}
private static | MyBean |
java | apache__camel | components/camel-quartz/src/test/java/org/apache/camel/component/quartz/SpringQuartzPersistentStoreRestartAppTest.java | {
"start": 1281,
"end": 3327
} | class ____ {
protected final Logger log = LoggerFactory.getLogger(getClass());
@Test
public void testQuartzPersistentStoreRestart() throws Exception {
// load spring app
AbstractXmlApplicationContext app
= newAppContext("SpringQuartzPersistentStoreTest.xml");
app.start();
CamelContext camel = app.getBean("camelContext-" + getClass().getSimpleName(), CamelContext.class);
assertNotNull(camel);
MockEndpoint mock = camel.getEndpoint("mock:result", MockEndpoint.class);
mock.expectedMinimumMessageCount(2);
mock.assertIsSatisfied();
app.stop();
log.info("Restarting ...");
log.info("Restarting ...");
log.info("Restarting ...");
// NOTE:
// To test a restart where the app has crashed, then you can in QuartzEndpoint
// in the doShutdown method, then remove the following code line
// deleteTrigger(getTrigger());
// then when we restart then there is old stale data which QuartzComponent
// is supposed to handle and start again
// load spring app
AbstractXmlApplicationContext app2 = newAppContext("SpringQuartzPersistentStoreRestartTest.xml");
app2.start();
CamelContext camel2 = app2.getBean("camelContext-" + getClass().getSimpleName(), CamelContext.class);
assertNotNull(camel2);
MockEndpoint mock2 = camel2.getEndpoint("mock:result", MockEndpoint.class);
mock2.expectedMinimumMessageCount(2);
mock2.assertIsSatisfied();
app2.stop();
// we're done so let's properly close the application contexts, but close
// the second app before the first one so that the quartz scheduler running
// inside it can be properly shutdown
IOHelper.close(app2, app);
}
private AbstractXmlApplicationContext newAppContext(String config) {
return CamelSpringTestSupport.newAppContext(config, getClass());
}
}
| SpringQuartzPersistentStoreRestartAppTest |
java | apache__flink | flink-core/src/test/java/org/apache/flink/api/common/typeutils/base/LocalDateTimeSerializerTest.java | {
"start": 1075,
"end": 2043
} | class ____ extends SerializerTestBase<LocalDateTime> {
@Override
protected TypeSerializer<LocalDateTime> createSerializer() {
return new LocalDateTimeSerializer();
}
@Override
protected int getLength() {
return 13;
}
@Override
protected Class<LocalDateTime> getTypeClass() {
return LocalDateTime.class;
}
@Override
protected LocalDateTime[] getTestData() {
return new LocalDateTime[] {
LocalDateTime.of(1970, 1, 1, 0, 0, 0, 0),
LocalDateTime.of(1990, 10, 14, 2, 42, 25, 123_000_000),
LocalDateTime.of(1990, 10, 14, 2, 42, 25, 123_000_001),
LocalDateTime.of(1990, 10, 14, 2, 42, 25, 123_000_002),
LocalDateTime.of(2013, 8, 12, 14, 15, 59, 478_000_000),
LocalDateTime.of(2013, 8, 12, 14, 15, 59, 479_000_000),
LocalDateTime.of(2040, 5, 12, 18, 0, 45, 999_000_000)
};
}
}
| LocalDateTimeSerializerTest |
java | apache__flink | flink-tests/src/test/java/org/apache/flink/test/streaming/runtime/DataStreamPojoITCase.java | {
"start": 1546,
"end": 10404
} | class ____ extends AbstractTestBaseJUnit4 {
static List<Data> elements = new ArrayList<>();
static {
elements.add(new Data(0, 0, 0));
elements.add(new Data(0, 0, 0));
elements.add(new Data(1, 1, 1));
elements.add(new Data(1, 1, 1));
elements.add(new Data(2, 2, 3));
elements.add(new Data(2, 2, 3));
}
/** Test composite key on the Data POJO (with nested fields). */
@Test
public void testCompositeKeyOnNestedPojo() throws Exception {
StreamExecutionEnvironment see = StreamExecutionEnvironment.getExecutionEnvironment();
see.getConfig().disableObjectReuse();
see.setParallelism(3);
DataStream<Data> dataStream = see.fromData(elements);
DataStream<Data> summedStream =
dataStream
.keyBy(
x -> Tuple3.of(x.aaa, x.abc, x.wxyz),
Types.TUPLE(Types.INT, Types.INT, Types.LONG))
.sum("sum")
.keyBy(
x -> Tuple3.of(x.aaa, x.abc, x.wxyz),
Types.TUPLE(Types.INT, Types.INT, Types.LONG))
.flatMap(
new FlatMapFunction<Data, Data>() {
private static final long serialVersionUID =
788865239171396315L;
Data[] first = new Data[3];
@Override
public void flatMap(Data value, Collector<Data> out)
throws Exception {
if (first[value.aaa] == null) {
first[value.aaa] = value;
if (value.sum != 1) {
throw new RuntimeException(
"Expected the sum to be one");
}
} else {
if (value.sum != 2) {
throw new RuntimeException(
"Expected the sum to be two");
}
if (first[value.aaa].aaa != value.aaa) {
throw new RuntimeException("aaa key wrong");
}
if (first[value.aaa].abc != value.abc) {
throw new RuntimeException("abc key wrong");
}
if (first[value.aaa].wxyz != value.wxyz) {
throw new RuntimeException("wxyz key wrong");
}
}
}
});
summedStream.print();
see.execute();
}
/** Test composite & nested key on the Data POJO. */
@Test
public void testNestedKeyOnNestedPojo() throws Exception {
StreamExecutionEnvironment see = StreamExecutionEnvironment.getExecutionEnvironment();
see.getConfig().disableObjectReuse();
see.setParallelism(4);
DataStream<Data> dataStream = see.fromData(elements);
DataStream<Data> summedStream =
dataStream
.keyBy(
x -> Tuple2.of(x.aaa, x.stats.count),
Types.TUPLE(Types.INT, Types.LONG))
.sum("sum")
.keyBy(
x -> Tuple2.of(x.aaa, x.stats.count),
Types.TUPLE(Types.INT, Types.LONG))
.flatMap(
new FlatMapFunction<Data, Data>() {
private static final long serialVersionUID =
-3678267280397950258L;
Data[] first = new Data[3];
@Override
public void flatMap(Data value, Collector<Data> out)
throws Exception {
if (value.stats.count != 123) {
throw new RuntimeException(
"Wrong value for value.stats.count");
}
if (first[value.aaa] == null) {
first[value.aaa] = value;
if (value.sum != 1) {
throw new RuntimeException(
"Expected the sum to be one");
}
} else {
if (value.sum != 2) {
throw new RuntimeException(
"Expected the sum to be two");
}
if (first[value.aaa].aaa != value.aaa) {
throw new RuntimeException("aaa key wrong");
}
if (first[value.aaa].abc != value.abc) {
throw new RuntimeException("abc key wrong");
}
if (first[value.aaa].wxyz != value.wxyz) {
throw new RuntimeException("wxyz key wrong");
}
}
}
});
summedStream.print();
see.execute();
}
@Test
public void testNestedPojoFieldAccessor() throws Exception {
StreamExecutionEnvironment see = StreamExecutionEnvironment.getExecutionEnvironment();
see.getConfig().disableObjectReuse();
see.setParallelism(4);
DataStream<Data> dataStream = see.fromData(elements);
DataStream<Data> summedStream =
dataStream
.keyBy(x -> x.aaa)
.sum("stats.count")
.keyBy(x -> x.aaa)
.flatMap(
new FlatMapFunction<Data, Data>() {
Data[] first = new Data[3];
@Override
public void flatMap(Data value, Collector<Data> out)
throws Exception {
if (first[value.aaa] == null) {
first[value.aaa] = value;
if (value.stats.count != 123) {
throw new RuntimeException(
"Expected stats.count to be 123");
}
} else {
if (value.stats.count != 2 * 123) {
throw new RuntimeException(
"Expected stats.count to be 2 * 123");
}
}
}
});
summedStream.print();
see.execute();
}
@Test(expected = CompositeType.InvalidFieldReferenceException.class)
public void testFailOnNestedPojoFieldAccessor() throws Exception {
StreamExecutionEnvironment see = StreamExecutionEnvironment.getExecutionEnvironment();
DataStream<Data> dataStream = see.fromData(elements);
dataStream
.keyBy(x -> Tuple2.of(x.aaa, x.stats.count), Types.TUPLE(Types.INT, Types.LONG))
.sum("stats.nonExistingField");
}
/** POJO. */
public static | DataStreamPojoITCase |
java | apache__commons-lang | src/main/java/org/apache/commons/lang3/SystemUtils.java | {
"start": 23092,
"end": 23723
} | class ____ loaded, the value will be out of sync with that System property.
* </p>
*
* @see SystemProperties#getJavaVmSpecificationVersion()
* @since Java 1.2
*/
public static final String JAVA_VM_SPECIFICATION_VERSION = SystemProperties.getJavaVmSpecificationVersion();
/**
* A constant for the System Property {@code java.vm.vendor}. Java Virtual Machine implementation vendor.
*
* <p>
* Defaults to {@code null} if the runtime does not have security access to read this property or the property does not exist.
* </p>
* <p>
* This value is initialized when the | is |
java | spring-projects__spring-security | config/src/test/java/org/springframework/security/config/annotation/web/configurers/oauth2/server/authorization/OAuth2TokenIntrospectionTests.java | {
"start": 28808,
"end": 30056
} | class ____
extends AuthorizationServerConfiguration {
// @formatter:off
@Bean
SecurityFilterChain authorizationServerSecurityFilterChain(HttpSecurity http) throws Exception {
http
.oauth2AuthorizationServer((authorizationServer) ->
authorizationServer
.tokenIntrospectionEndpoint((tokenIntrospectionEndpoint) ->
tokenIntrospectionEndpoint
.introspectionRequestConverter(authenticationConverter)
.introspectionRequestConverters(authenticationConvertersConsumer)
.authenticationProvider(authenticationProvider)
.authenticationProviders(authenticationProvidersConsumer)
.introspectionResponseHandler(authenticationSuccessHandler)
.errorResponseHandler(authenticationFailureHandler))
)
.authorizeHttpRequests((authorize) ->
authorize.anyRequest().authenticated()
);
return http.build();
}
// @formatter:on
@Override
AuthorizationServerSettings authorizationServerSettings() {
return AuthorizationServerSettings.builder()
.multipleIssuersAllowed(true)
.tokenIntrospectionEndpoint("/test/introspect")
.build();
}
}
}
| AuthorizationServerConfigurationCustomTokenIntrospectionEndpoint |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/loader/ast/internal/SingleIdExecutionContext.java | {
"start": 583,
"end": 2178
} | class ____ extends BaseExecutionContext {
private final Object entityInstance;
private final Object entityId;
private final EntityMappingType rootEntityDescriptor;
private final Boolean readOnly;
private final LockOptions lockOptions;
private final SubselectFetch.RegistrationHandler subSelectFetchableKeysHandler;
public SingleIdExecutionContext(
Object entityId,
Object entityInstance,
EntityMappingType rootEntityDescriptor,
Boolean readOnly,
LockOptions lockOptions,
SubselectFetch.RegistrationHandler subSelectFetchableKeysHandler,
SharedSessionContractImplementor session) {
super( session );
this.entityInstance = entityInstance;
this.entityId = entityId;
this.rootEntityDescriptor = rootEntityDescriptor;
this.readOnly = readOnly;
this.lockOptions = lockOptions;
this.subSelectFetchableKeysHandler = subSelectFetchableKeysHandler;
}
@Override
public Object getEntityInstance() {
return entityInstance;
}
@Override
public Object getEntityId() {
return entityId;
}
@Override
public EntityMappingType getRootEntityDescriptor() {
return rootEntityDescriptor;
}
@Override
public QueryOptions getQueryOptions() {
return new QueryOptionsAdapter() {
@Override
public Boolean isReadOnly() {
return readOnly;
}
@Override
public LockOptions getLockOptions() {
return lockOptions;
}
};
}
@Override
public void registerLoadingEntityHolder(EntityHolder holder) {
subSelectFetchableKeysHandler.addKey( holder );
}
@Override
public boolean upgradeLocks() {
return true;
}
}
| SingleIdExecutionContext |
java | mapstruct__mapstruct | processor/src/main/java/org/mapstruct/ap/internal/model/assignment/UpdateWrapper.java | {
"start": 499,
"end": 3598
} | class ____ extends AssignmentWrapper {
private final List<Type> thrownTypesToExclude;
private final Assignment factoryMethod;
private final Type targetImplementationType;
private final boolean includeSourceNullCheck;
private final boolean setExplicitlyToNull;
private final boolean setExplicitlyToDefault;
public UpdateWrapper( Assignment decoratedAssignment,
List<Type> thrownTypesToExclude,
Assignment factoryMethod,
boolean fieldAssignment,
Type targetType,
boolean includeSourceNullCheck,
boolean setExplicitlyToNull,
boolean setExplicitlyToDefault ) {
super( decoratedAssignment, fieldAssignment );
this.thrownTypesToExclude = thrownTypesToExclude;
this.factoryMethod = factoryMethod;
this.targetImplementationType = determineImplType( factoryMethod, targetType );
this.includeSourceNullCheck = includeSourceNullCheck;
this.setExplicitlyToDefault = setExplicitlyToDefault;
this.setExplicitlyToNull = setExplicitlyToNull;
}
private static Type determineImplType(Assignment factoryMethod, Type targetType) {
if ( factoryMethod != null ) {
//If we have factory method then we won't use the targetType
return null;
}
if ( targetType.getImplementationType() != null ) {
// it's probably a collection or something
return targetType.getImplementationType();
}
// no factory method means we create a new instance ourselves and thus need to import the type
return targetType;
}
@Override
public List<Type> getThrownTypes() {
List<Type> parentThrownTypes = super.getThrownTypes();
List<Type> result = new ArrayList<>( parentThrownTypes );
for ( Type thrownTypeToExclude : thrownTypesToExclude ) {
for ( Type parentThrownType : parentThrownTypes ) {
if ( parentThrownType.isAssignableTo( thrownTypeToExclude ) ) {
result.remove( parentThrownType );
}
}
}
return result;
}
@Override
public Set<Type> getImportTypes() {
Set<Type> imported = new HashSet<>( super.getImportTypes() );
if ( factoryMethod != null ) {
imported.addAll( factoryMethod.getImportTypes() );
}
if ( targetImplementationType != null ) {
imported.add( targetImplementationType );
imported.addAll( targetImplementationType.getTypeParameters() );
}
return imported;
}
public Assignment getFactoryMethod() {
return factoryMethod;
}
public boolean isIncludeSourceNullCheck() {
return includeSourceNullCheck;
}
public boolean isSetExplicitlyToNull() {
return setExplicitlyToNull;
}
public boolean isSetExplicitlyToDefault() {
return setExplicitlyToDefault;
}
}
| UpdateWrapper |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/AbstractDNSToSwitchMapping.java | {
"start": 1434,
"end": 1515
} | class ____ are automatically picked up by all implementations.
* <p>
*
* This | that |
java | mapstruct__mapstruct | processor/src/test/java/org/mapstruct/ap/test/subclassmapping/abstractsuperclass/ErroneousSubclassWithAbstractSuperClassMapper.java | {
"start": 458,
"end": 955
} | interface ____ {
ErroneousSubclassWithAbstractSuperClassMapper INSTANCE =
Mappers.getMapper( ErroneousSubclassWithAbstractSuperClassMapper.class );
VehicleCollectionDto map(VehicleCollection vehicles);
CarDto map(Car car);
BikeDto map(Bike bike);
@SubclassMapping( source = Car.class, target = CarDto.class )
@SubclassMapping( source = Bike.class, target = BikeDto.class )
VehicleDto map(AbstractVehicle vehicle);
}
| ErroneousSubclassWithAbstractSuperClassMapper |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/PendingDataNodeMessages.java | {
"start": 1589,
"end": 6063
} | class ____ {
private final Block block;
private final DatanodeStorageInfo storageInfo;
private final ReplicaState reportedState;
ReportedBlockInfo(DatanodeStorageInfo storageInfo, Block block,
ReplicaState reportedState) {
this.storageInfo = storageInfo;
this.block = block;
this.reportedState = reportedState;
}
Block getBlock() {
return block;
}
ReplicaState getReportedState() {
return reportedState;
}
DatanodeStorageInfo getStorageInfo() {
return storageInfo;
}
@Override
public String toString() {
return "ReportedBlockInfo [block=" + block + ", dn="
+ storageInfo.getDatanodeDescriptor()
+ ", reportedState=" + reportedState + "]";
}
}
/**
* Remove all pending DN messages which reference the given DN.
* @param dn the datanode whose messages we should remove.
*/
void removeAllMessagesForDatanode(DatanodeDescriptor dn) {
for (Map.Entry<Block, Queue<ReportedBlockInfo>> entry :
queueByBlockId.entrySet()) {
Queue<ReportedBlockInfo> newQueue = Lists.newLinkedList();
Queue<ReportedBlockInfo> oldQueue = entry.getValue();
while (!oldQueue.isEmpty()) {
ReportedBlockInfo rbi = oldQueue.remove();
if (!rbi.getStorageInfo().getDatanodeDescriptor().equals(dn)) {
newQueue.add(rbi);
} else {
count--;
}
}
queueByBlockId.put(entry.getKey(), newQueue);
}
}
void enqueueReportedBlock(DatanodeStorageInfo storageInfo, Block block,
ReplicaState reportedState) {
if (BlockIdManager.isStripedBlockID(block.getBlockId())) {
Block blkId = new Block(BlockIdManager.convertToStripedID(block
.getBlockId()));
getBlockQueue(blkId).add(
new ReportedBlockInfo(storageInfo, new Block(block), reportedState));
} else {
block = new Block(block);
getBlockQueue(block).add(
new ReportedBlockInfo(storageInfo, block, reportedState));
}
count++;
}
void removeQueuedBlock(DatanodeStorageInfo storageInfo, Block block) {
if (storageInfo == null || block == null) {
return;
}
Block blk = new Block(block);
if (BlockIdManager.isStripedBlockID(block.getBlockId())) {
blk = new Block(BlockIdManager.convertToStripedID(block
.getBlockId()));
}
Queue<ReportedBlockInfo> queue = queueByBlockId.get(blk);
if (queue == null) {
return;
}
// We only want the latest non-future reported block to be queued for each
// DataNode. Otherwise, there can be a race condition that causes an old
// reported block to be kept in the queue until the SNN switches to ANN and
// the old reported block will be processed and marked as corrupt by the ANN.
// See HDFS-17453
int size = queue.size();
if (queue.removeIf(rbi -> storageInfo.equals(rbi.storageInfo))) {
count -= (size - queue.size());
}
// If the block message queue is now empty, we should remove the block
// from the queue.
if (queue.isEmpty()) {
queueByBlockId.remove(blk);
}
}
/**
* @return any messages that were previously queued for the given block,
* or null if no messages were queued.
*/
Queue<ReportedBlockInfo> takeBlockQueue(Block block) {
Queue<ReportedBlockInfo> queue = queueByBlockId.remove(block);
if (queue != null) {
count -= queue.size();
}
return queue;
}
private Queue<ReportedBlockInfo> getBlockQueue(Block block) {
Queue<ReportedBlockInfo> queue = queueByBlockId.get(block);
if (queue == null) {
queue = Lists.newLinkedList();
queueByBlockId.put(block, queue);
}
return queue;
}
int count() {
return count ;
}
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
for (Map.Entry<Block, Queue<ReportedBlockInfo>> entry :
queueByBlockId.entrySet()) {
sb.append("Block " + entry.getKey() + ":\n");
for (ReportedBlockInfo rbi : entry.getValue()) {
sb.append(" ").append(rbi).append("\n");
}
}
return sb.toString();
}
Iterable<ReportedBlockInfo> takeAll() {
List<ReportedBlockInfo> rbis = Lists.newArrayListWithCapacity(
count);
for (Queue<ReportedBlockInfo> q : queueByBlockId.values()) {
rbis.addAll(q);
}
queueByBlockId.clear();
count = 0;
return rbis;
}
} | ReportedBlockInfo |
java | spring-projects__spring-security | web/src/main/java/org/springframework/security/web/authentication/preauth/AbstractPreAuthenticatedProcessingFilter.java | {
"start": 4989,
"end": 18136
} | class ____ extends GenericFilterBean
implements ApplicationEventPublisherAware {
private SecurityContextHolderStrategy securityContextHolderStrategy = SecurityContextHolder
.getContextHolderStrategy();
private @Nullable ApplicationEventPublisher eventPublisher = null;
private AuthenticationDetailsSource<HttpServletRequest, ?> authenticationDetailsSource = new WebAuthenticationDetailsSource();
@SuppressWarnings("NullAway.Init")
private AuthenticationManager authenticationManager;
private boolean continueFilterChainOnUnsuccessfulAuthentication = true;
private boolean checkForPrincipalChanges;
private boolean invalidateSessionOnPrincipalChange = true;
private @Nullable AuthenticationSuccessHandler authenticationSuccessHandler = null;
private @Nullable AuthenticationFailureHandler authenticationFailureHandler = null;
private RequestMatcher requiresAuthenticationRequestMatcher = new PreAuthenticatedProcessingRequestMatcher();
private SecurityContextRepository securityContextRepository = new HttpSessionSecurityContextRepository();
private boolean mfaEnabled;
/**
* Check whether all required properties have been set.
*/
@Override
public void afterPropertiesSet() {
try {
super.afterPropertiesSet();
}
catch (ServletException ex) {
// convert to RuntimeException for passivity on afterPropertiesSet signature
throw new RuntimeException(ex);
}
Assert.notNull(this.authenticationManager, "An AuthenticationManager must be set");
}
/**
* Try to authenticate a pre-authenticated user with Spring Security if the user has
* not yet been authenticated.
*/
@Override
public void doFilter(ServletRequest request, ServletResponse response, FilterChain chain)
throws IOException, ServletException {
if (this.requiresAuthenticationRequestMatcher.matches((HttpServletRequest) request)) {
if (logger.isDebugEnabled()) {
logger.debug(LogMessage
.of(() -> "Authenticating " + this.securityContextHolderStrategy.getContext().getAuthentication()));
}
doAuthenticate((HttpServletRequest) request, (HttpServletResponse) response);
}
else {
if (logger.isTraceEnabled()) {
logger.trace(LogMessage.format("Did not authenticate since request did not match [%s]",
this.requiresAuthenticationRequestMatcher));
}
}
chain.doFilter(request, response);
}
/**
* Determines if the current principal has changed. The default implementation tries
*
* <ul>
* <li>If the {@link #getPreAuthenticatedPrincipal(HttpServletRequest)} is a String,
* the {@link Authentication#getName()} is compared against the pre authenticated
* principal</li>
* <li>Otherwise, the {@link #getPreAuthenticatedPrincipal(HttpServletRequest)} is
* compared against the {@link Authentication#getPrincipal()}
* </ul>
*
* <p>
* Subclasses can override this method to determine when a principal has changed.
* </p>
* @param request
* @param currentAuthentication
* @return true if the principal has changed, else false
*/
protected boolean principalChanged(HttpServletRequest request, Authentication currentAuthentication) {
Object principal = getPreAuthenticatedPrincipal(request);
if ((principal instanceof String) && currentAuthentication.getName().equals(principal)) {
return false;
}
if (principal != null && principal.equals(currentAuthentication.getPrincipal())) {
return false;
}
this.logger.debug(LogMessage.format("Pre-authenticated principal has changed to %s and will be reauthenticated",
principal));
return true;
}
/**
* Do the actual authentication for a pre-authenticated user.
*/
private void doAuthenticate(HttpServletRequest request, HttpServletResponse response)
throws IOException, ServletException {
Object principal = getPreAuthenticatedPrincipal(request);
if (principal == null) {
this.logger.debug("No pre-authenticated principal found in request");
return;
}
this.logger.debug(LogMessage.format("preAuthenticatedPrincipal = %s, trying to authenticate", principal));
Object credentials = getPreAuthenticatedCredentials(request);
try {
PreAuthenticatedAuthenticationToken authenticationRequest = new PreAuthenticatedAuthenticationToken(
principal, credentials);
authenticationRequest.setDetails(this.authenticationDetailsSource.buildDetails(request));
Authentication authenticationResult = this.authenticationManager.authenticate(authenticationRequest);
Authentication current = this.securityContextHolderStrategy.getContext().getAuthentication();
if (shouldPerformMfa(current, authenticationResult)) {
authenticationResult = authenticationResult.toBuilder()
// @formatter:off
.authorities((a) -> {
Set<String> newAuthorities = a.stream()
.map(GrantedAuthority::getAuthority)
.collect(Collectors.toUnmodifiableSet());
for (GrantedAuthority currentAuthority : current.getAuthorities()) {
if (!newAuthorities.contains(currentAuthority.getAuthority())) {
a.add(currentAuthority);
}
}
})
.build();
// @formatter:on
}
successfulAuthentication(request, response, authenticationResult);
}
catch (AuthenticationException ex) {
unsuccessfulAuthentication(request, response, ex);
if (!this.continueFilterChainOnUnsuccessfulAuthentication) {
throw ex;
}
}
}
@Contract("null, _ -> false")
private boolean shouldPerformMfa(@Nullable Authentication current, Authentication authenticationResult) {
if (!this.mfaEnabled) {
return false;
}
if (current == null || !current.isAuthenticated()) {
return false;
}
if (!declaresToBuilder(authenticationResult)) {
return false;
}
return current.getName().equals(authenticationResult.getName());
}
private static boolean declaresToBuilder(Authentication authentication) {
for (Method method : authentication.getClass().getDeclaredMethods()) {
if (method.getName().equals("toBuilder") && method.getParameterTypes().length == 0) {
return true;
}
}
return false;
}
/**
* Puts the <code>Authentication</code> instance returned by the authentication
* manager into the secure context.
*/
protected void successfulAuthentication(HttpServletRequest request, HttpServletResponse response,
Authentication authResult) throws IOException, ServletException {
this.logger.debug(LogMessage.format("Authentication success: %s", authResult));
SecurityContext context = this.securityContextHolderStrategy.createEmptyContext();
context.setAuthentication(authResult);
this.securityContextHolderStrategy.setContext(context);
this.securityContextRepository.saveContext(context, request, response);
if (this.eventPublisher != null) {
this.eventPublisher.publishEvent(new InteractiveAuthenticationSuccessEvent(authResult, this.getClass()));
}
if (this.authenticationSuccessHandler != null) {
this.authenticationSuccessHandler.onAuthenticationSuccess(request, response, authResult);
}
}
/**
* Ensures the authentication object in the secure context is set to null when
* authentication fails.
* <p>
* Caches the failure exception as a request attribute
*/
protected void unsuccessfulAuthentication(HttpServletRequest request, HttpServletResponse response,
AuthenticationException failed) throws IOException, ServletException {
this.securityContextHolderStrategy.clearContext();
this.logger.debug("Cleared security context due to exception", failed);
request.setAttribute(WebAttributes.AUTHENTICATION_EXCEPTION, failed);
if (this.authenticationFailureHandler != null) {
this.authenticationFailureHandler.onAuthenticationFailure(request, response, failed);
}
}
/**
* @param anApplicationEventPublisher The ApplicationEventPublisher to use
*/
@Override
public void setApplicationEventPublisher(ApplicationEventPublisher anApplicationEventPublisher) {
this.eventPublisher = anApplicationEventPublisher;
}
/**
* Enables Multi-Factor Authentication (MFA) support.
* @param mfaEnabled true to enable MFA support, false to disable it. Default is
* false.
*/
public void setMfaEnabled(boolean mfaEnabled) {
this.mfaEnabled = mfaEnabled;
}
/**
* Sets the {@link SecurityContextRepository} to save the {@link SecurityContext} on
* authentication success. The default action is to save the {@link SecurityContext}
* in {@link HttpSession} using {@link HttpSessionSecurityContextRepository}.
* @param securityContextRepository the {@link SecurityContextRepository} to use.
* Cannot be null.
*/
public void setSecurityContextRepository(SecurityContextRepository securityContextRepository) {
Assert.notNull(securityContextRepository, "securityContextRepository cannot be null");
this.securityContextRepository = securityContextRepository;
}
/**
* @param authenticationDetailsSource The AuthenticationDetailsSource to use
*/
public void setAuthenticationDetailsSource(
AuthenticationDetailsSource<HttpServletRequest, ?> authenticationDetailsSource) {
Assert.notNull(authenticationDetailsSource, "AuthenticationDetailsSource required");
this.authenticationDetailsSource = authenticationDetailsSource;
}
protected AuthenticationDetailsSource<HttpServletRequest, ?> getAuthenticationDetailsSource() {
return this.authenticationDetailsSource;
}
/**
* @param authenticationManager The AuthenticationManager to use
*/
public void setAuthenticationManager(AuthenticationManager authenticationManager) {
this.authenticationManager = authenticationManager;
}
/**
* If set to {@code true} (the default), any {@code AuthenticationException} raised by
* the {@code AuthenticationManager} will be swallowed, and the request will be
* allowed to proceed, potentially using alternative authentication mechanisms. If
* {@code false}, authentication failure will result in an immediate exception.
* @param shouldContinue set to {@code true} to allow the request to proceed after a
* failed authentication.
*/
public void setContinueFilterChainOnUnsuccessfulAuthentication(boolean shouldContinue) {
this.continueFilterChainOnUnsuccessfulAuthentication = shouldContinue;
}
/**
* If set, the pre-authenticated principal will be checked on each request and
* compared against the name of the current <tt>Authentication</tt> object. A check to
* determine if {@link Authentication#getPrincipal()} is equal to the principal will
* also be performed. If a change is detected, the user will be reauthenticated.
* @param checkForPrincipalChanges
*/
public void setCheckForPrincipalChanges(boolean checkForPrincipalChanges) {
this.checkForPrincipalChanges = checkForPrincipalChanges;
}
/**
* If <tt>checkForPrincipalChanges</tt> is set, and a change of principal is detected,
* determines whether any existing session should be invalidated before proceeding to
* authenticate the new principal.
* @param invalidateSessionOnPrincipalChange <tt>false</tt> to retain the existing
* session. Defaults to <tt>true</tt>.
*/
public void setInvalidateSessionOnPrincipalChange(boolean invalidateSessionOnPrincipalChange) {
this.invalidateSessionOnPrincipalChange = invalidateSessionOnPrincipalChange;
}
/**
* Sets the strategy used to handle a successful authentication.
*/
public void setAuthenticationSuccessHandler(AuthenticationSuccessHandler authenticationSuccessHandler) {
this.authenticationSuccessHandler = authenticationSuccessHandler;
}
/**
* Sets the strategy used to handle a failed authentication.
*/
public void setAuthenticationFailureHandler(AuthenticationFailureHandler authenticationFailureHandler) {
this.authenticationFailureHandler = authenticationFailureHandler;
}
/**
* Sets the request matcher to check whether to proceed the request further.
*/
public void setRequiresAuthenticationRequestMatcher(RequestMatcher requiresAuthenticationRequestMatcher) {
Assert.notNull(requiresAuthenticationRequestMatcher, "requestMatcher cannot be null");
this.requiresAuthenticationRequestMatcher = requiresAuthenticationRequestMatcher;
}
/**
* Sets the {@link SecurityContextHolderStrategy} to use. The default action is to use
* the {@link SecurityContextHolderStrategy} stored in {@link SecurityContextHolder}.
*
* @since 5.8
*/
public void setSecurityContextHolderStrategy(SecurityContextHolderStrategy securityContextHolderStrategy) {
Assert.notNull(securityContextHolderStrategy, "securityContextHolderStrategy cannot be null");
this.securityContextHolderStrategy = securityContextHolderStrategy;
}
/**
* Override to extract the principal information from the current request
*/
protected abstract @Nullable Object getPreAuthenticatedPrincipal(HttpServletRequest request);
/**
* Override to extract the credentials (if applicable) from the current request.
* Should not return null for a valid principal, though some implementations may
* return a dummy value.
*/
protected abstract @Nullable Object getPreAuthenticatedCredentials(HttpServletRequest request);
/**
* Request matcher for default auth check logic
*/
private | AbstractPreAuthenticatedProcessingFilter |
java | spring-projects__spring-framework | spring-context/src/main/java/org/springframework/scripting/support/ScriptFactoryPostProcessor.java | {
"start": 23224,
"end": 25651
} | class ____ constructor arguments).
* @param bd the full script bean definition
* @param scriptFactoryBeanName the name of the internal ScriptFactory bean
* @param scriptSource the ScriptSource for the scripted bean
* @param interfaces the interfaces that the scripted bean is supposed to implement
* @return the extracted ScriptFactory bean definition
* @see org.springframework.scripting.ScriptFactory#getScriptedObject
*/
protected BeanDefinition createScriptedObjectBeanDefinition(BeanDefinition bd, String scriptFactoryBeanName,
ScriptSource scriptSource, Class<?> @Nullable [] interfaces) {
GenericBeanDefinition objectBd = new GenericBeanDefinition(bd);
objectBd.setFactoryBeanName(scriptFactoryBeanName);
objectBd.setFactoryMethodName("getScriptedObject");
objectBd.getConstructorArgumentValues().clear();
objectBd.getConstructorArgumentValues().addIndexedArgumentValue(0, scriptSource);
objectBd.getConstructorArgumentValues().addIndexedArgumentValue(1, interfaces);
return objectBd;
}
/**
* Create a refreshable proxy for the given AOP TargetSource.
* @param ts the refreshable TargetSource
* @param interfaces the proxy interfaces (may be {@code null} to
* indicate proxying of all interfaces implemented by the target class)
* @return the generated proxy
* @see RefreshableScriptTargetSource
*/
protected Object createRefreshableProxy(TargetSource ts, Class<?> @Nullable [] interfaces, boolean proxyTargetClass) {
ProxyFactory proxyFactory = new ProxyFactory();
proxyFactory.setTargetSource(ts);
ClassLoader classLoader = this.beanClassLoader;
if (interfaces != null) {
proxyFactory.setInterfaces(interfaces);
}
else {
Class<?> targetClass = ts.getTargetClass();
if (targetClass != null) {
proxyFactory.setInterfaces(ClassUtils.getAllInterfacesForClass(targetClass, this.beanClassLoader));
}
}
if (proxyTargetClass) {
classLoader = null; // force use of Class.getClassLoader()
proxyFactory.setProxyTargetClass(true);
}
DelegatingIntroductionInterceptor introduction = new DelegatingIntroductionInterceptor(ts);
introduction.suppressInterface(TargetSource.class);
proxyFactory.addAdvice(introduction);
return proxyFactory.getProxy(classLoader);
}
/**
* Destroy the inner bean factory (used for scripts) on shutdown.
*/
@Override
public void destroy() {
this.scriptBeanFactory.destroySingletons();
}
}
| and |
java | spring-projects__spring-security | acl/src/main/java/org/springframework/security/acls/domain/AccessControlEntryImpl.java | {
"start": 1167,
"end": 5144
} | class ____ implements AccessControlEntry, AuditableAccessControlEntry {
private final Acl acl;
private Permission permission;
private final Serializable id;
private final Sid sid;
private boolean auditFailure = false;
private boolean auditSuccess = false;
private final boolean granting;
public AccessControlEntryImpl(Serializable id, Acl acl, Sid sid, Permission permission, boolean granting,
boolean auditSuccess, boolean auditFailure) {
Assert.notNull(acl, "Acl required");
Assert.notNull(sid, "Sid required");
Assert.notNull(permission, "Permission required");
this.id = id;
this.acl = acl; // can be null
this.sid = sid;
this.permission = permission;
this.granting = granting;
this.auditSuccess = auditSuccess;
this.auditFailure = auditFailure;
}
@Override
public boolean equals(Object arg0) {
if (!(arg0 instanceof AccessControlEntryImpl)) {
return false;
}
AccessControlEntryImpl other = (AccessControlEntryImpl) arg0;
if (this.acl == null) {
if (other.getAcl() != null) {
return false;
}
// Both this.acl and rhs.acl are null and thus equal
}
else {
// this.acl is non-null
if (other.getAcl() == null) {
return false;
}
// Both this.acl and rhs.acl are non-null, so do a comparison
if (this.acl.getObjectIdentity() == null) {
if (other.acl.getObjectIdentity() != null) {
return false;
}
// Both this.acl and rhs.acl are null and thus equal
}
else {
// Both this.acl.objectIdentity and rhs.acl.objectIdentity are non-null
if (!this.acl.getObjectIdentity().equals(other.getAcl().getObjectIdentity())) {
return false;
}
}
}
if (this.id == null) {
if (other.id != null) {
return false;
}
// Both this.id and rhs.id are null and thus equal
}
else {
// this.id is non-null
if (other.id == null) {
return false;
}
// Both this.id and rhs.id are non-null
if (!this.id.equals(other.id)) {
return false;
}
}
if ((this.auditFailure != other.isAuditFailure()) || (this.auditSuccess != other.isAuditSuccess())
|| (this.granting != other.isGranting()) || !this.permission.equals(other.getPermission())
|| !this.sid.equals(other.getSid())) {
return false;
}
return true;
}
@Override
public int hashCode() {
int result = this.permission.hashCode();
result = 31 * result + ((this.id != null) ? this.id.hashCode() : 0);
result = 31 * result + (this.sid.hashCode());
result = 31 * result + (this.auditFailure ? 1 : 0);
result = 31 * result + (this.auditSuccess ? 1 : 0);
result = 31 * result + (this.granting ? 1 : 0);
return result;
}
@Override
public Acl getAcl() {
return this.acl;
}
@Override
public Serializable getId() {
return this.id;
}
@Override
public Permission getPermission() {
return this.permission;
}
@Override
public Sid getSid() {
return this.sid;
}
@Override
public boolean isAuditFailure() {
return this.auditFailure;
}
@Override
public boolean isAuditSuccess() {
return this.auditSuccess;
}
@Override
public boolean isGranting() {
return this.granting;
}
void setAuditFailure(boolean auditFailure) {
this.auditFailure = auditFailure;
}
void setAuditSuccess(boolean auditSuccess) {
this.auditSuccess = auditSuccess;
}
void setPermission(Permission permission) {
Assert.notNull(permission, "Permission required");
this.permission = permission;
}
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("AccessControlEntryImpl[");
sb.append("id: ").append(this.id).append("; ");
sb.append("granting: ").append(this.granting).append("; ");
sb.append("sid: ").append(this.sid).append("; ");
sb.append("permission: ").append(this.permission).append("; ");
sb.append("auditSuccess: ").append(this.auditSuccess).append("; ");
sb.append("auditFailure: ").append(this.auditFailure);
sb.append("]");
return sb.toString();
}
}
| AccessControlEntryImpl |
java | google__truth | core/src/test/java/com/google/common/truth/StackTraceCleanerTest.java | {
"start": 10822,
"end": 16377
} | class ____ called directly without any subject's subclass or {@link
* StandardSubjectBuilder} in the call stack should not happen in practical, testing anyway to
* make sure even if it does, the behavior should match expectation.
*/
@Test
public void truthFrameWithOutSubject_shouldNotCleaned() {
Throwable throwable =
createThrowableWithStackTrace(
"com.google.random.Package",
// two or more truth frame will trigger string matching mechanism to got it collapsed
"com.google.common.truth.FailureMetadata",
"com.google.example.SomeClass");
cleanStackTrace(throwable);
assertThat(throwable.getStackTrace())
.isEqualTo(
new StackTraceElement[] {
createStackTraceElement("com.google.random.Package"),
createStackTraceElement("com.google.common.truth.FailureMetadata"),
createStackTraceElement("com.google.example.SomeClass"),
});
}
@Test
public void causingThrowablesAreAlsoCleaned() {
Throwable cause2 = createThrowableWithStackTrace("com.example.Foo", "org.junit.FilterMe");
Throwable cause1 =
createThrowableWithStackTrace(cause2, "com.example.Bar", "org.junit.FilterMe");
Throwable rootThrowable =
createThrowableWithStackTrace(cause1, "com.example.Car", "org.junit.FilterMe");
cleanStackTrace(rootThrowable);
assertThat(rootThrowable.getStackTrace()).isEqualTo(createStackTrace("com.example.Car"));
assertThat(cause1.getStackTrace()).isEqualTo(createStackTrace("com.example.Bar"));
assertThat(cause2.getStackTrace()).isEqualTo(createStackTrace("com.example.Foo"));
}
@Test
public void suppressedThrowablesAreAlsoCleaned() {
Throwable throwable = createThrowableWithStackTrace("com.example.Foo", "org.junit.FilterMe");
Throwable suppressed1 = createThrowableWithStackTrace("com.example.Bar", "org.junit.FilterMe");
Throwable suppressed2 = createThrowableWithStackTrace("com.example.Car", "org.junit.FilterMe");
throwable.addSuppressed(suppressed1);
throwable.addSuppressed(suppressed2);
cleanStackTrace(throwable);
assertThat(throwable.getStackTrace()).isEqualTo(createStackTrace("com.example.Foo"));
assertThat(suppressed1.getStackTrace()).isEqualTo(createStackTrace("com.example.Bar"));
assertThat(suppressed2.getStackTrace()).isEqualTo(createStackTrace("com.example.Car"));
}
@Test
public void mixedCausingAndSuppressThrowablesAreCleaned() {
Throwable suppressed1 = createThrowableWithStackTrace("com.example.Foo", "org.junit.FilterMe");
Throwable cause2 = createThrowableWithStackTrace("com.example.Bar", "org.junit.FilterMe");
Throwable cause1 =
createThrowableWithStackTrace(cause2, "com.example.Car", "org.junit.FilterMe");
Throwable suppressed2 =
createThrowableWithStackTrace(cause1, "com.example.Dar", "org.junit.FilterMe");
Throwable throwable = createThrowableWithStackTrace("com.example.Far", "org.junit.FilterMe");
throwable.addSuppressed(suppressed1);
throwable.addSuppressed(suppressed2);
cleanStackTrace(throwable);
assertThat(throwable.getStackTrace()).isEqualTo(createStackTrace("com.example.Far"));
assertThat(suppressed1.getStackTrace()).isEqualTo(createStackTrace("com.example.Foo"));
assertThat(suppressed2.getStackTrace()).isEqualTo(createStackTrace("com.example.Dar"));
assertThat(cause1.getStackTrace()).isEqualTo(createStackTrace("com.example.Car"));
assertThat(cause2.getStackTrace()).isEqualTo(createStackTrace("com.example.Bar"));
}
@Test
public void cleaningTraceIsIdempotent() {
Throwable throwable = createThrowableWithStackTrace("com.example.Foo", "org.junit.FilterMe");
cleanStackTrace(throwable);
cleanStackTrace(throwable);
assertThat(throwable.getStackTrace()).isEqualTo(createStackTrace("com.example.Foo"));
}
@Test
public void cyclesAreHandled() {
SelfReferencingThrowable selfReferencingThrowable =
new SelfReferencingThrowable("com.example.Foo", "org.junit.FilterMe");
cleanStackTrace(selfReferencingThrowable);
assertThat(selfReferencingThrowable.getStackTrace())
.isEqualTo(createStackTrace("com.example.Foo"));
}
private static Throwable createThrowableWithStackTrace(String... classNames) {
return createThrowableWithStackTrace(/* cause= */ null, classNames);
}
private static Throwable createThrowableWithStackTrace(
@Nullable Throwable cause, String... classNames) {
Throwable throwable = new RuntimeException(cause);
StackTraceElement[] stackTrace = createStackTrace(classNames);
throwable.setStackTrace(stackTrace);
return throwable;
}
private static StackTraceElement[] createStackTrace(String... classNames) {
StackTraceElement[] stackTrace = new StackTraceElement[classNames.length];
for (int i = 0; i < classNames.length; i++) {
stackTrace[i] = createStackTraceElement(classNames[i]);
}
return stackTrace;
}
private static StackTraceElement createStackTraceElement(String className) {
return new StackTraceElement(className, "", "", -1);
}
private static StackTraceElement createCollapsedStackTraceElement(
String frameworkName, int collapsed) {
return new StackTraceElement(
"[["
+ frameworkName
+ ": "
+ collapsed
+ " frames collapsed ("
+ StackTraceCleaner.CLEANER_LINK
+ ")]]",
"",
"",
0);
}
private static | is |
java | spring-projects__spring-boot | module/spring-boot-resttestclient/src/test/java/org/springframework/boot/resttestclient/TestRestTemplateTests.java | {
"start": 21090,
"end": 21224
} | interface ____ {
void doWithTestRestTemplate(TestRestTemplate testRestTemplate, URI relativeUri);
}
static | TestRestTemplateCallback |
java | quarkusio__quarkus | extensions/grpc/deployment/src/main/java/io/quarkus/grpc/deployment/GrpcClientProcessor.java | {
"start": 6653,
"end": 8312
} | interface ____ the generated Mutiny client: " + generatedClient);
}
generatedClients.put(serviceInterface, generatedClient.name());
}
for (InjectionPointInfo injectionPoint : beanDiscovery.getInjectionPoints()) {
AnnotationInstance clientAnnotation = injectionPoint.getRequiredQualifier(GrpcDotNames.GRPC_CLIENT);
if (clientAnnotation == null) {
continue;
}
Set<String> registeredInterceptors = getRegisteredInterceptors(injectionPoint);
String clientName;
AnnotationValue clientNameValue = clientAnnotation.value();
if (clientNameValue == null || clientNameValue.asString().equals(GrpcClient.ELEMENT_NAME)) {
// Determine the service name from the annotated element
if (clientAnnotation.target().kind() == Kind.FIELD) {
clientName = clientAnnotation.target().asField().name();
} else if (clientAnnotation.target().kind() == Kind.METHOD_PARAMETER) {
MethodParameterInfo param = clientAnnotation.target().asMethodParameter();
clientName = param.method().parameterName(param.position());
if (clientName == null) {
throw new DeploymentException("Unable to determine the client name from the parameter at position "
+ param.position()
+ " in method "
+ param.method().declaringClass().name() + "#" + param.method().name()
+ "() - compile the | for |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/streaming/api/graph/util/OperatorChainInfo.java | {
"start": 1553,
"end": 7418
} | class ____ {
private final Integer startNodeId;
private final Map<Integer, List<ChainedOperatorHashInfo>> chainedOperatorHashes;
private final Map<Integer, ChainedSourceInfo> chainedSources;
private final Map<Integer, ResourceSpec> chainedMinResources;
private final Map<Integer, ResourceSpec> chainedPreferredResources;
private final Map<Integer, String> chainedNames;
/** The {@link OperatorInfo}s, key is the id of the stream node. */
private final Map<Integer, OperatorInfo> chainedOperatorInfos;
private final List<OperatorCoordinator.Provider> coordinatorProviders;
private final List<StreamNode> chainedNodes;
private final List<StreamEdge> transitiveOutEdges;
private final List<StreamEdge> transitiveInEdges;
private InputOutputFormatContainer inputOutputFormatContainer = null;
public OperatorChainInfo(int startNodeId) {
this.startNodeId = startNodeId;
this.chainedOperatorHashes = new HashMap<>();
this.coordinatorProviders = new ArrayList<>();
this.chainedSources = new HashMap<>();
this.chainedMinResources = new HashMap<>();
this.chainedPreferredResources = new HashMap<>();
this.chainedNames = new HashMap<>();
this.chainedNodes = new ArrayList<>();
this.transitiveOutEdges = new ArrayList<>();
this.transitiveInEdges = new ArrayList<>();
this.chainedOperatorInfos = new HashMap<>();
}
public Integer getStartNodeId() {
return startNodeId;
}
public List<ChainedOperatorHashInfo> getChainedOperatorHashes(int startNodeId) {
return chainedOperatorHashes.get(startNodeId);
}
public void addCoordinatorProvider(OperatorCoordinator.Provider coordinator) {
coordinatorProviders.add(coordinator);
}
public List<OperatorCoordinator.Provider> getCoordinatorProviders() {
return coordinatorProviders;
}
public Map<Integer, ChainedSourceInfo> getChainedSources() {
return chainedSources;
}
public OperatorID addNodeToChain(
int currentNodeId, String operatorName, JobVertexBuildContext jobVertexBuildContext) {
StreamGraph streamGraph = jobVertexBuildContext.getStreamGraph();
StreamNode streamNode = streamGraph.getStreamNode(currentNodeId);
recordChainedNode(streamNode);
List<ChainedOperatorHashInfo> operatorHashes =
chainedOperatorHashes.computeIfAbsent(startNodeId, k -> new ArrayList<>());
byte[] primaryHashBytes = jobVertexBuildContext.getHash(currentNodeId);
for (byte[] legacyHash : jobVertexBuildContext.getLegacyHashes(currentNodeId)) {
operatorHashes.add(
new ChainedOperatorHashInfo(primaryHashBytes, legacyHash, streamNode));
}
streamNode
.getCoordinatorProvider(operatorName, new OperatorID(primaryHashBytes))
.map(coordinatorProviders::add);
return new OperatorID(primaryHashBytes);
}
public void setTransitiveOutEdges(final List<StreamEdge> transitiveOutEdges) {
this.transitiveOutEdges.addAll(transitiveOutEdges);
}
public List<StreamEdge> getTransitiveOutEdges() {
return transitiveOutEdges;
}
public void recordChainedNode(StreamNode streamNode) {
chainedNodes.add(streamNode);
}
public OperatorChainInfo newChain(Integer startNodeId) {
return new OperatorChainInfo(startNodeId);
}
public List<StreamNode> getAllChainedNodes() {
return chainedNodes;
}
public boolean hasFormatContainer() {
return inputOutputFormatContainer != null;
}
public InputOutputFormatContainer getOrCreateFormatContainer() {
if (inputOutputFormatContainer == null) {
inputOutputFormatContainer =
new InputOutputFormatContainer(Thread.currentThread().getContextClassLoader());
}
return inputOutputFormatContainer;
}
public void addChainedSource(StreamNode sourceNode, ChainedSourceInfo chainedSourceInfo) {
recordChainedNode(sourceNode);
chainedSources.put(sourceNode.getId(), chainedSourceInfo);
}
public void addChainedMinResources(Integer sourceNodeId, ResourceSpec resourceSpec) {
chainedMinResources.put(sourceNodeId, resourceSpec);
}
public ResourceSpec getChainedMinResources(Integer sourceNodeId) {
return chainedMinResources.get(sourceNodeId);
}
public void addChainedPreferredResources(Integer sourceNodeId, ResourceSpec resourceSpec) {
chainedPreferredResources.put(sourceNodeId, resourceSpec);
}
public ResourceSpec getChainedPreferredResources(Integer sourceNodeId) {
return chainedPreferredResources.get(sourceNodeId);
}
public String getChainedName(Integer streamNodeId) {
return chainedNames.get(streamNodeId);
}
public Map<Integer, String> getChainedNames() {
return chainedNames;
}
public void addChainedName(Integer streamNodeId, String chainedName) {
this.chainedNames.put(streamNodeId, chainedName);
}
public void addTransitiveInEdge(StreamEdge streamEdge) {
transitiveInEdges.add(streamEdge);
}
public List<StreamEdge> getTransitiveInEdges() {
return transitiveInEdges;
}
public OperatorInfo getOperatorInfo(Integer nodeId) {
return chainedOperatorInfos.get(nodeId);
}
public OperatorInfo createAndGetOperatorInfo(Integer nodeId, OperatorID operatorId) {
OperatorInfo operatorInfo = new OperatorInfo(operatorId);
chainedOperatorInfos.put(nodeId, operatorInfo);
return operatorInfo;
}
public Map<Integer, OperatorInfo> getOperatorInfos() {
return Collections.unmodifiableMap(chainedOperatorInfos);
}
}
| OperatorChainInfo |
java | spring-projects__spring-framework | spring-jdbc/src/test/java/org/springframework/jdbc/datasource/lookup/JndiDataSourceLookupTests.java | {
"start": 967,
"end": 2184
} | class ____ {
private static final String DATA_SOURCE_NAME = "Love is like a stove, burns you when it's hot";
@Test
void testSunnyDay() {
final DataSource expectedDataSource = new StubDataSource();
JndiDataSourceLookup lookup = new JndiDataSourceLookup() {
@Override
protected <T> T lookup(String jndiName, Class<T> requiredType) {
assertThat(jndiName).isEqualTo(DATA_SOURCE_NAME);
return requiredType.cast(expectedDataSource);
}
};
DataSource dataSource = lookup.getDataSource(DATA_SOURCE_NAME);
assertThat(dataSource).as("A DataSourceLookup implementation must *never* return null from getDataSource(): this one obviously (and incorrectly) is").isNotNull();
assertThat(dataSource).isSameAs(expectedDataSource);
}
@Test
void testNoDataSourceAtJndiLocation() {
JndiDataSourceLookup lookup = new JndiDataSourceLookup() {
@Override
protected <T> T lookup(String jndiName, Class<T> requiredType) throws NamingException {
assertThat(jndiName).isEqualTo(DATA_SOURCE_NAME);
throw new NamingException();
}
};
assertThatExceptionOfType(DataSourceLookupFailureException.class).isThrownBy(() ->
lookup.getDataSource(DATA_SOURCE_NAME));
}
}
| JndiDataSourceLookupTests |
java | apache__maven | compat/maven-compat/src/main/java/org/apache/maven/execution/DefaultRuntimeInformation.java | {
"start": 1330,
"end": 2024
} | class ____ implements RuntimeInformation, Initializable {
@Inject
private org.apache.maven.rtinfo.RuntimeInformation rtInfo;
private ArtifactVersion applicationVersion;
@Override
public ArtifactVersion getApplicationVersion() {
return applicationVersion;
}
@Override
public void initialize() throws InitializationException {
String mavenVersion = rtInfo.getMavenVersion();
if (mavenVersion == null || mavenVersion.isEmpty()) {
throw new InitializationException("Unable to read Maven version from maven-core");
}
applicationVersion = new DefaultArtifactVersion(mavenVersion);
}
}
| DefaultRuntimeInformation |
java | hibernate__hibernate-orm | hibernate-envers/src/test/java/org/hibernate/orm/test/envers/integration/ids/protectedmodifier/ProtectedConstructorTest.java | {
"start": 840,
"end": 1564
} | class ____ {
private final ProtectedConstructorEntity testEntity = new ProtectedConstructorEntity(
new WrappedStringId( "embeddedStringId" ),
"string"
);
@BeforeClassTemplate
public void initData(EntityManagerFactoryScope scope) {
// Revision 1
scope.inTransaction( em -> {
em.persist( testEntity );
} );
}
@Test
public void testAuditEntityInstantiation(EntityManagerFactoryScope scope) {
scope.inEntityManager( em -> {
final var auditReader = AuditReaderFactory.get( em );
List result = auditReader.createQuery()
.forEntitiesAtRevision( ProtectedConstructorEntity.class, 1 )
.getResultList();
assertEquals( Arrays.asList( testEntity ), result );
} );
}
}
| ProtectedConstructorTest |
java | quarkusio__quarkus | independent-projects/arc/tests/src/test/java/io/quarkus/arc/test/instance/RawTypeInstanceTest.java | {
"start": 1083,
"end": 1189
} | class ____ {
@SuppressWarnings("rawtypes")
@Inject
Instance instance;
}
}
| Alpha |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/lib/TestMutableMetrics.java | {
"start": 1992,
"end": 2033
} | interface ____ mutable metrics
*/
public | and |
java | apache__camel | core/camel-core/src/test/java/org/apache/camel/model/ModelRouteFilterPatternExcludeTest.java | {
"start": 1069,
"end": 2201
} | class ____ extends ContextTestSupport {
@Override
protected CamelContext createCamelContext() throws Exception {
CamelContext context = super.createCamelContext();
// filter to exclude bar
context.getCamelContextExtension().getContextPlugin(Model.class).setRouteFilterPattern(null, "bar*");
return context;
}
@Test
public void testRouteFilter() throws Exception {
assertEquals(1, context.getRoutes().size());
assertEquals(1, context.getRouteDefinitions().size());
assertEquals("foo", context.getRouteDefinitions().get(0).getId());
getMockEndpoint("mock:foo").expectedMessageCount(1);
template.sendBody("direct:foo", "Hello World");
assertMockEndpointsSatisfied();
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
from("direct:foo").routeId("foo").to("mock:foo");
from("direct:bar").routeId("bar").to("mock:bar");
}
};
}
}
| ModelRouteFilterPatternExcludeTest |
java | elastic__elasticsearch | build-conventions/src/main/java/org/elasticsearch/gradle/internal/checkstyle/StringFormattingCheck.java | {
"start": 1365,
"end": 3388
} | class ____ extends AbstractCheck {
public static final String FORMATTED_MSG_KEY = "forbidden.formatted";
@Override
public int[] getDefaultTokens() {
return getRequiredTokens();
}
@Override
public int[] getAcceptableTokens() {
return getRequiredTokens();
}
@Override
public int[] getRequiredTokens() {
return new int[] { TokenTypes.METHOD_CALL };
}
@Override
public void visitToken(DetailAST ast) {
checkFormattedMethod(ast);
}
// Originally pinched from java/util/Formatter.java but then modified.
// %[argument_index$][flags][width][.precision][t]conversion
private static final Pattern formatSpecifier = Pattern.compile("%(?:\\d+\\$)?(?:[-#+ 0,\\(<]*)?(?:\\d+)?(?:\\.\\d+)?([tT]?[a-zA-Z%])");
private void checkFormattedMethod(DetailAST ast) {
final DetailAST dotAst = ast.findFirstToken(TokenTypes.DOT);
if (dotAst == null) {
return;
}
final String methodName = dotAst.findFirstToken(TokenTypes.IDENT).getText();
if (methodName.equals("formatted") == false) {
return;
}
final DetailAST subjectAst = dotAst.getFirstChild();
String stringContent = null;
if (subjectAst.getType() == TokenTypes.TEXT_BLOCK_LITERAL_BEGIN) {
stringContent = subjectAst.findFirstToken(TokenTypes.TEXT_BLOCK_CONTENT).getText();
} else if (subjectAst.getType() == TokenTypes.STRING_LITERAL) {
stringContent = subjectAst.getText();
}
if (stringContent != null) {
final Matcher m = formatSpecifier.matcher(stringContent);
while (m.find()) {
char specifier = m.group(1).toLowerCase(Locale.ROOT).charAt(0);
if (specifier == 'd' || specifier == 'e' || specifier == 'f' || specifier == 'g' || specifier == 't') {
log(ast, FORMATTED_MSG_KEY, m.group());
}
}
}
}
}
| StringFormattingCheck |
java | apache__hadoop | hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/EagerKeyGeneratorKeyProviderCryptoExtension.java | {
"start": 5382,
"end": 7143
} | class ____ a proxy for a <code>KeyProviderCryptoExtension</code> that
* decorates the underlying <code>CryptoExtension</code> with one that eagerly
* caches pre-generated Encrypted Keys using a <code>ValueQueue</code>
*
* @param conf Configuration object to load parameters from
* @param keyProviderCryptoExtension <code>KeyProviderCryptoExtension</code>
* to delegate calls to.
*/
public EagerKeyGeneratorKeyProviderCryptoExtension(Configuration conf,
KeyProviderCryptoExtension keyProviderCryptoExtension) {
super(keyProviderCryptoExtension,
new CryptoExtension(conf, keyProviderCryptoExtension));
}
/**
* Roll a new version of the given key generating the material for it.
* <p>
* Due to the caching on the ValueQueue, even after a rollNewVersion call,
* {@link #generateEncryptedKey(String)} may still return an old key - even
* when we drain the queue here, the async thread may later fill in old keys.
* This is acceptable since old version keys are still able to decrypt, and
* client shall make no assumptions that it will get a new versioned key
* after rollNewVersion.
*/
@Override
public KeyVersion rollNewVersion(String name)
throws NoSuchAlgorithmException, IOException {
KeyVersion keyVersion = super.rollNewVersion(name);
getExtension().drain(name);
return keyVersion;
}
@Override
public KeyVersion rollNewVersion(String name, byte[] material)
throws IOException {
KeyVersion keyVersion = super.rollNewVersion(name, material);
getExtension().drain(name);
return keyVersion;
}
@Override
public void invalidateCache(String name) throws IOException {
super.invalidateCache(name);
getExtension().drain(name);
}
}
| is |
java | quarkusio__quarkus | extensions/kafka-client/runtime/src/main/java/io/quarkus/kafka/client/runtime/graal/StrimziSubstitutions.java | {
"start": 610,
"end": 1113
} | class ____ implements BooleanSupplier {
@Override
public boolean getAsBoolean() {
try {
KafkaSubstitutions.class.getClassLoader()
.loadClass("io.strimzi.kafka.oauth.client.JaasClientOauthLoginCallbackHandler");
return true;
} catch (Exception e) {
return false;
}
}
}
@TargetClass(className = "com.jayway.jsonpath.internal.filter.ValueNodes", innerClass = "JsonNode", onlyWith = HasStrimzi.class)
final | HasStrimzi |
java | apache__maven | its/core-it-suite/src/test/resources/mng-3710/pom-inheritance/maven-mng3710-pomInheritance-plugin/src/main/java/jar/MyMojo.java | {
"start": 1724,
"end": 2569
} | class ____ extends AbstractMojo {
/**
* Location of the file.
* @parameter expression="${project.build.directory}/touch.txt"
* @required
*/
private File touchFile;
public void execute() throws MojoExecutionException {
File dir = touchFile.getParentFile();
if (dir != null && !dir.exists()) {
dir.mkdirs();
}
FileWriter w = null;
try {
w = new FileWriter(touchFile);
w.write("touch.txt");
} catch (IOException e) {
throw new MojoExecutionException("Error creating file " + touchFile, e);
} finally {
if (w != null) {
try {
w.close();
} catch (IOException e) {
// ignore
}
}
}
}
}
| MyMojo |
java | spring-projects__spring-framework | spring-test/src/test/java/org/springframework/test/web/servlet/samples/standalone/ExceptionHandlerTests.java | {
"start": 1954,
"end": 2523
} | class ____ {
@Test
void localExceptionHandlerMethod() throws Exception {
standaloneSetup(new PersonController()).build()
.perform(get("/person/Clyde"))
.andExpect(status().isOk())
.andExpect(forwardedUrl("errorView"));
}
@Test
void globalExceptionHandlerMethod() throws Exception {
standaloneSetup(new PersonController()).setControllerAdvice(new GlobalExceptionHandler()).build()
.perform(get("/person/Bonnie"))
.andExpect(status().isOk())
.andExpect(forwardedUrl("globalErrorView"));
}
}
@Controller
private static | MvcTests |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/dialect/functional/OracleFollowOnLockingTest.java | {
"start": 21639,
"end": 21742
} | class ____ extends Vehicle {
private double speed;
}
@Entity(name = "Truck")
public static | SportsCar |
java | apache__rocketmq | remoting/src/main/java/org/apache/rocketmq/remoting/protocol/header/namesrv/PutKVConfigRequestHeader.java | {
"start": 1390,
"end": 2086
} | class ____ implements CommandCustomHeader {
@CFNotNull
private String namespace;
@CFNotNull
private String key;
@CFNotNull
private String value;
@Override
public void checkFields() throws RemotingCommandException {
}
public String getNamespace() {
return namespace;
}
public void setNamespace(String namespace) {
this.namespace = namespace;
}
public String getKey() {
return key;
}
public void setKey(String key) {
this.key = key;
}
public String getValue() {
return value;
}
public void setValue(String value) {
this.value = value;
}
}
| PutKVConfigRequestHeader |
java | google__dagger | javatests/dagger/internal/codegen/MissingBindingValidationTest.java | {
"start": 72689,
"end": 72863
} | class ____ @Inject constructor(val bar: Bar<Baz, Baz, Set<Baz>>) {}");
Source barSrc =
CompilerTests.kotlinSource(
"test.Bar.kt", "package test", "", " | Foo |
java | apache__flink | flink-core/src/main/java/org/apache/flink/api/common/operators/GenericDataSourceBase.java | {
"start": 5015,
"end": 5076
} | class ____ the input format.
*
* @return The | describing |
java | hibernate__hibernate-orm | hibernate-envers/src/test/java/org/hibernate/testing/envers/junit/EnversTest.java | {
"start": 1748,
"end": 1901
} | interface ____ {
Class<? extends AuditStrategy>[] auditStrategies() default {
DefaultAuditStrategy.class,
ValidityAuditStrategy.class
};
}
| EnversTest |
java | apache__spark | common/network-shuffle/src/test/java/org/apache/spark/network/shuffle/ExternalShuffleSecuritySuite.java | {
"start": 4257,
"end": 4798
} | class ____ implements SecretKeyHolder {
private final String appId;
private final String secretKey;
TestSecretKeyHolder(String appId, String secretKey) {
this.appId = appId;
this.secretKey = secretKey;
}
@Override
public String getSaslUser(String appId) {
return "user";
}
@Override
public String getSecretKey(String appId) {
if (!appId.equals(this.appId)) {
throw new IllegalArgumentException("Wrong appId!");
}
return secretKey;
}
}
}
| TestSecretKeyHolder |
java | spring-projects__spring-framework | spring-test/src/test/java/org/springframework/test/context/junit4/rules/FailingBeforeAndAfterMethodsSpringRuleTests.java | {
"start": 5059,
"end": 5442
} | class ____ {
@ClassRule
public static final SpringClassRule springClassRule = new SpringClassRule();
@Rule
public final SpringMethodRule springMethodRule = new SpringMethodRule();
@Test
public void testNothing() {
}
@AfterTransaction
public void afterTransaction() {
fail("always failing afterTransaction()");
}
}
}
| FailingAfterTransactionSpringRuleTestCase |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/events/AutoFlushEventListenerTest.java | {
"start": 3659,
"end": 3786
} | class ____ {
@Id
@GeneratedValue
private Integer id;
public Entity1() {
}
}
@Entity(name = "Entity2")
static | Entity1 |
java | elastic__elasticsearch | x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/GetStatusAction.java | {
"start": 930,
"end": 1232
} | class ____ extends ActionType<GetStatusAction.Response> {
public static final GetStatusAction INSTANCE = new GetStatusAction();
public static final String NAME = "cluster:monitor/profiling/status/get";
protected GetStatusAction() {
super(NAME);
}
public static | GetStatusAction |
java | junit-team__junit5 | platform-tooling-support-tests/projects/jupiter-starter/src/test/java/com/example/project/CalculatorParameterizedClassTests.java | {
"start": 730,
"end": 1033
} | class ____ {
@Parameter
int i;
@ParameterizedTest
@ValueSource(ints = { 1, 2 })
void parameterizedTest(int j) {
Calculator calculator = new Calculator();
assertEquals(i + j, calculator.add(i, j));
}
@Nested
@ParameterizedClass
@ValueSource(ints = { 1, 2 })
| CalculatorParameterizedClassTests |
java | apache__spark | sql/hive-thriftserver/src/main/java/org/apache/hive/service/cli/GetInfoType.java | {
"start": 931,
"end": 4392
} | enum ____ {
CLI_MAX_DRIVER_CONNECTIONS(TGetInfoType.CLI_MAX_DRIVER_CONNECTIONS),
CLI_MAX_CONCURRENT_ACTIVITIES(TGetInfoType.CLI_MAX_CONCURRENT_ACTIVITIES),
CLI_DATA_SOURCE_NAME(TGetInfoType.CLI_DATA_SOURCE_NAME),
CLI_FETCH_DIRECTION(TGetInfoType.CLI_FETCH_DIRECTION),
CLI_SERVER_NAME(TGetInfoType.CLI_SERVER_NAME),
CLI_SEARCH_PATTERN_ESCAPE(TGetInfoType.CLI_SEARCH_PATTERN_ESCAPE),
CLI_DBMS_NAME(TGetInfoType.CLI_DBMS_NAME),
CLI_DBMS_VER(TGetInfoType.CLI_DBMS_VER),
CLI_ACCESSIBLE_TABLES(TGetInfoType.CLI_ACCESSIBLE_TABLES),
CLI_ACCESSIBLE_PROCEDURES(TGetInfoType.CLI_ACCESSIBLE_PROCEDURES),
CLI_CURSOR_COMMIT_BEHAVIOR(TGetInfoType.CLI_CURSOR_COMMIT_BEHAVIOR),
CLI_DATA_SOURCE_READ_ONLY(TGetInfoType.CLI_DATA_SOURCE_READ_ONLY),
CLI_DEFAULT_TXN_ISOLATION(TGetInfoType.CLI_DEFAULT_TXN_ISOLATION),
CLI_IDENTIFIER_CASE(TGetInfoType.CLI_IDENTIFIER_CASE),
CLI_IDENTIFIER_QUOTE_CHAR(TGetInfoType.CLI_IDENTIFIER_QUOTE_CHAR),
CLI_MAX_COLUMN_NAME_LEN(TGetInfoType.CLI_MAX_COLUMN_NAME_LEN),
CLI_MAX_CURSOR_NAME_LEN(TGetInfoType.CLI_MAX_CURSOR_NAME_LEN),
CLI_MAX_SCHEMA_NAME_LEN(TGetInfoType.CLI_MAX_SCHEMA_NAME_LEN),
CLI_MAX_CATALOG_NAME_LEN(TGetInfoType.CLI_MAX_CATALOG_NAME_LEN),
CLI_MAX_TABLE_NAME_LEN(TGetInfoType.CLI_MAX_TABLE_NAME_LEN),
CLI_SCROLL_CONCURRENCY(TGetInfoType.CLI_SCROLL_CONCURRENCY),
CLI_TXN_CAPABLE(TGetInfoType.CLI_TXN_CAPABLE),
CLI_USER_NAME(TGetInfoType.CLI_USER_NAME),
CLI_TXN_ISOLATION_OPTION(TGetInfoType.CLI_TXN_ISOLATION_OPTION),
CLI_INTEGRITY(TGetInfoType.CLI_INTEGRITY),
CLI_GETDATA_EXTENSIONS(TGetInfoType.CLI_GETDATA_EXTENSIONS),
CLI_NULL_COLLATION(TGetInfoType.CLI_NULL_COLLATION),
CLI_ALTER_TABLE(TGetInfoType.CLI_ALTER_TABLE),
CLI_ORDER_BY_COLUMNS_IN_SELECT(TGetInfoType.CLI_ORDER_BY_COLUMNS_IN_SELECT),
CLI_SPECIAL_CHARACTERS(TGetInfoType.CLI_SPECIAL_CHARACTERS),
CLI_MAX_COLUMNS_IN_GROUP_BY(TGetInfoType.CLI_MAX_COLUMNS_IN_GROUP_BY),
CLI_MAX_COLUMNS_IN_INDEX(TGetInfoType.CLI_MAX_COLUMNS_IN_INDEX),
CLI_MAX_COLUMNS_IN_ORDER_BY(TGetInfoType.CLI_MAX_COLUMNS_IN_ORDER_BY),
CLI_MAX_COLUMNS_IN_SELECT(TGetInfoType.CLI_MAX_COLUMNS_IN_SELECT),
CLI_MAX_COLUMNS_IN_TABLE(TGetInfoType.CLI_MAX_COLUMNS_IN_TABLE),
CLI_MAX_INDEX_SIZE(TGetInfoType.CLI_MAX_INDEX_SIZE),
CLI_MAX_ROW_SIZE(TGetInfoType.CLI_MAX_ROW_SIZE),
CLI_MAX_STATEMENT_LEN(TGetInfoType.CLI_MAX_STATEMENT_LEN),
CLI_MAX_TABLES_IN_SELECT(TGetInfoType.CLI_MAX_TABLES_IN_SELECT),
CLI_MAX_USER_NAME_LEN(TGetInfoType.CLI_MAX_USER_NAME_LEN),
CLI_OJ_CAPABILITIES(TGetInfoType.CLI_OJ_CAPABILITIES),
CLI_XOPEN_CLI_YEAR(TGetInfoType.CLI_XOPEN_CLI_YEAR),
CLI_CURSOR_SENSITIVITY(TGetInfoType.CLI_CURSOR_SENSITIVITY),
CLI_DESCRIBE_PARAMETER(TGetInfoType.CLI_DESCRIBE_PARAMETER),
CLI_CATALOG_NAME(TGetInfoType.CLI_CATALOG_NAME),
CLI_COLLATION_SEQ(TGetInfoType.CLI_COLLATION_SEQ),
CLI_MAX_IDENTIFIER_LEN(TGetInfoType.CLI_MAX_IDENTIFIER_LEN),
CLI_ODBC_KEYWORDS(TGetInfoType.CLI_ODBC_KEYWORDS);
private final TGetInfoType tInfoType;
GetInfoType(TGetInfoType tInfoType) {
this.tInfoType = tInfoType;
}
public static GetInfoType getGetInfoType(TGetInfoType tGetInfoType) {
for (GetInfoType infoType : values()) {
if (tGetInfoType.equals(infoType.tInfoType)) {
return infoType;
}
}
throw new IllegalArgumentException("Unrecognized Thrift TGetInfoType value: " + tGetInfoType);
}
public TGetInfoType toTGetInfoType() {
return tInfoType;
}
}
| GetInfoType |
java | redisson__redisson | redisson/src/main/java/org/redisson/api/search/index/NumericIndexParams.java | {
"start": 704,
"end": 1792
} | class ____ implements NumericIndex {
private SortMode sortMode;
private boolean noIndex;
private final String fieldName;
private String as;
private boolean indexMissing;
NumericIndexParams(String name) {
this.fieldName = name;
}
@Override
public NumericIndexParams as(String as) {
this.as = as;
return this;
}
@Override
public NumericIndexParams sortMode(SortMode sortMode) {
this.sortMode = sortMode;
return this;
}
@Override
public NumericIndexParams noIndex() {
noIndex = true;
return this;
}
@Override
public NumericIndexParams indexMissing() {
this.indexMissing = true;
return this;
}
public SortMode getSortMode() {
return sortMode;
}
public boolean isNoIndex() {
return noIndex;
}
public String getFieldName() {
return fieldName;
}
public String getAs() {
return as;
}
public boolean isIndexMissing() {
return indexMissing;
}
}
| NumericIndexParams |
java | elastic__elasticsearch | x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/bench/WatcherScheduleEngineBenchmark.java | {
"start": 3345,
"end": 15301
} | class ____ {
private static final Settings SETTINGS = Settings.builder()
.put("xpack.security.enabled", false)
.put("cluster.name", "bench")
.put("script.disable_dynamic", false)
.put("http.cors.enabled", true)
.build();
public static void main(String[] args) throws Exception {
String[] engines = new String[] { "ticker", "scheduler" };
int numWatches = 2000;
int benchTime = 60000;
int interval = 1;
if (args.length % 2 != 0) {
throw new IllegalArgumentException("Uneven number of arguments");
}
for (int i = 0; i < args.length; i += 2) {
String value = args[i + 1];
if ("--num_watches".equals(args[i])) {
numWatches = Integer.valueOf(value);
} else if ("--bench_time".equals(args[i])) {
benchTime = Integer.valueOf(value);
} else if ("--interval".equals(args[i])) {
interval = Integer.valueOf(value);
} else if ("--engines".equals(args[i])) {
engines = Strings.commaDelimitedListToStringArray(value);
}
}
System.out.println("Running schedule benchmark with:");
System.out.println(
"numWatches=" + numWatches + " benchTime=" + benchTime + " interval=" + interval + " engines=" + Arrays.toString(engines)
);
System.out.println("and heap_max=" + JvmInfo.jvmInfo().getMem().getHeapMax());
Environment internalNodeEnv = InternalSettingsPreparer.prepareEnvironment(
Settings.builder().put(SETTINGS).put("node.data", false).build(),
emptyMap(),
null,
() -> {
throw new IllegalArgumentException("settings must have [node.name]");
}
);
// First clean everything and index the watcher (but not via put alert api!)
try (
Node node = new Node(
internalNodeEnv,
PluginsLoader.createPluginsLoader(
PluginsLoader.loadModulesBundles(internalNodeEnv.modulesDir()),
PluginsLoader.loadPluginsBundles(internalNodeEnv.pluginsDir()),
Map.of()
)
).start()
) {
final Client client = node.client();
ClusterHealthResponse response = client.admin().cluster().prepareHealth(TimeValue.THIRTY_SECONDS).setWaitForNodes("2").get();
if (response.getNumberOfNodes() != 2 && response.getNumberOfDataNodes() != 1) {
throw new IllegalStateException("This benchmark needs one extra data only node running outside this benchmark");
}
client.admin().indices().prepareDelete("_all").get();
client.admin().indices().prepareCreate("test").get();
client.prepareIndex().setIndex("test").setId("1").setSource("{}", XContentType.JSON).get();
System.out.println("===============> indexing [" + numWatches + "] watches");
for (int i = 0; i < numWatches; i++) {
final String id = "_id_" + i;
client.prepareIndex()
.setIndex(Watch.INDEX)
.setId(id)
.setSource(
new WatchSourceBuilder().trigger(schedule(interval(interval + "s")))
.input(searchInput(templateRequest(new SearchSourceBuilder(), "test")))
.condition(
new ScriptCondition(
new Script(
ScriptType.INLINE,
Script.DEFAULT_SCRIPT_LANG,
"ctx.payload.hits.total.value > 0",
emptyMap()
)
)
)
.addAction("logging", ActionBuilders.loggingAction("test").setLevel(LoggingLevel.TRACE))
.buildAsBytes(XContentType.JSON),
XContentType.JSON
)
.get();
}
client.admin().indices().prepareFlush(Watch.INDEX, "test").get();
System.out.println("===============> indexed [" + numWatches + "] watches");
}
// Now for each scheduler impl run the benchmark
Map<String, BenchStats> results = new HashMap<>();
for (String engine : engines) {
BenchStats stats = new BenchStats(engine, numWatches);
results.put(engine, stats);
System.out.println("===============> testing engine [" + engine + "]");
System.gc();
Settings settings = Settings.builder()
.put(SETTINGS)
.put("xpack.watcher.trigger.schedule.engine", engine)
.put("node.data", false)
.build();
try (Node node = new MockNode(settings, Arrays.asList(LocalStateWatcher.class))) {
final Client client = node.client();
client.admin().cluster().prepareHealth(TimeValue.THIRTY_SECONDS).setWaitForNodes("2").get();
client.admin().indices().prepareDelete(HistoryStoreField.DATA_STREAM + "*").get();
client.admin().cluster().prepareHealth(TimeValue.THIRTY_SECONDS, Watch.INDEX, "test").setWaitForYellowStatus().get();
Clock clock = node.injector().getInstance(Clock.class);
while (new WatcherStatsRequestBuilder(client).get()
.getNodes()
.stream()
.allMatch(r -> r.getWatcherState() == WatcherState.STARTED) == false) {
Thread.sleep(100);
}
long actualLoadedWatches = new WatcherStatsRequestBuilder(client).get().getWatchesCount();
if (actualLoadedWatches != numWatches) {
throw new IllegalStateException(
"Expected ["
+ numWatches
+ "] watched to be loaded, but only ["
+ actualLoadedWatches
+ "] watches were actually loaded"
);
}
long startTime = clock.millis();
System.out.println("==> watcher started, waiting [" + benchTime + "] seconds now...");
final AtomicBoolean start = new AtomicBoolean(true);
final MeanMetric jvmUsedHeapSpace = new MeanMetric();
Thread sampleThread = new Thread(new Runnable() {
@Override
public void run() {
try {
while (start.get()) {
NodesStatsResponse response = client.admin().cluster().prepareNodesStats("_master").setJvm(true).get();
ByteSizeValue heapUsed = response.getNodes().get(0).getJvm().getMem().getHeapUsed();
jvmUsedHeapSpace.inc(heapUsed.getBytes());
Thread.sleep(1000);
}
} catch (InterruptedException ignored) {}
}
});
sampleThread.start();
Thread.sleep(benchTime);
long endTime = clock.millis();
start.set(false);
sampleThread.join();
NodesStatsResponse response = client.admin().cluster().prepareNodesStats().setThreadPool(true).get();
for (NodeStats nodeStats : response.getNodes()) {
for (ThreadPoolStats.Stats threadPoolStats : nodeStats.getThreadPool()) {
if ("watcher".equals(threadPoolStats.name())) {
stats.setWatcherThreadPoolStats(threadPoolStats);
}
}
}
client.admin().indices().prepareRefresh(HistoryStoreField.DATA_STREAM + "*").get();
Script script = new Script(
ScriptType.INLINE,
Script.DEFAULT_SCRIPT_LANG,
"doc['trigger_event.schedule.triggered_time'].value - doc['trigger_event.schedule.scheduled_time'].value",
emptyMap()
);
assertResponse(
client.prepareSearch(HistoryStoreField.DATA_STREAM + "*")
.setQuery(QueryBuilders.rangeQuery("trigger_event.schedule.scheduled_time").gte(startTime).lte(endTime))
.addAggregation(terms("state").field("state"))
.addAggregation(histogram("delay").script(script).interval(10))
.addAggregation(percentiles("percentile_delay").script(script).percentiles(1.0, 20.0, 50.0, 80.0, 99.0)),
searchResponse -> {
Terms terms = searchResponse.getAggregations().get("state");
stats.setStateStats(terms);
Histogram histogram = searchResponse.getAggregations().get("delay");
stats.setDelayStats(histogram);
System.out.println("===> State");
for (Terms.Bucket bucket : terms.getBuckets()) {
System.out.println("\t" + bucket.getKey() + "=" + bucket.getDocCount());
}
System.out.println("===> Delay");
for (Histogram.Bucket bucket : histogram.getBuckets()) {
System.out.println("\t" + bucket.getKey() + "=" + bucket.getDocCount());
}
Percentiles percentiles = searchResponse.getAggregations().get("percentile_delay");
stats.setDelayPercentiles(percentiles);
stats.setAvgJvmUsed(jvmUsedHeapSpace);
new WatcherServiceRequestBuilder(ESTestCase.TEST_REQUEST_TIMEOUT, client).stop().get();
}
);
}
}
// Finally print out the results in an asciidoc table:
System.out.println("## Ran with [" + numWatches + "] watches, interval [" + interval + "] and bench_time [" + benchTime + "]");
System.out.println();
System.out.println("### Watcher execution and watcher thread pool stats");
System.out.println();
System.out.println(" Name | avg heap used | wtp rejected | wtp completed");
System.out.println("---------- | ------------- | ------------ | -------------");
for (BenchStats benchStats : results.values()) {
benchStats.printThreadStats();
}
System.out.println();
System.out.println("### Watch record state");
System.out.println();
System.out.println(" Name | # state executed | # state failed | # state throttled | # state awaits_execution");
System.out.println("---------- | ---------------- | -------------- | ----------------- | ------------------------");
for (BenchStats benchStats : results.values()) {
benchStats.printWatchRecordState();
}
System.out.println();
System.out.println("### Trigger delay");
System.out.println();
System.out.println(" Name | 1% delayed | 20% delayed | 50% delayed | 80% delayed | 99% delayed");
System.out.println("---------- | ---------- | ----------- | ----------- | ----------- | -----------");
for (BenchStats benchStats : results.values()) {
benchStats.printTriggerDelay();
}
}
@SuppressForbidden(reason = "benchmark")
private static | WatcherScheduleEngineBenchmark |
java | apache__camel | dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/DebeziumPostgresEndpointBuilderFactory.java | {
"start": 118084,
"end": 123282
} | class ____ should be used to
* determine the topic name for data change, schema change, transaction,
* heartbeat event etc.
*
* The option is a: <code>java.lang.String</code> type.
*
* Default: io.debezium.schema.SchemaTopicNamingStrategy
* Group: postgres
*
* @param topicNamingStrategy the value to set
* @return the dsl builder
*/
default DebeziumPostgresEndpointBuilder topicNamingStrategy(String topicNamingStrategy) {
doSetProperty("topicNamingStrategy", topicNamingStrategy);
return this;
}
/**
* Topic prefix that identifies and provides a namespace for the
* particular database server/cluster is capturing changes. The topic
* prefix should be unique across all other connectors, since it is used
* as a prefix for all Kafka topic names that receive events emitted by
* this connector. Only alphanumeric characters, hyphens, dots and
* underscores must be accepted.
*
* The option is a: <code>java.lang.String</code> type.
*
* Required: true
* Group: postgres
*
* @param topicPrefix the value to set
* @return the dsl builder
*/
default DebeziumPostgresEndpointBuilder topicPrefix(String topicPrefix) {
doSetProperty("topicPrefix", topicPrefix);
return this;
}
/**
* Class to make transaction context & transaction struct/schemas.
*
* The option is a: <code>java.lang.String</code> type.
*
* Default:
* io.debezium.pipeline.txmetadata.DefaultTransactionMetadataFactory
* Group: postgres
*
* @param transactionMetadataFactory the value to set
* @return the dsl builder
*/
default DebeziumPostgresEndpointBuilder transactionMetadataFactory(String transactionMetadataFactory) {
doSetProperty("transactionMetadataFactory", transactionMetadataFactory);
return this;
}
/**
* Specify the constant that will be provided by Debezium to indicate
* that the original value is a toasted value not provided by the
* database. If starts with 'hex:' prefix it is expected that the rest
* of the string represents hexadecimal encoded octets.
*
* The option is a: <code>java.lang.String</code> type.
*
* Default: __debezium_unavailable_value
* Group: postgres
*
* @param unavailableValuePlaceholder the value to set
* @return the dsl builder
*/
default DebeziumPostgresEndpointBuilder unavailableValuePlaceholder(String unavailableValuePlaceholder) {
doSetProperty("unavailableValuePlaceholder", unavailableValuePlaceholder);
return this;
}
/**
* Specify how often (in ms) the xmin will be fetched from the
* replication slot. This xmin value is exposed by the slot which gives
* a lower bound of where a new replication slot could start from. The
* lower the value, the more likely this value is to be the current
* 'true' value, but the bigger the performance cost. The bigger the
* value, the less likely this value is to be the current 'true' value,
* but the lower the performance penalty. The default is set to 0 ms,
* which disables tracking xmin.
*
* The option is a: <code>long</code> type.
*
* Default: 0ms
* Group: postgres
*
* @param xminFetchIntervalMs the value to set
* @return the dsl builder
*/
default DebeziumPostgresEndpointBuilder xminFetchIntervalMs(long xminFetchIntervalMs) {
doSetProperty("xminFetchIntervalMs", xminFetchIntervalMs);
return this;
}
/**
* Specify how often (in ms) the xmin will be fetched from the
* replication slot. This xmin value is exposed by the slot which gives
* a lower bound of where a new replication slot could start from. The
* lower the value, the more likely this value is to be the current
* 'true' value, but the bigger the performance cost. The bigger the
* value, the less likely this value is to be the current 'true' value,
* but the lower the performance penalty. The default is set to 0 ms,
* which disables tracking xmin.
*
* The option will be converted to a <code>long</code> type.
*
* Default: 0ms
* Group: postgres
*
* @param xminFetchIntervalMs the value to set
* @return the dsl builder
*/
default DebeziumPostgresEndpointBuilder xminFetchIntervalMs(String xminFetchIntervalMs) {
doSetProperty("xminFetchIntervalMs", xminFetchIntervalMs);
return this;
}
}
/**
* Advanced builder for endpoint for the Debezium PostgresSQL Connector component.
*/
public | that |
java | apache__camel | core/camel-core-model/src/generated/java/org/apache/camel/model/Resilience4jConfigurationDefinitionConfigurer.java | {
"start": 715,
"end": 13542
} | class ____ extends org.apache.camel.support.component.PropertyConfigurerSupport implements GeneratedPropertyConfigurer, ExtendedPropertyConfigurerGetter {
private static final Map<String, Object> ALL_OPTIONS;
static {
Map<String, Object> map = new CaseInsensitiveMap();
map.put("AutomaticTransitionFromOpenToHalfOpenEnabled", java.lang.String.class);
map.put("BulkheadEnabled", java.lang.String.class);
map.put("BulkheadMaxConcurrentCalls", java.lang.String.class);
map.put("BulkheadMaxWaitDuration", java.lang.String.class);
map.put("CircuitBreaker", java.lang.String.class);
map.put("Config", java.lang.String.class);
map.put("FailureRateThreshold", java.lang.String.class);
map.put("Id", java.lang.String.class);
map.put("IgnoreExceptions", java.util.List.class);
map.put("MicrometerEnabled", java.lang.String.class);
map.put("MinimumNumberOfCalls", java.lang.String.class);
map.put("PermittedNumberOfCallsInHalfOpenState", java.lang.String.class);
map.put("RecordExceptions", java.util.List.class);
map.put("SlidingWindowSize", java.lang.String.class);
map.put("SlidingWindowType", java.lang.String.class);
map.put("SlowCallDurationThreshold", java.lang.String.class);
map.put("SlowCallRateThreshold", java.lang.String.class);
map.put("ThrowExceptionWhenHalfOpenOrOpenState", java.lang.String.class);
map.put("TimeoutCancelRunningFuture", java.lang.String.class);
map.put("TimeoutDuration", java.lang.String.class);
map.put("TimeoutEnabled", java.lang.String.class);
map.put("TimeoutExecutorService", java.lang.String.class);
map.put("WaitDurationInOpenState", java.lang.String.class);
map.put("WritableStackTraceEnabled", java.lang.String.class);
ALL_OPTIONS = map;
}
@Override
public boolean configure(CamelContext camelContext, Object obj, String name, Object value, boolean ignoreCase) {
org.apache.camel.model.Resilience4jConfigurationDefinition target = (org.apache.camel.model.Resilience4jConfigurationDefinition) obj;
switch (ignoreCase ? name.toLowerCase() : name) {
case "automatictransitionfromopentohalfopenenabled":
case "automaticTransitionFromOpenToHalfOpenEnabled": target.setAutomaticTransitionFromOpenToHalfOpenEnabled(property(camelContext, java.lang.String.class, value)); return true;
case "bulkheadenabled":
case "bulkheadEnabled": target.setBulkheadEnabled(property(camelContext, java.lang.String.class, value)); return true;
case "bulkheadmaxconcurrentcalls":
case "bulkheadMaxConcurrentCalls": target.setBulkheadMaxConcurrentCalls(property(camelContext, java.lang.String.class, value)); return true;
case "bulkheadmaxwaitduration":
case "bulkheadMaxWaitDuration": target.setBulkheadMaxWaitDuration(property(camelContext, java.lang.String.class, value)); return true;
case "circuitbreaker":
case "circuitBreaker": target.setCircuitBreaker(property(camelContext, java.lang.String.class, value)); return true;
case "config": target.setConfig(property(camelContext, java.lang.String.class, value)); return true;
case "failureratethreshold":
case "failureRateThreshold": target.setFailureRateThreshold(property(camelContext, java.lang.String.class, value)); return true;
case "id": target.setId(property(camelContext, java.lang.String.class, value)); return true;
case "ignoreexceptions":
case "ignoreExceptions": target.setIgnoreExceptions(property(camelContext, java.util.List.class, value)); return true;
case "micrometerenabled":
case "micrometerEnabled": target.setMicrometerEnabled(property(camelContext, java.lang.String.class, value)); return true;
case "minimumnumberofcalls":
case "minimumNumberOfCalls": target.setMinimumNumberOfCalls(property(camelContext, java.lang.String.class, value)); return true;
case "permittednumberofcallsinhalfopenstate":
case "permittedNumberOfCallsInHalfOpenState": target.setPermittedNumberOfCallsInHalfOpenState(property(camelContext, java.lang.String.class, value)); return true;
case "recordexceptions":
case "recordExceptions": target.setRecordExceptions(property(camelContext, java.util.List.class, value)); return true;
case "slidingwindowsize":
case "slidingWindowSize": target.setSlidingWindowSize(property(camelContext, java.lang.String.class, value)); return true;
case "slidingwindowtype":
case "slidingWindowType": target.setSlidingWindowType(property(camelContext, java.lang.String.class, value)); return true;
case "slowcalldurationthreshold":
case "slowCallDurationThreshold": target.setSlowCallDurationThreshold(property(camelContext, java.lang.String.class, value)); return true;
case "slowcallratethreshold":
case "slowCallRateThreshold": target.setSlowCallRateThreshold(property(camelContext, java.lang.String.class, value)); return true;
case "throwexceptionwhenhalfopenoropenstate":
case "throwExceptionWhenHalfOpenOrOpenState": target.setThrowExceptionWhenHalfOpenOrOpenState(property(camelContext, java.lang.String.class, value)); return true;
case "timeoutcancelrunningfuture":
case "timeoutCancelRunningFuture": target.setTimeoutCancelRunningFuture(property(camelContext, java.lang.String.class, value)); return true;
case "timeoutduration":
case "timeoutDuration": target.setTimeoutDuration(property(camelContext, java.lang.String.class, value)); return true;
case "timeoutenabled":
case "timeoutEnabled": target.setTimeoutEnabled(property(camelContext, java.lang.String.class, value)); return true;
case "timeoutexecutorservice":
case "timeoutExecutorService": target.setTimeoutExecutorService(property(camelContext, java.lang.String.class, value)); return true;
case "waitdurationinopenstate":
case "waitDurationInOpenState": target.setWaitDurationInOpenState(property(camelContext, java.lang.String.class, value)); return true;
case "writablestacktraceenabled":
case "writableStackTraceEnabled": target.setWritableStackTraceEnabled(property(camelContext, java.lang.String.class, value)); return true;
default: return false;
}
}
@Override
public Map<String, Object> getAllOptions(Object target) {
return ALL_OPTIONS;
}
@Override
public Class<?> getOptionType(String name, boolean ignoreCase) {
switch (ignoreCase ? name.toLowerCase() : name) {
case "automatictransitionfromopentohalfopenenabled":
case "automaticTransitionFromOpenToHalfOpenEnabled": return java.lang.String.class;
case "bulkheadenabled":
case "bulkheadEnabled": return java.lang.String.class;
case "bulkheadmaxconcurrentcalls":
case "bulkheadMaxConcurrentCalls": return java.lang.String.class;
case "bulkheadmaxwaitduration":
case "bulkheadMaxWaitDuration": return java.lang.String.class;
case "circuitbreaker":
case "circuitBreaker": return java.lang.String.class;
case "config": return java.lang.String.class;
case "failureratethreshold":
case "failureRateThreshold": return java.lang.String.class;
case "id": return java.lang.String.class;
case "ignoreexceptions":
case "ignoreExceptions": return java.util.List.class;
case "micrometerenabled":
case "micrometerEnabled": return java.lang.String.class;
case "minimumnumberofcalls":
case "minimumNumberOfCalls": return java.lang.String.class;
case "permittednumberofcallsinhalfopenstate":
case "permittedNumberOfCallsInHalfOpenState": return java.lang.String.class;
case "recordexceptions":
case "recordExceptions": return java.util.List.class;
case "slidingwindowsize":
case "slidingWindowSize": return java.lang.String.class;
case "slidingwindowtype":
case "slidingWindowType": return java.lang.String.class;
case "slowcalldurationthreshold":
case "slowCallDurationThreshold": return java.lang.String.class;
case "slowcallratethreshold":
case "slowCallRateThreshold": return java.lang.String.class;
case "throwexceptionwhenhalfopenoropenstate":
case "throwExceptionWhenHalfOpenOrOpenState": return java.lang.String.class;
case "timeoutcancelrunningfuture":
case "timeoutCancelRunningFuture": return java.lang.String.class;
case "timeoutduration":
case "timeoutDuration": return java.lang.String.class;
case "timeoutenabled":
case "timeoutEnabled": return java.lang.String.class;
case "timeoutexecutorservice":
case "timeoutExecutorService": return java.lang.String.class;
case "waitdurationinopenstate":
case "waitDurationInOpenState": return java.lang.String.class;
case "writablestacktraceenabled":
case "writableStackTraceEnabled": return java.lang.String.class;
default: return null;
}
}
@Override
public Object getOptionValue(Object obj, String name, boolean ignoreCase) {
org.apache.camel.model.Resilience4jConfigurationDefinition target = (org.apache.camel.model.Resilience4jConfigurationDefinition) obj;
switch (ignoreCase ? name.toLowerCase() : name) {
case "automatictransitionfromopentohalfopenenabled":
case "automaticTransitionFromOpenToHalfOpenEnabled": return target.getAutomaticTransitionFromOpenToHalfOpenEnabled();
case "bulkheadenabled":
case "bulkheadEnabled": return target.getBulkheadEnabled();
case "bulkheadmaxconcurrentcalls":
case "bulkheadMaxConcurrentCalls": return target.getBulkheadMaxConcurrentCalls();
case "bulkheadmaxwaitduration":
case "bulkheadMaxWaitDuration": return target.getBulkheadMaxWaitDuration();
case "circuitbreaker":
case "circuitBreaker": return target.getCircuitBreaker();
case "config": return target.getConfig();
case "failureratethreshold":
case "failureRateThreshold": return target.getFailureRateThreshold();
case "id": return target.getId();
case "ignoreexceptions":
case "ignoreExceptions": return target.getIgnoreExceptions();
case "micrometerenabled":
case "micrometerEnabled": return target.getMicrometerEnabled();
case "minimumnumberofcalls":
case "minimumNumberOfCalls": return target.getMinimumNumberOfCalls();
case "permittednumberofcallsinhalfopenstate":
case "permittedNumberOfCallsInHalfOpenState": return target.getPermittedNumberOfCallsInHalfOpenState();
case "recordexceptions":
case "recordExceptions": return target.getRecordExceptions();
case "slidingwindowsize":
case "slidingWindowSize": return target.getSlidingWindowSize();
case "slidingwindowtype":
case "slidingWindowType": return target.getSlidingWindowType();
case "slowcalldurationthreshold":
case "slowCallDurationThreshold": return target.getSlowCallDurationThreshold();
case "slowcallratethreshold":
case "slowCallRateThreshold": return target.getSlowCallRateThreshold();
case "throwexceptionwhenhalfopenoropenstate":
case "throwExceptionWhenHalfOpenOrOpenState": return target.getThrowExceptionWhenHalfOpenOrOpenState();
case "timeoutcancelrunningfuture":
case "timeoutCancelRunningFuture": return target.getTimeoutCancelRunningFuture();
case "timeoutduration":
case "timeoutDuration": return target.getTimeoutDuration();
case "timeoutenabled":
case "timeoutEnabled": return target.getTimeoutEnabled();
case "timeoutexecutorservice":
case "timeoutExecutorService": return target.getTimeoutExecutorService();
case "waitdurationinopenstate":
case "waitDurationInOpenState": return target.getWaitDurationInOpenState();
case "writablestacktraceenabled":
case "writableStackTraceEnabled": return target.getWritableStackTraceEnabled();
default: return null;
}
}
@Override
public Object getCollectionValueType(Object target, String name, boolean ignoreCase) {
switch (ignoreCase ? name.toLowerCase() : name) {
case "ignoreexceptions":
case "ignoreExceptions": return java.lang.String.class;
case "recordexceptions":
case "recordExceptions": return java.lang.String.class;
default: return null;
}
}
}
| Resilience4jConfigurationDefinitionConfigurer |
java | apache__maven | its/core-it-support/core-it-plugins/maven-it-plugin-plexus-lifecycle/src/main/java/org/apache/maven/its/plugins/plexuslifecycle/MojoWithPlexusLifecycle.java | {
"start": 1539,
"end": 2129
} | class ____ extends AbstractMojo implements Contextualizable, Disposable {
/**
*/
@Component
private FakeComponent fakeComponent;
public void execute() throws MojoExecutionException, MojoFailureException {
getLog().info("execute MojoWithPlexusLifecycle");
fakeComponent.doNothing();
}
public void dispose() {
getLog().info("MojoWithPlexusLifecycle :: dispose");
}
public void contextualize(Context context) throws ContextException {
getLog().info("MojoWithPlexusLifecycle :: contextualize");
}
}
| MojoWithPlexusLifecycle |
java | spring-projects__spring-framework | spring-r2dbc/src/main/java/org/springframework/r2dbc/core/binding/BindMarkersFactoryResolver.java | {
"start": 2658,
"end": 2815
} | interface ____ discovered through Spring's
* {@link SpringFactoriesLoader} mechanism.
*
* @see SpringFactoriesLoader
*/
@FunctionalInterface
public | are |
java | spring-projects__spring-framework | spring-core/src/test/java/org/springframework/core/annotation/AnnotationBackCompatibilityTests.java | {
"start": 2148,
"end": 2279
} | interface ____ {
}
@Retention(RetentionPolicy.RUNTIME)
@TestAnnotation("testAndMetaTest")
@MetaTestAnnotation
@ | MetaTestAnnotation |
java | apache__camel | components/camel-test/camel-test-spring-junit5/src/test/java/org/apache/camel/test/spring/SpringTestExecutionListenerSorterTest.java | {
"start": 1435,
"end": 2263
} | class ____ {
@Test
void getPrecedencesForRegisteredClassesShouldReturnCorrectOrder() {
List<Class<?>> listenersInExpectedOrder = new ArrayList<>();
listenersInExpectedOrder.add(CamelSpringTestContextLoaderTestExecutionListener.class);
listenersInExpectedOrder.add(StopWatchTestExecutionListener.class);
List<Class<?>> listenersSortedByPrecedence = new ArrayList<>(listenersInExpectedOrder);
listenersSortedByPrecedence.sort(Comparator.comparingInt(SpringTestExecutionListenerSorter::getPrecedence));
assertEquals(listenersInExpectedOrder, listenersSortedByPrecedence);
}
@Test
void getPrecedenceForWrongClassShouldThrow() {
assertThrows(IllegalArgumentException.class, () -> getPrecedence(Object.class));
}
}
| SpringTestExecutionListenerSorterTest |
java | apache__kafka | server-common/src/main/java/org/apache/kafka/server/util/json/DecodeJson.java | {
"start": 2313,
"end": 2654
} | class ____ implements DecodeJson<Integer> {
@Override
public Integer decode(JsonNode node) throws JsonMappingException {
if (node.isInt()) {
return node.intValue();
}
throw throwJsonMappingException(Integer.class.getSimpleName(), node);
}
}
final | DecodeInteger |
java | redisson__redisson | redisson/src/main/java/org/redisson/api/SyncArgs.java | {
"start": 893,
"end": 1939
} | interface ____<T> {
/**
* Sets the synchronization mode to be used for current operation.
* <p>
* Default value is SyncMode.AUTO
*
* @param syncMode the synchronization mode
* @return arguments object
* @see SyncMode
*/
T syncMode(SyncMode syncMode);
/**
* Sets the behavior when synchronization with secondary nodes fails.
* <p>
* Default value is SyncFailureMode.LOG_WARNING
*
* @param syncFailureMode the failure handling mode
* @return The current instance for method chaining
* @see SyncFailureMode
*/
T syncFailureMode(SyncFailureMode syncFailureMode);
/**
* Sets the timeout duration for synchronization of the current operation.
* Defines how long the system will wait for acknowledgment from secondary nodes.
* <p>
* Default value is 1 second.
*
* @param timeout The maximum time to wait for synchronization to complete
* @return arguments object
*/
T syncTimeout(Duration timeout);
}
| SyncArgs |
java | apache__flink | flink-core/src/test/java/org/apache/flink/api/common/operators/util/TestNonRichOutputFormat.java | {
"start": 1096,
"end": 1554
} | class ____ implements OutputFormat<String> {
public final List<String> output = new LinkedList<>();
@Override
public void configure(Configuration parameters) {}
@Override
public void open(InitializationContext context) {}
@Override
public void close() {}
@Override
public void writeRecord(String record) {
output.add(record);
}
public void clear() {
output.clear();
}
}
| TestNonRichOutputFormat |
java | google__dagger | javatests/dagger/hilt/android/processor/internal/aggregateddeps/TestInstallInTest.java | {
"start": 7739,
"end": 8252
} | class ____ {}");
Source testInstallInModule =
HiltCompilerTests.javaSource(
"test.TestInstallInModule",
"package test;",
"",
"import dagger.Module;",
"import dagger.hilt.components.SingletonComponent;",
"import dagger.hilt.testing.TestInstallIn;",
"",
"@Module",
"@TestInstallIn(",
" components = SingletonComponent.class,",
" replaces = Foo.class)",
" | Foo |
java | assertj__assertj-core | assertj-core/src/test/java/org/assertj/core/internal/ComparablesBaseTest.java | {
"start": 986,
"end": 1327
} | class ____ testing <code>{@link Comparables}</code>, set up an instance with {@link StandardComparisonStrategy} and another
* with {@link ComparatorBasedComparisonStrategy}.
* <p>
* Is in <code>org.assertj.core.internal</code> package to be able to set {@link Comparables#failures} appropriately.
*
* @author Joel Costigliola
*/
public | for |
java | google__truth | core/src/main/java/com/google/common/truth/ActualValueInference.java | {
"start": 46869,
"end": 47486
} | class ____ extends ClassVisitor {
/**
* The method to visit.
*
* <p>We don't really <i>need</i> the method name: We could just visit the whole class, since we
* look at data for only the relevant line. But it's nice not to process the whole class,
* especially during debugging. (And it might also help avoid triggering any bugs in the
* inference code.)
*/
private final String methodNameToVisit;
private final ImmutableSetMultimap.Builder<Integer, StackEntry> actualValueAtLine =
ImmutableSetMultimap.builder();
// TODO(cpovirk): Can the | InferenceClassVisitor |
java | apache__dubbo | dubbo-common/src/main/java/org/apache/dubbo/rpc/model/StubServiceDescriptor.java | {
"start": 1348,
"end": 4972
} | class ____ implements ServiceDescriptor {
private final String interfaceName;
private final Class<?> serviceInterfaceClass;
// to accelerate search
private final Map<String, List<MethodDescriptor>> methods = new HashMap<>();
private final Map<String, Map<String, MethodDescriptor>> descToMethods = new HashMap<>();
private final ConcurrentNavigableMap<String, FullServiceDefinition> serviceDefinitions =
new ConcurrentSkipListMap<>();
public StubServiceDescriptor(String interfaceName, Class<?> interfaceClass) {
this.interfaceName = interfaceName;
this.serviceInterfaceClass = interfaceClass;
}
public void addMethod(MethodDescriptor methodDescriptor) {
methods.put(methodDescriptor.getMethodName(), Collections.singletonList(methodDescriptor));
Map<String, MethodDescriptor> descMap =
descToMethods.computeIfAbsent(methodDescriptor.getMethodName(), k -> new HashMap<>());
descMap.put(methodDescriptor.getParamDesc(), methodDescriptor);
}
public FullServiceDefinition getFullServiceDefinition(String serviceKey) {
return serviceDefinitions.computeIfAbsent(
serviceKey,
(k) -> ServiceDefinitionBuilder.buildFullDefinition(serviceInterfaceClass, Collections.emptyMap()));
}
public String getInterfaceName() {
return interfaceName;
}
public Class<?> getServiceInterfaceClass() {
return serviceInterfaceClass;
}
public Set<MethodDescriptor> getAllMethods() {
Set<MethodDescriptor> methodModels = new HashSet<>();
methods.forEach((k, v) -> methodModels.addAll(v));
return methodModels;
}
/**
* Does not use Optional as return type to avoid potential performance decrease.
*
* @param methodName
* @param params
* @return
*/
public MethodDescriptor getMethod(String methodName, String params) {
Map<String, MethodDescriptor> methods = descToMethods.get(methodName);
if (CollectionUtils.isNotEmptyMap(methods)) {
return methods.get(params);
}
return null;
}
/**
* Does not use Optional as return type to avoid potential performance decrease.
*
* @param methodName
* @param paramTypes
* @return
*/
public MethodDescriptor getMethod(String methodName, Class<?>[] paramTypes) {
List<MethodDescriptor> methodModels = methods.get(methodName);
if (CollectionUtils.isNotEmpty(methodModels)) {
for (MethodDescriptor descriptor : methodModels) {
if (Arrays.equals(paramTypes, descriptor.getParameterClasses())) {
return descriptor;
}
}
}
return null;
}
public List<MethodDescriptor> getMethods(String methodName) {
return methods.get(methodName);
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
StubServiceDescriptor that = (StubServiceDescriptor) o;
return Objects.equals(interfaceName, that.interfaceName)
&& Objects.equals(serviceInterfaceClass, that.serviceInterfaceClass)
&& Objects.equals(methods, that.methods)
&& Objects.equals(descToMethods, that.descToMethods);
}
@Override
public int hashCode() {
return Objects.hash(interfaceName, serviceInterfaceClass, methods, descToMethods);
}
}
| StubServiceDescriptor |
java | FasterXML__jackson-databind | src/test/java/tools/jackson/databind/convert/ConvertingAbstractSerializer795Test.java | {
"start": 1044,
"end": 1398
} | class ____ {
@JsonProperty
@JsonDeserialize(converter = AbstractCustomTypeDeserializationConverter.class)
protected AbstractCustomType customField;
public AbstractCustomTypeUser(@JsonProperty("customField") AbstractCustomType cf) {
this.customField = cf;
}
}
public static | AbstractCustomTypeUser |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/sql/exec/spi/JdbcMutation.java | {
"start": 280,
"end": 332
} | interface ____ extends PrimaryOperation {
}
| JdbcMutation |
java | FasterXML__jackson-databind | src/test/java/tools/jackson/databind/contextual/ContextualSerializationTest.java | {
"start": 1271,
"end": 1514
} | class ____
{
protected final String _value;
public ContextualBean(String s) { _value = s; }
@Prefix("see:")
public String getValue() { return _value; }
}
// For [JACKSON-569]
static | ContextualBean |
java | elastic__elasticsearch | x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/DeltaIntGroupingAggregatorFunction.java | {
"start": 1116,
"end": 16470
} | class ____ implements GroupingAggregatorFunction {
private static final List<IntermediateStateDesc> INTERMEDIATE_STATE_DESC = List.of(
new IntermediateStateDesc("samples", ElementType.LONG),
new IntermediateStateDesc("timestamps", ElementType.LONG),
new IntermediateStateDesc("values", ElementType.INT) );
private final DeltaIntAggregator.IntDeltaGroupingState state;
private final List<Integer> channels;
private final DriverContext driverContext;
public DeltaIntGroupingAggregatorFunction(List<Integer> channels,
DeltaIntAggregator.IntDeltaGroupingState state, DriverContext driverContext) {
this.channels = channels;
this.state = state;
this.driverContext = driverContext;
}
public static DeltaIntGroupingAggregatorFunction create(List<Integer> channels,
DriverContext driverContext) {
return new DeltaIntGroupingAggregatorFunction(channels, DeltaIntAggregator.initGrouping(driverContext), driverContext);
}
public static List<IntermediateStateDesc> intermediateStateDesc() {
return INTERMEDIATE_STATE_DESC;
}
@Override
public int intermediateBlockCount() {
return INTERMEDIATE_STATE_DESC.size();
}
@Override
public GroupingAggregatorFunction.AddInput prepareProcessRawInputPage(SeenGroupIds seenGroupIds,
Page page) {
IntBlock valueBlock = page.getBlock(channels.get(0));
LongBlock timestampBlock = page.getBlock(channels.get(1));
IntVector valueVector = valueBlock.asVector();
if (valueVector == null) {
maybeEnableGroupIdTracking(seenGroupIds, valueBlock, timestampBlock);
return new GroupingAggregatorFunction.AddInput() {
@Override
public void add(int positionOffset, IntArrayBlock groupIds) {
addRawInput(positionOffset, groupIds, valueBlock, timestampBlock);
}
@Override
public void add(int positionOffset, IntBigArrayBlock groupIds) {
addRawInput(positionOffset, groupIds, valueBlock, timestampBlock);
}
@Override
public void add(int positionOffset, IntVector groupIds) {
addRawInput(positionOffset, groupIds, valueBlock, timestampBlock);
}
@Override
public void close() {
}
};
}
LongVector timestampVector = timestampBlock.asVector();
if (timestampVector == null) {
maybeEnableGroupIdTracking(seenGroupIds, valueBlock, timestampBlock);
return new GroupingAggregatorFunction.AddInput() {
@Override
public void add(int positionOffset, IntArrayBlock groupIds) {
addRawInput(positionOffset, groupIds, valueBlock, timestampBlock);
}
@Override
public void add(int positionOffset, IntBigArrayBlock groupIds) {
addRawInput(positionOffset, groupIds, valueBlock, timestampBlock);
}
@Override
public void add(int positionOffset, IntVector groupIds) {
addRawInput(positionOffset, groupIds, valueBlock, timestampBlock);
}
@Override
public void close() {
}
};
}
return new GroupingAggregatorFunction.AddInput() {
@Override
public void add(int positionOffset, IntArrayBlock groupIds) {
addRawInput(positionOffset, groupIds, valueVector, timestampVector);
}
@Override
public void add(int positionOffset, IntBigArrayBlock groupIds) {
addRawInput(positionOffset, groupIds, valueVector, timestampVector);
}
@Override
public void add(int positionOffset, IntVector groupIds) {
addRawInput(positionOffset, groupIds, valueVector, timestampVector);
}
@Override
public void close() {
}
};
}
private void addRawInput(int positionOffset, IntArrayBlock groups, IntBlock valueBlock,
LongBlock timestampBlock) {
for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) {
if (groups.isNull(groupPosition)) {
continue;
}
int valuesPosition = groupPosition + positionOffset;
if (valueBlock.isNull(valuesPosition)) {
continue;
}
if (timestampBlock.isNull(valuesPosition)) {
continue;
}
int groupStart = groups.getFirstValueIndex(groupPosition);
int groupEnd = groupStart + groups.getValueCount(groupPosition);
for (int g = groupStart; g < groupEnd; g++) {
int groupId = groups.getInt(g);
int valueStart = valueBlock.getFirstValueIndex(valuesPosition);
int valueEnd = valueStart + valueBlock.getValueCount(valuesPosition);
for (int valueOffset = valueStart; valueOffset < valueEnd; valueOffset++) {
int valueValue = valueBlock.getInt(valueOffset);
int timestampStart = timestampBlock.getFirstValueIndex(valuesPosition);
int timestampEnd = timestampStart + timestampBlock.getValueCount(valuesPosition);
for (int timestampOffset = timestampStart; timestampOffset < timestampEnd; timestampOffset++) {
long timestampValue = timestampBlock.getLong(timestampOffset);
DeltaIntAggregator.combine(state, groupId, valueValue, timestampValue);
}
}
}
}
}
private void addRawInput(int positionOffset, IntArrayBlock groups, IntVector valueVector,
LongVector timestampVector) {
for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) {
if (groups.isNull(groupPosition)) {
continue;
}
int valuesPosition = groupPosition + positionOffset;
int groupStart = groups.getFirstValueIndex(groupPosition);
int groupEnd = groupStart + groups.getValueCount(groupPosition);
for (int g = groupStart; g < groupEnd; g++) {
int groupId = groups.getInt(g);
int valueValue = valueVector.getInt(valuesPosition);
long timestampValue = timestampVector.getLong(valuesPosition);
DeltaIntAggregator.combine(state, groupId, valueValue, timestampValue);
}
}
}
@Override
public void addIntermediateInput(int positionOffset, IntArrayBlock groups, Page page) {
state.enableGroupIdTracking(new SeenGroupIds.Empty());
assert channels.size() == intermediateBlockCount();
Block samplesUncast = page.getBlock(channels.get(0));
if (samplesUncast.areAllValuesNull()) {
return;
}
LongBlock samples = (LongBlock) samplesUncast;
Block timestampsUncast = page.getBlock(channels.get(1));
if (timestampsUncast.areAllValuesNull()) {
return;
}
LongBlock timestamps = (LongBlock) timestampsUncast;
Block valuesUncast = page.getBlock(channels.get(2));
if (valuesUncast.areAllValuesNull()) {
return;
}
IntBlock values = (IntBlock) valuesUncast;
assert samples.getPositionCount() == timestamps.getPositionCount() && samples.getPositionCount() == values.getPositionCount();
for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) {
if (groups.isNull(groupPosition)) {
continue;
}
int groupStart = groups.getFirstValueIndex(groupPosition);
int groupEnd = groupStart + groups.getValueCount(groupPosition);
for (int g = groupStart; g < groupEnd; g++) {
int groupId = groups.getInt(g);
int valuesPosition = groupPosition + positionOffset;
DeltaIntAggregator.combineIntermediate(state, groupId, samples, timestamps, values, valuesPosition);
}
}
}
private void addRawInput(int positionOffset, IntBigArrayBlock groups, IntBlock valueBlock,
LongBlock timestampBlock) {
for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) {
if (groups.isNull(groupPosition)) {
continue;
}
int valuesPosition = groupPosition + positionOffset;
if (valueBlock.isNull(valuesPosition)) {
continue;
}
if (timestampBlock.isNull(valuesPosition)) {
continue;
}
int groupStart = groups.getFirstValueIndex(groupPosition);
int groupEnd = groupStart + groups.getValueCount(groupPosition);
for (int g = groupStart; g < groupEnd; g++) {
int groupId = groups.getInt(g);
int valueStart = valueBlock.getFirstValueIndex(valuesPosition);
int valueEnd = valueStart + valueBlock.getValueCount(valuesPosition);
for (int valueOffset = valueStart; valueOffset < valueEnd; valueOffset++) {
int valueValue = valueBlock.getInt(valueOffset);
int timestampStart = timestampBlock.getFirstValueIndex(valuesPosition);
int timestampEnd = timestampStart + timestampBlock.getValueCount(valuesPosition);
for (int timestampOffset = timestampStart; timestampOffset < timestampEnd; timestampOffset++) {
long timestampValue = timestampBlock.getLong(timestampOffset);
DeltaIntAggregator.combine(state, groupId, valueValue, timestampValue);
}
}
}
}
}
private void addRawInput(int positionOffset, IntBigArrayBlock groups, IntVector valueVector,
LongVector timestampVector) {
for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) {
if (groups.isNull(groupPosition)) {
continue;
}
int valuesPosition = groupPosition + positionOffset;
int groupStart = groups.getFirstValueIndex(groupPosition);
int groupEnd = groupStart + groups.getValueCount(groupPosition);
for (int g = groupStart; g < groupEnd; g++) {
int groupId = groups.getInt(g);
int valueValue = valueVector.getInt(valuesPosition);
long timestampValue = timestampVector.getLong(valuesPosition);
DeltaIntAggregator.combine(state, groupId, valueValue, timestampValue);
}
}
}
@Override
public void addIntermediateInput(int positionOffset, IntBigArrayBlock groups, Page page) {
state.enableGroupIdTracking(new SeenGroupIds.Empty());
assert channels.size() == intermediateBlockCount();
Block samplesUncast = page.getBlock(channels.get(0));
if (samplesUncast.areAllValuesNull()) {
return;
}
LongBlock samples = (LongBlock) samplesUncast;
Block timestampsUncast = page.getBlock(channels.get(1));
if (timestampsUncast.areAllValuesNull()) {
return;
}
LongBlock timestamps = (LongBlock) timestampsUncast;
Block valuesUncast = page.getBlock(channels.get(2));
if (valuesUncast.areAllValuesNull()) {
return;
}
IntBlock values = (IntBlock) valuesUncast;
assert samples.getPositionCount() == timestamps.getPositionCount() && samples.getPositionCount() == values.getPositionCount();
for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) {
if (groups.isNull(groupPosition)) {
continue;
}
int groupStart = groups.getFirstValueIndex(groupPosition);
int groupEnd = groupStart + groups.getValueCount(groupPosition);
for (int g = groupStart; g < groupEnd; g++) {
int groupId = groups.getInt(g);
int valuesPosition = groupPosition + positionOffset;
DeltaIntAggregator.combineIntermediate(state, groupId, samples, timestamps, values, valuesPosition);
}
}
}
private void addRawInput(int positionOffset, IntVector groups, IntBlock valueBlock,
LongBlock timestampBlock) {
for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) {
int valuesPosition = groupPosition + positionOffset;
if (valueBlock.isNull(valuesPosition)) {
continue;
}
if (timestampBlock.isNull(valuesPosition)) {
continue;
}
int groupId = groups.getInt(groupPosition);
int valueStart = valueBlock.getFirstValueIndex(valuesPosition);
int valueEnd = valueStart + valueBlock.getValueCount(valuesPosition);
for (int valueOffset = valueStart; valueOffset < valueEnd; valueOffset++) {
int valueValue = valueBlock.getInt(valueOffset);
int timestampStart = timestampBlock.getFirstValueIndex(valuesPosition);
int timestampEnd = timestampStart + timestampBlock.getValueCount(valuesPosition);
for (int timestampOffset = timestampStart; timestampOffset < timestampEnd; timestampOffset++) {
long timestampValue = timestampBlock.getLong(timestampOffset);
DeltaIntAggregator.combine(state, groupId, valueValue, timestampValue);
}
}
}
}
private void addRawInput(int positionOffset, IntVector groups, IntVector valueVector,
LongVector timestampVector) {
for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) {
int valuesPosition = groupPosition + positionOffset;
int groupId = groups.getInt(groupPosition);
int valueValue = valueVector.getInt(valuesPosition);
long timestampValue = timestampVector.getLong(valuesPosition);
DeltaIntAggregator.combine(state, groupId, valueValue, timestampValue);
}
}
@Override
public void addIntermediateInput(int positionOffset, IntVector groups, Page page) {
state.enableGroupIdTracking(new SeenGroupIds.Empty());
assert channels.size() == intermediateBlockCount();
Block samplesUncast = page.getBlock(channels.get(0));
if (samplesUncast.areAllValuesNull()) {
return;
}
LongBlock samples = (LongBlock) samplesUncast;
Block timestampsUncast = page.getBlock(channels.get(1));
if (timestampsUncast.areAllValuesNull()) {
return;
}
LongBlock timestamps = (LongBlock) timestampsUncast;
Block valuesUncast = page.getBlock(channels.get(2));
if (valuesUncast.areAllValuesNull()) {
return;
}
IntBlock values = (IntBlock) valuesUncast;
assert samples.getPositionCount() == timestamps.getPositionCount() && samples.getPositionCount() == values.getPositionCount();
for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) {
int groupId = groups.getInt(groupPosition);
int valuesPosition = groupPosition + positionOffset;
DeltaIntAggregator.combineIntermediate(state, groupId, samples, timestamps, values, valuesPosition);
}
}
private void maybeEnableGroupIdTracking(SeenGroupIds seenGroupIds, IntBlock valueBlock,
LongBlock timestampBlock) {
if (valueBlock.mayHaveNulls()) {
state.enableGroupIdTracking(seenGroupIds);
}
if (timestampBlock.mayHaveNulls()) {
state.enableGroupIdTracking(seenGroupIds);
}
}
@Override
public void selectedMayContainUnseenGroups(SeenGroupIds seenGroupIds) {
state.enableGroupIdTracking(seenGroupIds);
}
@Override
public void evaluateIntermediate(Block[] blocks, int offset, IntVector selected) {
state.toIntermediate(blocks, offset, selected, driverContext);
}
@Override
public void evaluateFinal(Block[] blocks, int offset, IntVector selected,
GroupingAggregatorEvaluationContext ctx) {
blocks[offset] = DeltaIntAggregator.evaluateFinal(state, selected, ctx);
}
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append(getClass().getSimpleName()).append("[");
sb.append("channels=").append(channels);
sb.append("]");
return sb.toString();
}
@Override
public void close() {
state.close();
}
}
| DeltaIntGroupingAggregatorFunction |
java | apache__flink | flink-core/src/main/java/org/apache/flink/configuration/ReadableConfig.java | {
"start": 1159,
"end": 2418
} | interface ____ {
/**
* Reads a value using the metadata included in {@link ConfigOption}. Returns the {@link
* ConfigOption#defaultValue()} if value key not present in the configuration.
*
* @param option metadata of the option to read
* @param <T> type of the value to read
* @return read value or {@link ConfigOption#defaultValue()} if not found
* @see #getOptional(ConfigOption)
*/
<T> T get(ConfigOption<T> option);
/**
* Reads a value using the metadata included in {@link ConfigOption}. In contrast to {@link
* #get(ConfigOption)} returns {@link Optional#empty()} if value not present.
*
* @param option metadata of the option to read
* @param <T> type of the value to read
* @return read value or {@link Optional#empty()} if not found
* @see #get(ConfigOption)
*/
<T> Optional<T> getOptional(ConfigOption<T> option);
/**
* Converts the configuration items into a map of string key-value pairs.
*
* @return a map containing the configuration properties, where the keys are strings and the
* values are the corresponding configuration values in string format.
*/
@Internal
Map<String, String> toMap();
}
| ReadableConfig |
java | FasterXML__jackson-databind | src/test/java/tools/jackson/databind/jsontype/jdk/AbstractContainerTypingTest.java | {
"start": 1721,
"end": 2899
} | class ____ extends LinkedList<String> implements IDataValueList { }
/*
/**********************************************************
/* Test methods
/**********************************************************
*/
private final ObjectMapper MAPPER = newJsonMapper();
@Test
public void testAbstractLists() throws Exception
{
ListWrapper w = new ListWrapper();
w.list.add("x");
String json = MAPPER.writeValueAsString(w);
Object o = MAPPER.readValue(json, ListWrapper.class);
assertEquals(ListWrapper.class, o.getClass());
ListWrapper out = (ListWrapper) o;
assertNotNull(out.list);
assertEquals(1, out.list.size());
assertEquals("x", out.list.get(0));
}
@Test
public void testAbstractMaps() throws Exception
{
MapWrapper w = new MapWrapper();
w.map.put("key1", "name1");
String json = MAPPER.writeValueAsString(w);
Object o = MAPPER.readValue(json, MapWrapper.class);
assertEquals(MapWrapper.class, o.getClass());
MapWrapper out = (MapWrapper) o;
assertEquals(1, out.map.size());
}
}
| DataValueList |
java | elastic__elasticsearch | modules/lang-painless/src/main/java/org/elasticsearch/painless/LambdaBootstrap.java | {
"start": 5771,
"end": 6096
} | class ____ {
private final String name;
private final Type type;
private final String desc;
/**
* Converts incoming parameters into the name, type, and
* descriptor for the captured argument.
* @param count The captured argument count
* @param type The | Capture |
java | spring-projects__spring-security | config/src/test/java/org/springframework/security/config/annotation/web/configurers/oauth2/client/OidcLogoutConfigurerTests.java | {
"start": 16089,
"end": 16853
} | class ____ {
@Autowired(required = false)
MockWebServer web;
@Bean
ClientRegistration clientRegistration() {
if (this.web == null) {
return TestClientRegistrations.clientRegistration().build();
}
String issuer = this.web.url("/").toString();
return TestClientRegistrations.clientRegistration()
.issuerUri(issuer)
.jwkSetUri(issuer + "jwks")
.tokenUri(issuer + "token")
.userInfoUri(issuer + "user")
.scope("openid")
.build();
}
@Bean
ClientRegistrationRepository clientRegistrationRepository(ClientRegistration clientRegistration) {
return new InMemoryClientRegistrationRepository(clientRegistration);
}
}
@Configuration
@EnableWebSecurity
@Import(RegistrationConfig.class)
static | RegistrationConfig |
java | micronaut-projects__micronaut-core | context/src/main/java/io/micronaut/scheduling/processor/ScheduledMethodProcessor.java | {
"start": 2504,
"end": 11568
} | class ____ implements ExecutableMethodProcessor<Scheduled>, Closeable {
private static final Logger LOG = LoggerFactory.getLogger(TaskScheduler.class);
private static final String MEMBER_FIXED_RATE = "fixedRate";
private static final String MEMBER_INITIAL_DELAY = "initialDelay";
private static final String MEMBER_CRON = "cron";
private static final String MEMBER_ZONE_ID = "zoneId";
private static final String MEMBER_FIXED_DELAY = "fixedDelay";
private static final String MEMBER_SCHEDULER = "scheduler";
private static final String MEMBER_CONDITION = "condition";
private final BeanContext beanContext;
private final ConversionService conversionService;
private final Queue<ScheduledFuture<?>> scheduledTasks = new ConcurrentLinkedDeque<>();
private final List<ScheduledDefinition<?>> scheduledMethodsDefinitions = new ArrayList<>();
private final TaskExceptionHandler<?, ?> taskExceptionHandler;
/**
* @param beanContext The bean context for DI of beans annotated with @Inject
* @param conversionService To convert one type to another
* @param taskExceptionHandler The default task exception handler
*/
public ScheduledMethodProcessor(BeanContext beanContext, ConversionService conversionService, TaskExceptionHandler<?, ?> taskExceptionHandler) {
this.beanContext = beanContext;
this.conversionService = conversionService;
this.taskExceptionHandler = taskExceptionHandler;
}
@Override
public <B> void process(BeanDefinition<B> beanDefinition, ExecutableMethod<B, ?> method) {
scheduledMethodsDefinitions.add(new ScheduledDefinition(beanDefinition, method));
}
/**
* On startup event listener that schedules the active tasks.
* @param ignore The startup event.
*/
@EventListener
void scheduleTasks(StartupEvent ignore) {
scheduledMethodsDefinitions.parallelStream().forEach(this::scheduleTask);
scheduledMethodsDefinitions.clear();
}
private <B> void scheduleTask(ScheduledDefinition<B> scheduledDefinition) {
ExecutableMethod<B, ?> method = scheduledDefinition.method();
BeanDefinition<B> beanDefinition = scheduledDefinition.definition();
List<AnnotationValue<Scheduled>> scheduledAnnotations = method.getAnnotationValuesByType(Scheduled.class);
for (AnnotationValue<Scheduled> scheduledAnnotation : scheduledAnnotations) {
String fixedRate = scheduledAnnotation.stringValue(MEMBER_FIXED_RATE).orElse(null);
String initialDelayStr = scheduledAnnotation.stringValue(MEMBER_INITIAL_DELAY).orElse(null);
Duration initialDelay = null;
if (StringUtils.hasText(initialDelayStr)) {
initialDelay = conversionService.convert(initialDelayStr, Duration.class).orElseThrow(() ->
new SchedulerConfigurationException(method, "Invalid initial delay definition: " + initialDelayStr)
);
}
String scheduler = scheduledAnnotation.stringValue(MEMBER_SCHEDULER).orElse(TaskExecutors.SCHEDULED);
Optional<TaskScheduler> optionalTaskScheduler = beanContext
.findBean(TaskScheduler.class, Qualifiers.byName(scheduler));
if (optionalTaskScheduler.isEmpty()) {
optionalTaskScheduler = beanContext.findBean(ExecutorService.class, Qualifiers.byName(scheduler))
.filter(ScheduledExecutorService.class::isInstance)
.map(ScheduledExecutorTaskScheduler::new);
}
TaskScheduler taskScheduler = optionalTaskScheduler.orElseThrow(() -> new SchedulerConfigurationException(method, "No scheduler of type TaskScheduler configured for name: " + scheduler));
Runnable task = () -> {
try {
ExecutableBeanContextBinder binder = new DefaultExecutableBeanContextBinder();
BoundExecutable<B, ?> boundExecutable = binder.bind(method, beanContext);
B bean = beanContext.getBean(beanDefinition);
AnnotationValue<Scheduled> finalAnnotationValue = scheduledAnnotation;
if (finalAnnotationValue instanceof EvaluatedAnnotationValue<Scheduled> evaluated) {
finalAnnotationValue = evaluated.withArguments(bean, boundExecutable.getBoundArguments());
}
boolean shouldRun = finalAnnotationValue.booleanValue(MEMBER_CONDITION).orElse(true);
if (shouldRun) {
try {
boundExecutable.invoke(bean);
} catch (Throwable e) {
handleException(beanDefinition.getBeanType(), bean, e);
}
}
} catch (NoSuchBeanException noSuchBeanException) {
// ignore: a timing issue can occur when the context is being shutdown. If a scheduled job runs and the context
// is shutdown and available beans cleared then the bean is no longer available. The best thing to do here is just ignore the failure.
LOG.debug("Scheduled job skipped for context shutdown: {}.{}", beanDefinition.getBeanType().getSimpleName(), method.getDescription(true));
} catch (Exception e) {
TaskExceptionHandler<B, Throwable> finalHandler = findHandler(beanDefinition.getBeanType(), e);
finalHandler.handleCreationFailure(beanDefinition, e);
}
};
String cronExpr = scheduledAnnotation.stringValue(MEMBER_CRON).orElse(null);
String zoneIdStr = scheduledAnnotation.stringValue(MEMBER_ZONE_ID).orElse(null);
String fixedDelay = scheduledAnnotation.stringValue(MEMBER_FIXED_DELAY).orElse(null);
if (StringUtils.isNotEmpty(cronExpr)) {
if (LOG.isDebugEnabled()) {
LOG.debug("Scheduling cron task [{}] for method: {}", cronExpr, method);
}
ScheduledFuture<?> scheduledFuture = taskScheduler.schedule(cronExpr, zoneIdStr, task);
scheduledTasks.add(scheduledFuture);
} else if (StringUtils.isNotEmpty(fixedRate)) {
Optional<Duration> converted = conversionService.convert(fixedRate, Duration.class);
Duration duration = converted.orElseThrow(() ->
new SchedulerConfigurationException(method, "Invalid fixed rate definition: " + fixedRate)
);
if (LOG.isDebugEnabled()) {
LOG.debug("Scheduling fixed rate task [{}] for method: {}", duration, method);
}
ScheduledFuture<?> scheduledFuture = taskScheduler.scheduleAtFixedRate(initialDelay, duration, task);
scheduledTasks.add(scheduledFuture);
} else if (StringUtils.isNotEmpty(fixedDelay)) {
Optional<Duration> converted = conversionService.convert(fixedDelay, Duration.class);
Duration duration = converted.orElseThrow(() ->
new SchedulerConfigurationException(method, "Invalid fixed delay definition: " + fixedDelay)
);
if (LOG.isDebugEnabled()) {
LOG.debug("Scheduling fixed delay task [{}] for method: {}", duration, method);
}
ScheduledFuture<?> scheduledFuture = taskScheduler.scheduleWithFixedDelay(initialDelay, duration, task);
scheduledTasks.add(scheduledFuture);
} else if (initialDelay != null) {
ScheduledFuture<?> scheduledFuture = taskScheduler.schedule(initialDelay, task);
scheduledTasks.add(scheduledFuture);
} else {
throw new SchedulerConfigurationException(method, "Failed to schedule task. Invalid definition");
}
}
}
private <B> void handleException(Class<B> beanType, B bean, Throwable e) {
TaskExceptionHandler<B, Throwable> finalHandler = findHandler(beanType, e);
finalHandler.handle(bean, e);
}
@SuppressWarnings("unchecked")
private <B> TaskExceptionHandler<B, Throwable> findHandler(Class<B> beanType, Throwable e) {
return beanContext.findBean(Argument.of(TaskExceptionHandler.class, beanType, e.getClass()))
.orElse(this.taskExceptionHandler);
}
@Override
@PreDestroy
public void close() {
try {
for (ScheduledFuture<?> scheduledTask : scheduledTasks) {
if (!scheduledTask.isCancelled()) {
scheduledTask.cancel(false);
}
}
} finally {
scheduledTasks.clear();
}
}
private record ScheduledDefinition<B>(BeanDefinition<B> definition,
ExecutableMethod<B, ?> method) {
}
}
| ScheduledMethodProcessor |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/ThreadJoinLoopTest.java | {
"start": 7680,
"end": 11652
} | class ____ {
public void basicCase(Thread thread) throws InterruptedException {
thread.join();
}
public void inIf(Thread thread) {
try {
if (7 == 7) {
thread.join();
}
} catch (InterruptedException e) {
e.printStackTrace();
}
}
public void basicCaseTry(Thread thread) {
try {
thread.join();
} catch (InterruptedException e) {
e.printStackTrace();
}
}
public void basicCaseWhile(Thread thread, List<String> list) {
while (list.size() == 7) {
try {
thread.join();
} catch (InterruptedException e) {
e.printStackTrace();
}
}
}
public void basicCaseFor(Thread thread, List<String> list) {
for (int i = 0; i < list.size(); i++) {
try {
thread.join();
} catch (InterruptedException e) {
e.printStackTrace();
}
}
}
public void basicCaseForEach(Thread thread, List<String> list) {
for (String str : list) {
try {
thread.join();
} catch (InterruptedException e) {
e.printStackTrace();
}
}
}
public void multipleCatches(Thread thread, int[] arr) {
try {
thread.join();
int test = arr[10];
} catch (ArrayIndexOutOfBoundsException e) {
// ignore
} catch (InterruptedException e) {
System.out.println("test");
}
}
public void fullInterruptedFullException(Thread thread) {
try {
thread.join();
} catch (InterruptedException e) {
e.printStackTrace();
} catch (Exception e) {
e.printStackTrace();
}
}
public void justException(Thread thread) {
try {
thread.join();
} catch (Exception e) {
e.printStackTrace();
}
}
public void multipleMethodInvocations(Thread thread, Thread thread2) {
try {
thread.join();
thread2.join();
} catch (Exception e) {
e.printStackTrace();
}
}
public void tryFinally(Thread thread) {
try {
thread.join();
} catch (InterruptedException e) {
// ignore
} finally {
System.out.println("test finally");
}
}
public void tryAssigningThread(Thread thread) {
while (true) {
try {
thread.join();
thread = null;
} catch (InterruptedException e) {
// ignore
}
}
}
}\
""")
.doTest();
}
@Test
public void fixes() {
BugCheckerRefactoringTestHelper.newInstance(ThreadJoinLoop.class, getClass())
.addInputLines(
"ThreadJoinLoopPositiveCases.java",
"""
package com.google.errorprone.bugpatterns.testdata;
/**
* @author mariasam@google.com (Maria Sam) on 7/10/17.
*/
| ThreadJoinLoopNegativeCases |
java | apache__logging-log4j2 | log4j-perf-test/src/main/java/org/apache/logging/log4j/perf/jmh/InstantFormatBenchmark.java | {
"start": 6093,
"end": 7247
} | class ____ implements Formatter {
private final Instant[] instants;
private final StringBuilder stringBuilder = new StringBuilder();
private final DateTimeFormatter dateTimeFormatter;
private JavaDateTimeFormatter(final String pattern) {
this.instants = INSTANTS;
this.dateTimeFormatter =
DateTimeFormatter.ofPattern(pattern, LOCALE).withZone(TIME_ZONE.toZoneId());
}
@Override
public void benchmark(final Blackhole blackhole) {
for (final Instant instant : instants) {
stringBuilder.setLength(0);
dateTimeFormatter.formatTo(instant, stringBuilder);
blackhole.consume(stringBuilder.length());
}
}
}
@Benchmark
public void log4jFdf(final Blackhole blackhole) {
log4jFdf.benchmark(blackhole);
}
@Benchmark
public void commonsFdf(final Blackhole blackhole) {
commonsFdf.benchmark(blackhole);
}
@Benchmark
public void javaDtf(final Blackhole blackhole) {
javaDtf.benchmark(blackhole);
}
}
| JavaDateTimeFormatter |
java | micronaut-projects__micronaut-core | core/src/main/java/io/micronaut/core/annotation/AnnotationValue.java | {
"start": 52687,
"end": 52761
} | enum ____
* @param rawValue The raw value
* @param <E> The | type |
java | spring-projects__spring-data-jpa | spring-data-jpa/src/test/java/org/springframework/data/jpa/repository/sample/UserRepositoryCustom.java | {
"start": 855,
"end": 1078
} | interface ____ {
/**
* Method actually triggering a finder but being overridden.
*/
void findByOverridingMethod();
/**
* Some custom method to implement.
*/
void someCustomMethod(User user);
}
| UserRepositoryCustom |
java | alibaba__druid | core/src/test/java/com/alibaba/druid/bvt/sql/odps/OdpsListTest.java | {
"start": 281,
"end": 3078
} | class ____ extends TestCase {
public void test_0() throws Exception {
String sql = "list roles";
OdpsStatementParser parser = new OdpsStatementParser(sql);
SQLStatement stmt = parser.parseStatementList().get(0);
parser.match(Token.EOF);
String output = SQLUtils.toOdpsString(stmt);
// System.out.println(output);
assertEquals("LIST roles", output);
}
public void test_1() throws Exception {
String sql = "list users";
OdpsStatementParser parser = new OdpsStatementParser(sql);
SQLStatement stmt = parser.parseStatementList().get(0);
parser.match(Token.EOF);
String output = SQLUtils.toOdpsString(stmt);
// System.out.println(output);
assertEquals("LIST users", output);
}
public void test_2() throws Exception {
String sql = "list functions";
OdpsStatementParser parser = new OdpsStatementParser(sql);
SQLStatement stmt = parser.parseStatementList().get(0);
parser.match(Token.EOF);
String output = SQLUtils.toOdpsString(stmt);
// System.out.println(output);
assertEquals("LIST functions", output);
}
public void test_3() throws Exception {
String sql = "list resources";
OdpsStatementParser parser = new OdpsStatementParser(sql);
SQLStatement stmt = parser.parseStatementList().get(0);
parser.match(Token.EOF);
String output = SQLUtils.toOdpsString(stmt);
// System.out.println(output);
assertEquals("LIST resources", output);
}
public void test_4() throws Exception {
String sql = "list accountproviders";
OdpsStatementParser parser = new OdpsStatementParser(sql);
SQLStatement stmt = parser.parseStatementList().get(0);
parser.match(Token.EOF);
String output = SQLUtils.toOdpsString(stmt);
// System.out.println(output);
assertEquals("LIST accountproviders", output);
}
public void test_5() throws Exception {
String sql = "list jobs";
OdpsStatementParser parser = new OdpsStatementParser(sql);
SQLStatement stmt = parser.parseStatementList().get(0);
parser.match(Token.EOF);
String output = SQLUtils.toOdpsString(stmt);
// System.out.println(output);
assertEquals("LIST jobs", output);
}
public void test_6() throws Exception {
String sql = "list trustedprojects";
OdpsStatementParser parser = new OdpsStatementParser(sql);
SQLStatement stmt = parser.parseStatementList().get(0);
parser.match(Token.EOF);
String output = SQLUtils.toOdpsString(stmt);
// System.out.println(output);
assertEquals("LIST trustedprojects", output);
}
}
| OdpsListTest |
java | quarkusio__quarkus | integration-tests/hibernate-orm-compatibility-5.6/database-generator/src/main/java/io/quarkus/it/hibernate/compatibility/MyEntity.java | {
"start": 460,
"end": 861
} | class ____ {
@Id
@GeneratedValue
public Long id;
public Duration duration;
public UUID uuid;
public Instant instant;
public OffsetTime offsetTime;
public OffsetDateTime offsetDateTime;
public ZonedDateTime zonedDateTime;
public int[] intArray;
public ArrayList<String> stringList;
@Enumerated(EnumType.ORDINAL)
public MyEnum myEnum;
}
| MyEntity |
java | apache__hadoop | hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/SequenceFileInputFilter.java | {
"start": 2521,
"end": 2570
} | class ____ Filters
*/
public static abstract | for |
java | ReactiveX__RxJava | src/main/java/io/reactivex/rxjava3/internal/operators/flowable/FlowablePublishMulticast.java | {
"start": 2454,
"end": 3693
} | class ____<R> implements FlowableSubscriber<R>, Subscription {
final Subscriber<? super R> downstream;
final MulticastProcessor<?> processor;
Subscription upstream;
OutputCanceller(Subscriber<? super R> actual, MulticastProcessor<?> processor) {
this.downstream = actual;
this.processor = processor;
}
@Override
public void onSubscribe(Subscription s) {
if (SubscriptionHelper.validate(this.upstream, s)) {
this.upstream = s;
downstream.onSubscribe(this);
}
}
@Override
public void onNext(R t) {
downstream.onNext(t);
}
@Override
public void onError(Throwable t) {
downstream.onError(t);
processor.dispose();
}
@Override
public void onComplete() {
downstream.onComplete();
processor.dispose();
}
@Override
public void request(long n) {
upstream.request(n);
}
@Override
public void cancel() {
upstream.cancel();
processor.dispose();
}
}
static final | OutputCanceller |
java | google__auto | value/src/test/java/com/google/auto/value/processor/ExtensionTest.java | {
"start": 44955,
"end": 45383
} | interface ____<T, BuilderT, BuiltT> {",
" BuilderT setThing(T x);",
" BuiltT build();",
" }",
"}");
JavaFileObject autoValueClass =
JavaFileObjects.forSourceLines(
"foo.bar.Baz",
"package foo.bar;",
"",
"import com.google.auto.value.AutoValue;",
"",
"@AutoValue",
"abstract | Builder |
java | google__guice | core/test/com/google/inject/RestrictedBindingSourceTest.java | {
"start": 2975,
"end": 3095
} | interface ____ {
int getNextHopIpAddress(int destinationIpAddress);
}
@NetworkLibrary
private static | RoutingTable |
java | google__dagger | javatests/dagger/internal/codegen/XExecutableTypesTest.java | {
"start": 1332,
"end": 1584
} | class ____ {",
" void p(String s) { throw new RuntimeException(); }",
"}");
Source bar =
CompilerTests.javaSource(
"test.Bar",
"package test;",
"import java.util.*;",
" | Foo |
java | resilience4j__resilience4j | resilience4j-spring/src/main/java/io/github/resilience4j/retry/configure/RetryAspect.java | {
"start": 7650,
"end": 9680
} | interface ____ is acting as a proxy");
return AnnotationExtractor
.extractAnnotationFromProxy(proceedingJoinPoint.getTarget(), Retry.class);
} else {
return AnnotationExtractor
.extract(proceedingJoinPoint.getTarget().getClass(), Retry.class);
}
}
/**
* @param proceedingJoinPoint the AOP logic joint point
* @param retry the configured sync retry
* @return the result object if any
* @throws Throwable
*/
private Object handleDefaultJoinPoint(ProceedingJoinPoint proceedingJoinPoint,
io.github.resilience4j.retry.Retry retry) throws Throwable {
return retry.executeCheckedSupplier(proceedingJoinPoint::proceed);
}
/**
* @param proceedingJoinPoint the AOP logic joint point
* @param retry the configured async retry
* @return the result object if any
*/
@SuppressWarnings("unchecked")
private Object handleJoinPointCompletableFuture(ProceedingJoinPoint proceedingJoinPoint,
io.github.resilience4j.retry.Retry retry) {
return retry.executeCompletionStage(retryExecutorService, () -> {
try {
return (CompletionStage<Object>) proceedingJoinPoint.proceed();
} catch (Throwable throwable) {
throw new CompletionException(throwable);
}
});
}
@Override
public int getOrder() {
return retryConfigurationProperties.getRetryAspectOrder();
}
@Override
public void close() throws Exception {
retryExecutorService.shutdown();
try {
if (!retryExecutorService.awaitTermination(5, TimeUnit.SECONDS)) {
retryExecutorService.shutdownNow();
}
} catch (InterruptedException e) {
if (!retryExecutorService.isTerminated()) {
retryExecutorService.shutdownNow();
}
Thread.currentThread().interrupt();
}
}
}
| which |
java | spring-projects__spring-framework | spring-core/src/test/java/org/springframework/util/ReflectionUtilsTests.java | {
"start": 11185,
"end": 11793
} | class ____ extends Parent {
@Override
public Integer m1() {
return 42;
}
}
Method[] methods = ReflectionUtils.getUniqueDeclaredMethods(Leaf.class);
assertThat(methods).extracting(Method::getName).filteredOn("m1"::equals).hasSize(1);
assertThat(methods).contains(Leaf.class.getMethod("m1"));
assertThat(methods).doesNotContain(Parent.class.getMethod("m1"));
}
@Test
void getDeclaredMethodsReturnsCopy() {
Method[] m1 = ReflectionUtils.getDeclaredMethods(A.class);
Method[] m2 = ReflectionUtils.getDeclaredMethods(A.class);
assertThat(m1). isNotSameAs(m2);
}
private static | Leaf |
java | elastic__elasticsearch | server/src/test/java/org/elasticsearch/action/support/replication/TransportReplicationAllPermitsAcquisitionTests.java | {
"start": 28325,
"end": 30605
// Test action that acquires ALL operation permits (not just one) on both the
// primary and the replica, and asserts inside each shard operation that the
// shard reports the OPERATIONS_BLOCKED sentinel while the permits are held.
// NOTE(review): the class name is a masked placeholder ("____") in this dump;
// the constructor name indicates it is AllPermitsThenBlockAction — confirm.
class ____ extends TestAction {
    // Upper bound for acquiring all permits before the acquisition times out.
    private final TimeValue timeout = TimeValue.timeValueSeconds(30L);
    AllPermitsThenBlockAction(
        Settings settings,
        String actionName,
        TransportService transportService,
        ClusterService clusterService,
        ShardStateAction shardStateAction,
        ThreadPool threadPool,
        ShardId shardId,
        IndexShard primary,
        IndexShard replica
    ) {
        super(
            settings,
            actionName,
            transportService,
            clusterService,
            shardStateAction,
            threadPool,
            shardId,
            primary,
            replica,
            new SetOnce<>()
        );
    }
    // Acquire every primary permit instead of the default single permit.
    @Override
    protected void acquirePrimaryOperationPermit(IndexShard shard, Request request, ActionListener<Releasable> onAcquired) {
        shard.acquireAllPrimaryOperationsPermits(onAcquired, timeout);
    }
    // Acquire every replica permit instead of the default single permit.
    @Override
    protected void acquireReplicaOperationPermit(
        IndexShard shard,
        Request request,
        ActionListener<Releasable> onAcquired,
        long primaryTerm,
        long globalCheckpoint,
        long maxSeqNo
    ) {
        shard.acquireAllReplicaOperationsPermits(primaryTerm, globalCheckpoint, maxSeqNo, onAcquired, timeout);
    }
    @Override
    protected void shardOperationOnPrimary(
        Request shardRequest,
        IndexShard shard,
        ActionListener<PrimaryResult<Request, Response>> listener
    ) {
        // With all permits held, the active-operations count must be the blocked sentinel.
        assertEquals("All permits must be acquired", IndexShard.OPERATIONS_BLOCKED, shard.getActiveOperationsCount());
        super.shardOperationOnPrimary(shardRequest, shard, listener);
    }
    @Override
    protected void shardOperationOnReplica(Request shardRequest, IndexShard replica, ActionListener<ReplicaResult> listener) {
        // Same invariant, checked on the replica side.
        assertEquals("All permits must be acquired", IndexShard.OPERATIONS_BLOCKED, replica.getActiveOperationsCount());
        super.shardOperationOnReplica(shardRequest, replica, listener);
    }
}
static | AllPermitsThenBlockAction |
java | mapstruct__mapstruct | processor/src/test/java/org/mapstruct/ap/test/builtin/jodatime/bean/LocalDateTimeBean.java | {
"start": 282,
"end": 550
// Simple JavaBean wrapping a Joda-Time LocalDateTime, used as a mapping
// source/target in MapStruct built-in converter tests.
// NOTE(review): class name is a masked placeholder ("____") in this dump;
// file metadata suggests LocalDateTimeBean — confirm against the repo.
class ____ {
    private LocalDateTime localDateTime;
    public LocalDateTime getLocalDateTime() {
        return localDateTime;
    }
    public void setLocalDateTime(LocalDateTime localDateTime) {
        this.localDateTime = localDateTime;
    }
}
| LocalDateTimeBean |
java | elastic__elasticsearch | x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/schema/TransformConfigTests.java | {
"start": 761,
"end": 1349
// Validates randomly generated TransformConfig instances against the JSON
// schema file shipped with the plugin.
// NOTE(review): class name is a masked placeholder ("____") in this dump;
// file metadata suggests TransformConfigTests — confirm against the repo.
class ____ extends AbstractSchemaValidationTestCase<TransformConfig> {
    // Exclude generated fields from serialization so output matches the schema.
    protected static Params TO_XCONTENT_PARAMS = new ToXContent.MapParams(
        Collections.singletonMap(TransformField.EXCLUDE_GENERATED, "true")
    );
    @Override
    protected TransformConfig createTestInstance() {
        return randomTransformConfig();
    }
    @Override
    protected String getJsonSchemaFileName() {
        return "transform_config.schema.json";
    }
    @Override
    protected ToXContent.Params getToXContentParams() {
        return TO_XCONTENT_PARAMS;
    }
}
| TransformConfigTests |
java | quarkusio__quarkus | extensions/websockets-next/deployment/src/test/java/io/quarkus/websockets/next/test/telemetry/endpoints/onbinarymessage/SingleTextReceived_UniTextResponse_Endpoint.java | {
"start": 274,
"end": 468
// WebSockets-next telemetry test endpoint: receives each binary message as a
// String and replies asynchronously with a Uni carrying "echo 0: " + message.
// NOTE(review): class name is a masked placeholder ("____") in this dump; the
// path suggests SingleTextReceived_UniTextResponse_Endpoint — confirm.
class ____ {
    @OnBinaryMessage
    public Uni<String> onMessage(String message) {
        return Uni.createFrom().item("echo 0: " + message);
    }
}
| SingleTextReceived_UniTextResponse_Endpoint |
java | spring-projects__spring-framework | spring-r2dbc/src/main/java/org/springframework/r2dbc/core/DataClassRowMapper.java | {
"start": 1516,
"end": 1903
} | class ____ {@code static} nested class, and it may expose either
* a <em>data class</em> constructor with named parameters corresponding to column
* names or classic bean property setter methods with property names corresponding
* to column names (or even a combination of both).
*
* <p>The term "data class" applies to Java <em>records</em>, Kotlin <em>data
* classes</em>, and any | or |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.