language stringclasses 1 value | repo stringclasses 60 values | path stringlengths 22 294 | class_span dict | source stringlengths 13 1.16M | target stringlengths 1 113 |
|---|---|---|---|---|---|
java | elastic__elasticsearch | x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/support/SecondaryAuthentication.java | {
"start": 941,
"end": 1113
} | class ____ a representation of that secondary user that can be activated in the security context while processing specific blocks
* of code or within a listener.
*/
public | is |
java | apache__flink | flink-connectors/flink-connector-files/src/main/java/org/apache/flink/connector/file/src/AbstractFileSource.java | {
"start": 2361,
"end": 2749
} | class ____ File Sources. The main implementation to use is the {@link FileSource}, which
* also has the majority of the documentation.
*
* <p>To read new formats, one commonly does NOT need to extend this class, but should implement a
* new Format Reader (like {@link StreamFormat}, {@link BulkFormat} and use it with the {@code
* FileSource}.
*
* <p>The only reason to extend this | for |
java | apache__camel | dsl/camel-componentdsl/src/generated/java/org/apache/camel/builder/component/dsl/RestEndpointComponentBuilderFactory.java | {
"start": 8861,
"end": 10389
} | class ____
extends AbstractComponentBuilder<RestComponent>
implements RestEndpointComponentBuilder {
@Override
protected RestComponent buildConcreteComponent() {
return new RestComponent();
}
@Override
protected boolean setPropertyOnComponent(
Component component,
String name,
Object value) {
switch (name) {
case "bridgeErrorHandler": ((RestComponent) component).setBridgeErrorHandler((boolean) value); return true;
case "consumerComponentName": ((RestComponent) component).setConsumerComponentName((java.lang.String) value); return true;
case "apiDoc": ((RestComponent) component).setApiDoc((java.lang.String) value); return true;
case "host": ((RestComponent) component).setHost((java.lang.String) value); return true;
case "lazyStartProducer": ((RestComponent) component).setLazyStartProducer((boolean) value); return true;
case "producerComponentName": ((RestComponent) component).setProducerComponentName((java.lang.String) value); return true;
case "autowiredEnabled": ((RestComponent) component).setAutowiredEnabled((boolean) value); return true;
case "headerFilterStrategy": ((RestComponent) component).setHeaderFilterStrategy((org.apache.camel.spi.HeaderFilterStrategy) value); return true;
default: return false;
}
}
}
} | RestEndpointComponentBuilderImpl |
java | apache__flink | flink-runtime/src/test/java/org/apache/flink/runtime/operators/testutils/TestData.java | {
"start": 2005,
"end": 2133
} | class ____ not be instantiated) */
private TestData() {}
/** Tuple2<Integer, String> generator. */
public static | should |
java | elastic__elasticsearch | x-pack/plugin/identity-provider/src/internalClusterTest/java/org/elasticsearch/xpack/idp/saml/test/IdentityProviderIntegTestCase.java | {
"start": 3388,
"end": 13260
} | class ____ extends ESIntegTestCase {
// Local Security Cluster user
public static final String SAMPLE_USER_NAME = "es_user";
public static final String SAMPLE_USER_PASSWORD = "es_user_password";
public static final String SAMPLE_USER_PASSWORD_HASHED = new String(
Hasher.resolve("bcrypt9").hash(new SecureString(SAMPLE_USER_PASSWORD.toCharArray()))
);
public static final String SAMPLE_USER_ROLE = "es_user_role";
// User that is authenticated to the Security Cluster in order to perform SSO to cloud resources
public static final String SAMPLE_IDPUSER_NAME = "idp_user";
public static final String SAMPLE_IDPUSER_PASSWORD = "idp_user_password";
public static final String SAMPLE_IDPUSER_PASSWORD_HASHED = new String(
Hasher.resolve("bcrypt9").hash(new SecureString(SAMPLE_IDPUSER_PASSWORD.toCharArray()))
);
public static final String SAMPLE_IDPUSER_ROLE = "idp_user_role";
// Cloud console user that calls all IDP related APIs
public static final String CONSOLE_USER_NAME = "console_user";
public static final String CONSOLE_USER_PASSWORD = "console_user_password";
public static final String CONSOLE_USER_PASSWORD_HASHED = new String(
Hasher.resolve("bcrypt9").hash(new SecureString(CONSOLE_USER_PASSWORD.toCharArray()))
);
public static final String CONSOLE_USER_ROLE = "console_user_role";
public static final String SP_ENTITY_ID = "ec:abcdef:123456";
public static final RequestOptions REQUEST_OPTIONS_AS_CONSOLE_USER = RequestOptions.DEFAULT.toBuilder()
.addHeader("Authorization", basicAuthHeaderValue(CONSOLE_USER_NAME, new SecureString(CONSOLE_USER_PASSWORD.toCharArray())))
.build();
private static Path PARENT_DIR;
@BeforeClass
public static void setup() {
PARENT_DIR = createTempDir();
}
@Override
protected Settings nodeSettings(int nodeOrdinal, Settings otherSettings) {
final Path home = dataPath(PARENT_DIR, nodeOrdinal);
final Path xpackConf = home.resolve("config");
try {
Files.createDirectories(xpackConf);
} catch (IOException e) {
throw new UncheckedIOException(e);
}
writeFile(xpackConf, "roles.yml", configRoles());
writeFile(xpackConf, "users", configUsers());
writeFile(xpackConf, "users_roles", configUsersRoles());
Settings.Builder builder = Settings.builder()
.put(super.nodeSettings(nodeOrdinal, otherSettings))
.put(XPackSettings.SECURITY_ENABLED.getKey(), true)
.put(NetworkModule.TRANSPORT_TYPE_KEY, SecurityField.NAME4)
.put(NetworkModule.HTTP_TYPE_KEY, SecurityField.NAME4)
.put("xpack.idp.enabled", true)
.put(IDP_ENTITY_ID.getKey(), "urn:elastic:cloud:idp")
.put(IDP_SSO_REDIRECT_ENDPOINT.getKey(), "https://idp.org/sso/redirect")
.put(IDP_ORGANIZATION_NAME.getKey(), "Identity Provider")
.put(IDP_ORGANIZATION_DISPLAY_NAME.getKey(), "Identity Provider")
.put(IDP_ORGANIZATION_URL.getKey(), "https://idp.org")
.put(IDP_CONTACT_GIVEN_NAME.getKey(), "Tony")
.put(IDP_CONTACT_SURNAME.getKey(), "Stark")
.put(IDP_CONTACT_EMAIL.getKey(), "tony@starkindustries.com")
.put(APPLICATION_NAME_SETTING.getKey(), "elastic-cloud")
.put(NAMEID_FORMAT_SETTING.getKey(), TRANSIENT)
.put("xpack.idp.signing.key", resolveResourcePath("/keypair/keypair_RSA_2048.key"))
.put("xpack.idp.signing.certificate", resolveResourcePath("/keypair/keypair_RSA_2048.crt"))
.put("xpack.security.authc.realms." + FileRealmSettings.TYPE + ".file.order", 0)
.put("xpack.security.authc.realms." + NativeRealmSettings.TYPE + ".index.order", "1")
.put(XPackSettings.API_KEY_SERVICE_ENABLED_SETTING.getKey(), true)
.put("xpack.license.self_generated.type", "trial");
return builder.build();
}
@Override
protected Collection<Class<? extends Plugin>> nodePlugins() {
return List.of(LocalStateIdentityProviderPlugin.class, Netty4Plugin.class, CommonAnalysisPlugin.class);
}
@Override
protected boolean addMockTransportService() {
return false; // security has its own transport service
}
@Override
protected boolean addMockHttpTransport() {
return false; // enable http
}
@Override
protected Function<Client, Client> getClientWrapper() {
Map<String, String> headers = Collections.singletonMap(
"Authorization",
basicAuthHeaderValue(SAMPLE_USER_NAME, new SecureString(SAMPLE_USER_PASSWORD.toCharArray()))
);
// we need to wrap node clients because we do not specify a user for nodes and all requests will use the system
// user. This is ok for internal n2n stuff but the test framework does other things like wiping indices, repositories, etc
// that the system user cannot do. so we wrap the node client with a user that can do these things since the client() calls
// return a node client
return client -> asInstanceOf(NodeClient.class, client).filterWithHeader(headers);
}
@Override
protected Path nodeConfigPath(int nodeOrdinal) {
return dataPath(PARENT_DIR, nodeOrdinal).resolve("config");
}
private String configRoles() {
// test role allows for everything
// IDP end user doesn't need any privileges on the security cluster
// Could switch to grant apikey for user and call this as console_user
// Console user should be able to call all IDP related endpoints and register application privileges
return Strings.format("""
%s:
cluster: [ ALL ]
indices:
- names: '*'
allow_restricted_indices: true
privileges: [ ALL ]
%s:
cluster: ['cluster:admin/xpack/security/api_key/create']
indices: []
applications:
- application: elastic-cloud
resources: [ '%s' ]
privileges: [ 'sso:superuser' ]
%s:
cluster: ['cluster:admin/idp/*', 'cluster:admin/xpack/security/privilege/*' ]
indices: []
""", SAMPLE_USER_ROLE, SAMPLE_IDPUSER_ROLE, SP_ENTITY_ID, CONSOLE_USER_ROLE);
}
private String configUsers() {
return SAMPLE_USER_NAME
+ ":"
+ SAMPLE_USER_PASSWORD_HASHED
+ "\n"
+ SAMPLE_IDPUSER_NAME
+ ":"
+ SAMPLE_IDPUSER_PASSWORD_HASHED
+ "\n"
+ CONSOLE_USER_NAME
+ ":"
+ CONSOLE_USER_PASSWORD_HASHED
+ "\n";
}
private String configUsersRoles() {
return SAMPLE_USER_ROLE
+ ":"
+ SAMPLE_USER_NAME
+ "\n"
+ SAMPLE_IDPUSER_ROLE
+ ":"
+ SAMPLE_IDPUSER_NAME
+ "\n"
+ CONSOLE_USER_ROLE
+ ":"
+ CONSOLE_USER_NAME
+ "\n";
}
Path dataPath(Path confDir, final int nodeOrdinal) {
return confDir.resolve(getCurrentClusterScope() + "-" + nodeOrdinal);
}
protected Scope getCurrentClusterScope() {
return getCurrentClusterScope(this.getClass());
}
private static Scope getCurrentClusterScope(Class<?> clazz) {
ClusterScope annotation = getAnnotation(clazz);
return annotation == null ? Scope.SUITE : annotation.scope();
}
private static ClusterScope getAnnotation(Class<?> clazz) {
if (clazz == Object.class || clazz == IdentityProviderIntegTestCase.class) {
return null;
}
ClusterScope annotation = clazz.getAnnotation(ClusterScope.class);
if (annotation != null) {
return annotation;
}
return getAnnotation(clazz.getSuperclass());
}
private static String writeFile(Path folder, String name, byte[] content) {
final Path path = folder.resolve(name);
Path tempFile = null;
try {
tempFile = Files.createTempFile(path.getParent(), path.getFileName().toString(), "tmp");
try (OutputStream os = Files.newOutputStream(tempFile, CREATE, TRUNCATE_EXISTING, WRITE)) {
Streams.copy(content, os);
}
try {
Files.move(tempFile, path, REPLACE_EXISTING, ATOMIC_MOVE);
} catch (final AtomicMoveNotSupportedException e) {
Files.move(tempFile, path, REPLACE_EXISTING);
}
} catch (final IOException e) {
throw new UncheckedIOException(Strings.format("could not write file [%s]", path.toAbsolutePath()), e);
} finally {
// we are ignoring exceptions here, so we do not need handle whether or not tempFile was initialized nor if the file exists
IOUtils.deleteFilesIgnoringExceptions(tempFile);
}
return path.toAbsolutePath().toString();
}
private static String writeFile(Path folder, String name, String content) {
return writeFile(folder, name, content.getBytes(StandardCharsets.UTF_8));
}
private Path resolveResourcePath(String resourcePathToFile) {
try {
Path path = createTempFile();
try (InputStream resourceInput = IdentityProviderIntegTestCase.class.getResourceAsStream(resourcePathToFile)) {
Files.copy(resourceInput, path, StandardCopyOption.REPLACE_EXISTING);
}
return path;
} catch (IOException e) {
throw new ElasticsearchException("Failed to resolve resource (Path=[{}])", e, resourcePathToFile);
}
}
}
| IdentityProviderIntegTestCase |
java | apache__hadoop | hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/services/AbfsAHCHttpOperation.java | {
"start": 3030,
"end": 14273
} | class ____ extends AbfsHttpOperation {
private static final Logger LOG = LoggerFactory.getLogger(
AbfsAHCHttpOperation.class);
/**
* Request object for network call over ApacheHttpClient.
*/
private final HttpRequestBase httpRequestBase;
/**
* Response object received from a server call over ApacheHttpClient.
*/
private HttpResponse httpResponse;
/**
* Flag to indicate if the request is a payload request. HTTP methods PUT, POST,
* PATCH qualify for payload requests.
*/
private final boolean isPayloadRequest;
/**
* ApacheHttpClient to make network calls.
*/
private final AbfsApacheHttpClient abfsApacheHttpClient;
/**
* Timeout in milliseconds that defines maximum allowed time to execute operation.
* This timeout starts when execution starts and includes E2E processing time of request.
* This is based on tail latency observed in the system.
*/
private final long tailLatencyTimeout;
public AbfsAHCHttpOperation(final URL url,
final String method,
final List<AbfsHttpHeader> requestHeaders,
final Duration connectionTimeout,
final Duration readTimeout,
final long tailLatencyTimeout,
final AbfsApacheHttpClient abfsApacheHttpClient,
final AbfsClient abfsClient) throws IOException {
super(LOG, url, method, requestHeaders, connectionTimeout, readTimeout,
abfsClient);
this.isPayloadRequest = HTTP_METHOD_PUT.equals(method)
|| HTTP_METHOD_PATCH.equals(method)
|| HTTP_METHOD_POST.equals(method);
this.abfsApacheHttpClient = abfsApacheHttpClient;
this.tailLatencyTimeout = tailLatencyTimeout;
LOG.debug("Creating AbfsAHCHttpOperation for URL: {}, method: {}",
url, method);
final URI requestUri;
try {
requestUri = url.toURI();
} catch (URISyntaxException e) {
throw new IOException(e);
}
switch (getMethod()) {
case HTTP_METHOD_PUT:
httpRequestBase = new HttpPut(requestUri);
break;
case HTTP_METHOD_PATCH:
httpRequestBase = new HttpPatch(requestUri);
break;
case HTTP_METHOD_POST:
httpRequestBase = new HttpPost(requestUri);
break;
case HTTP_METHOD_GET:
httpRequestBase = new HttpGet(requestUri);
break;
case HTTP_METHOD_DELETE:
httpRequestBase = new HttpDelete(requestUri);
break;
case HTTP_METHOD_HEAD:
httpRequestBase = new HttpHead(requestUri);
break;
default:
/*
* This would not happen as the AbfsClient would always be sending valid
* method.
*/
throw new PathIOException(getUrl().toString(),
"Unsupported HTTP method: " + getMethod());
}
// Set the request headers in the http request object.
// Earlier we were setting it just before sending the request.
// Setting here ensures that same header will get used while signing
// the request as well as validating the request at server's end.
for (AbfsHttpHeader header : requestHeaders) {
setRequestProperty(header.getName(), header.getValue());
}
}
/**
* @return AbfsManagedHttpClientContext instance that captures latencies at
* different phases of network call.
*/
@VisibleForTesting
AbfsManagedHttpClientContext getHttpClientContext() {
return new AbfsManagedHttpClientContext();
}
long getTailLatencyTimeout() {
return tailLatencyTimeout;
}
/**{@inheritDoc}*/
@Override
protected InputStream getErrorStream() throws IOException {
HttpEntity entity = httpResponse.getEntity();
if (entity == null) {
return null;
}
return entity.getContent();
}
/**{@inheritDoc}*/
@Override
String getConnProperty(final String key) {
Header header = httpRequestBase.getFirstHeader(key);
if (header == null) {
return null;
}
return header.getValue();
}
/**{@inheritDoc}*/
@Override
URL getConnUrl() {
return getUrl();
}
/**{@inheritDoc}*/
@Override
Integer getConnResponseCode() throws IOException {
return getStatusCode();
}
/**{@inheritDoc}*/
@Override
String getConnResponseMessage() throws IOException {
return getStatusDescription();
}
/**{@inheritDoc}*/
@Override
public void processResponse(final byte[] buffer,
final int offset,
final int length) throws IOException {
try {
if (!isPayloadRequest) {
LOG.debug("Sending request: {}", httpRequestBase);
httpResponse = executeRequest();
LOG.debug("Request sent: {}; response {}", httpRequestBase,
httpResponse);
}
parseResponseHeaderAndBody(buffer, offset, length);
} finally {
if (httpResponse != null) {
try {
EntityUtils.consume(httpResponse.getEntity());
} finally {
if (httpResponse instanceof CloseableHttpResponse) {
((CloseableHttpResponse) httpResponse).close();
}
}
}
}
}
/**
* Parse response stream for headers and body.
*
* @param buffer byte array to store response body.
* @param offset offset in the buffer to start storing the response body.
* @param length length of the response body.
*
* @throws IOException network error while read response stream
*/
@VisibleForTesting
void parseResponseHeaderAndBody(final byte[] buffer,
final int offset,
final int length) throws IOException {
setStatusCode(parseStatusCode(httpResponse));
setStatusDescription(httpResponse.getStatusLine().getReasonPhrase());
setRequestId();
// dump the headers
if (LOG.isDebugEnabled()) {
AbfsIoUtils.dumpHeadersToDebugLog("Request Headers",
getRequestProperties());
}
parseResponse(buffer, offset, length);
}
/**
* Parse status code from response
*
* @param httpResponse response object
* @return status code
*/
@VisibleForTesting
int parseStatusCode(HttpResponse httpResponse) {
return httpResponse.getStatusLine().getStatusCode();
}
/**
* Execute network call for the request
*
* @return response object
* @throws IOException network error while executing the request
*/
@VisibleForTesting
HttpResponse executeRequest() throws IOException {
AbfsManagedHttpClientContext abfsHttpClientContext
= getHttpClientContext();
try {
LOG.debug("Executing request: {}", httpRequestBase);
HttpResponse response = abfsApacheHttpClient.execute(httpRequestBase,
abfsHttpClientContext, getConnectionTimeout(), getReadTimeout(), getTailLatencyTimeout());
setConnectionTimeMs(abfsHttpClientContext.getConnectTime());
setSendRequestTimeMs(abfsHttpClientContext.getSendTime());
setRecvResponseTimeMs(abfsHttpClientContext.getReadTime());
return response;
} catch (IOException e) {
LOG.debug("Failed to execute request: {}", httpRequestBase, e);
throw e;
}
}
/**{@inheritDoc}*/
@Override
public void setRequestProperty(final String key, final String value) {
// Content-Length is managed by HttpClient for entity enclosing requests.
// Setting it manually can lead to protocol errors.
if (httpRequestBase instanceof HttpEntityEnclosingRequestBase
&& CONTENT_LENGTH.equals(key)) {
return;
}
httpRequestBase.setHeader(key, value);
}
/**{@inheritDoc}*/
@Override
Map<String, List<String>> getRequestProperties() {
Map<String, List<String>> map = new HashMap<>();
for (Header header : httpRequestBase.getAllHeaders()) {
map.put(header.getName(),
new ArrayList<String>() {{
add(header.getValue());
}});
}
return map;
}
/**{@inheritDoc}*/
@Override
public String getResponseHeader(final String headerName) {
if (httpResponse == null) {
return null;
}
Header header = httpResponse.getFirstHeader(headerName);
if (header == null) {
return null;
}
return header.getValue();
}
/**{@inheritDoc}*/
@Override
public Map<String, List<String>> getResponseHeaders() {
Map<String, List<String>> headers = new HashMap<>();
if (httpResponse == null) {
return headers;
}
for (Header header : httpResponse.getAllHeaders()) {
headers.computeIfAbsent(header.getName(), k -> new ArrayList<>())
.add(header.getValue());
}
return headers;
}
/**{@inheritDoc}*/
@Override
public String getResponseHeaderIgnoreCase(final String headerName) {
Map<String, List<String>> responseHeaders = getResponseHeaders();
if (responseHeaders == null || responseHeaders.isEmpty()) {
return null;
}
// Search for the header value case-insensitively
return responseHeaders.entrySet().stream()
.filter(entry -> entry.getKey() != null
&& entry.getKey().equalsIgnoreCase(headerName))
.flatMap(entry -> entry.getValue().stream())
.findFirst()
.orElse(null); // Return null if no match is found
}
/**{@inheritDoc}*/
@Override
protected InputStream getContentInputStream()
throws IOException {
if (httpResponse == null || httpResponse.getEntity() == null) {
return null;
}
return httpResponse.getEntity().getContent();
}
/**{@inheritDoc}*/
@Override
public void sendPayload(final byte[] buffer,
final int offset,
final int length)
throws IOException {
if (!isPayloadRequest) {
return;
}
setExpectedBytesToBeSent(length);
if (buffer != null) {
HttpEntity httpEntity = new ByteArrayEntity(buffer, offset, length,
TEXT_PLAIN);
((HttpEntityEnclosingRequestBase) httpRequestBase).setEntity(
httpEntity);
}
try {
LOG.debug("Sending request: {}", httpRequestBase);
httpResponse = executeRequest();
} catch (AbfsApacheHttpExpect100Exception ex) {
LOG.debug(
"Getting output stream failed with expect header enabled, returning back."
+ "Expect 100 assertion failed for uri {} with status code: {}",
getMaskedUrl(), parseStatusCode(ex.getHttpResponse()),
ex);
setConnectionDisconnectedOnError();
httpResponse = ex.getHttpResponse();
} catch (IOException ex) {
LOG.debug("Getting output stream failed for uri {}, exception: {}",
getMaskedUrl(), ex);
throw ex;
} finally {
if (httpResponse != null) {
LOG.debug("Request sent: {}; response {}", httpRequestBase,
httpResponse);
}
if (!isConnectionDisconnectedOnError()
&& httpRequestBase instanceof HttpEntityEnclosingRequestBase) {
setBytesSent(length);
}
}
}
/**{@inheritDoc}*/
@Override
public String getRequestProperty(String name) {
for (Header header : httpRequestBase.getAllHeaders()) {
if (header.getName().equals(name)) {
String val = header.getValue();
val = val == null ? EMPTY_STRING : val;
if (EMPTY_STRING.equals(val)) {
continue;
}
return val;
}
}
return EMPTY_STRING;
}
/**{@inheritDoc}*/
@Override
public String getTracingContextSuffix() {
return APACHE_IMPL;
}
}
| AbfsAHCHttpOperation |
java | apache__kafka | clients/src/test/java/org/apache/kafka/common/network/SslTransportLayerTest.java | {
"start": 76868,
"end": 83435
} | class ____ {
private Integer bufSizeOverride;
ResizeableBufferSize(Integer bufSizeOverride) {
this.bufSizeOverride = bufSizeOverride;
}
int updateAndGet(int actualSize, boolean update) {
int size = actualSize;
if (bufSizeOverride != null) {
if (update)
bufSizeOverride = Math.min(bufSizeOverride * 2, size);
size = bufSizeOverride;
}
return size;
}
}
}
/**
* SSLEngine implementations may transition from NEED_UNWRAP to NEED_UNWRAP
* even after reading all the data from the socket. This test ensures we
* continue unwrapping and not break early.
* Please refer <a href="https://issues.apache.org/jira/browse/KAFKA-16305">KAFKA-16305</a>
* for more information.
*/
@Test
public void testHandshakeUnwrapContinuesUnwrappingOnNeedUnwrapAfterAllBytesRead() throws IOException {
// Given
byte[] data = "ClientHello?".getBytes(StandardCharsets.UTF_8);
SSLEngine sslEngine = mock(SSLEngine.class);
SocketChannel socketChannel = mock(SocketChannel.class);
SelectionKey selectionKey = mock(SelectionKey.class);
when(selectionKey.channel()).thenReturn(socketChannel);
SSLSession sslSession = mock(SSLSession.class);
SslTransportLayer sslTransportLayer = new SslTransportLayer(
"test-channel",
selectionKey,
sslEngine,
mock(ChannelMetadataRegistry.class)
);
when(sslEngine.getSession()).thenReturn(sslSession);
when(sslSession.getPacketBufferSize()).thenReturn(data.length * 2);
sslTransportLayer.startHandshake(); // to initialize the buffers
ByteBuffer netReadBuffer = sslTransportLayer.netReadBuffer();
netReadBuffer.clear();
ByteBuffer appReadBuffer = sslTransportLayer.appReadBuffer();
when(socketChannel.read(any(ByteBuffer.class))).then(invocation -> {
((ByteBuffer) invocation.getArgument(0)).put(data);
return data.length;
});
when(sslEngine.unwrap(netReadBuffer, appReadBuffer))
.thenAnswer(invocation -> {
netReadBuffer.flip();
return new SSLEngineResult(SSLEngineResult.Status.OK, SSLEngineResult.HandshakeStatus.NEED_UNWRAP, data.length, 0);
}).thenReturn(new SSLEngineResult(SSLEngineResult.Status.OK, SSLEngineResult.HandshakeStatus.NEED_WRAP, 0, 0));
// When
SSLEngineResult result = sslTransportLayer.handshakeUnwrap(true, false);
// Then
verify(sslEngine, times(2)).unwrap(netReadBuffer, appReadBuffer);
assertEquals(SSLEngineResult.Status.OK, result.getStatus());
assertEquals(SSLEngineResult.HandshakeStatus.NEED_WRAP, result.getHandshakeStatus());
}
@Test
public void testSSLEngineCloseInboundInvokedOnClose() throws IOException {
// Given
SSLEngine sslEngine = mock(SSLEngine.class);
Socket socket = mock(Socket.class);
SocketChannel socketChannel = mock(SocketChannel.class);
SelectionKey selectionKey = mock(SelectionKey.class);
when(socketChannel.socket()).thenReturn(socket);
when(selectionKey.channel()).thenReturn(socketChannel);
doThrow(new SSLException("Mock exception")).when(sslEngine).closeInbound();
SslTransportLayer sslTransportLayer = new SslTransportLayer(
"test-channel",
selectionKey,
sslEngine,
mock(ChannelMetadataRegistry.class)
);
// When
sslTransportLayer.close();
// Then
verify(sslEngine, times(1)).closeOutbound();
verify(sslEngine, times(1)).closeInbound();
verifyNoMoreInteractions(sslEngine);
}
@Test
public void testGatheringWrite() throws IOException {
SSLEngine sslEngine = mock(SSLEngine.class);
SelectionKey selectionKey = mock(SelectionKey.class);
SslTransportLayer sslTransportLayer = spy(new SslTransportLayer(
"test-channel",
selectionKey,
sslEngine,
mock(ChannelMetadataRegistry.class)
));
doReturn(false).when(sslTransportLayer).hasPendingWrites();
ByteBuffer mockSocket = ByteBuffer.allocate(1024);
when(sslTransportLayer.write(any(ByteBuffer.class))).then(invocation -> {
ByteBuffer buf = invocation.getArgument(0);
int written = buf.remaining();
mockSocket.put(buf);
return written;
});
ByteBuffer[] srcs = {
ByteBuffer.wrap("Hello, ".getBytes(StandardCharsets.UTF_8)),
ByteBuffer.wrap("World".getBytes(StandardCharsets.UTF_8)),
ByteBuffer.wrap("!".getBytes(StandardCharsets.UTF_8))
};
byte[] expected = "World!".getBytes(StandardCharsets.UTF_8);
assertEquals(expected.length, sslTransportLayer.write(srcs, 1, 2));
mockSocket.flip();
byte[] actual = new byte[expected.length];
mockSocket.get(actual);
assertArrayEquals(expected, actual);
}
@Test
public void testScatteringRead() throws IOException {
SSLEngine sslEngine = mock(SSLEngine.class);
SelectionKey selectionKey = mock(SelectionKey.class);
SslTransportLayer sslTransportLayer = spy(new SslTransportLayer(
"test-channel",
selectionKey,
sslEngine,
mock(ChannelMetadataRegistry.class)
));
ByteBuffer mockSocket = ByteBuffer.wrap("Hello, World!".getBytes(StandardCharsets.UTF_8));
when(sslTransportLayer.read(any(ByteBuffer.class))).then(invocation -> {
ByteBuffer buf = invocation.getArgument(0);
int read = buf.remaining();
for (int i = 0; i < read; i++) {
buf.put(mockSocket.get());
}
return read;
});
ByteBuffer[] dsts = {
ByteBuffer.allocate(2),
ByteBuffer.allocate(3),
ByteBuffer.allocate(4)
};
assertEquals(7, sslTransportLayer.read(dsts, 1, 2));
assertArrayEquals("Hel".getBytes(StandardCharsets.UTF_8), dsts[1].array());
assertArrayEquals("lo, ".getBytes(StandardCharsets.UTF_8), dsts[2].array());
}
}
| ResizeableBufferSize |
java | hibernate__hibernate-orm | tooling/metamodel-generator/src/test/java/org/hibernate/processor/test/superdao/generic/SuperDao.java | {
"start": 376,
"end": 564
} | interface ____<T,K> {
EntityManager em();
@Find
T get(K isbn);
@Find
List<T> books1(@Pattern String title);
@HQL("where title like :title")
List<T> books2(String title);
}
| SuperDao |
java | netty__netty | codec-http/src/main/java/io/netty/handler/codec/http/Cookie.java | {
"start": 902,
"end": 5548
} | interface ____ extends io.netty.handler.codec.http.cookie.Cookie {
/**
* @deprecated Use {@link #name()} instead.
*/
@Deprecated
String getName();
/**
* @deprecated Use {@link #value()} instead.
*/
@Deprecated
String getValue();
/**
* @deprecated Use {@link #domain()} instead.
*/
@Deprecated
String getDomain();
/**
* @deprecated Use {@link #path()} instead.
*/
@Deprecated
String getPath();
/**
* @deprecated Use {@link #comment()} instead.
*/
@Deprecated
String getComment();
/**
* Returns the comment of this {@link Cookie}.
*
* @return The comment of this {@link Cookie}
*
* @deprecated Not part of RFC6265
*/
@Deprecated
String comment();
/**
* Sets the comment of this {@link Cookie}.
*
* @param comment The comment to use
*
* @deprecated Not part of RFC6265
*/
@Deprecated
void setComment(String comment);
/**
* @deprecated Use {@link #maxAge()} instead.
*/
@Deprecated
long getMaxAge();
/**
* Returns the maximum age of this {@link Cookie} in seconds or {@link Long#MIN_VALUE} if unspecified
*
* @return The maximum age of this {@link Cookie}
*
* @deprecated Not part of RFC6265
*/
@Deprecated
@Override
long maxAge();
/**
* Sets the maximum age of this {@link Cookie} in seconds.
* If an age of {@code 0} is specified, this {@link Cookie} will be
* automatically removed by browser because it will expire immediately.
* If {@link Long#MIN_VALUE} is specified, this {@link Cookie} will be removed when the
* browser is closed.
*
* @param maxAge The maximum age of this {@link Cookie} in seconds
*
* @deprecated Not part of RFC6265
*/
@Deprecated
@Override
void setMaxAge(long maxAge);
/**
* @deprecated Use {@link #version()} instead.
*/
@Deprecated
int getVersion();
/**
* Returns the version of this {@link Cookie}.
*
* @return The version of this {@link Cookie}
*
* @deprecated Not part of RFC6265
*/
@Deprecated
int version();
/**
* Sets the version of this {@link Cookie}.
*
* @param version The new version to use
*
* @deprecated Not part of RFC6265
*/
@Deprecated
void setVersion(int version);
/**
* @deprecated Use {@link #commentUrl()} instead.
*/
@Deprecated
String getCommentUrl();
/**
* Returns the comment URL of this {@link Cookie}.
*
* @return The comment URL of this {@link Cookie}
*
* @deprecated Not part of RFC6265
*/
@Deprecated
String commentUrl();
/**
* Sets the comment URL of this {@link Cookie}.
*
* @param commentUrl The comment URL to use
*
* @deprecated Not part of RFC6265
*/
@Deprecated
void setCommentUrl(String commentUrl);
/**
* Checks to see if this {@link Cookie} is to be discarded by the browser
* at the end of the current session.
*
* @return True if this {@link Cookie} is to be discarded, otherwise false
*
* @deprecated Not part of RFC6265
*/
@Deprecated
boolean isDiscard();
/**
* Sets the discard flag of this {@link Cookie}.
* If set to true, this {@link Cookie} will be discarded by the browser
* at the end of the current session
*
* @param discard True if the {@link Cookie} is to be discarded
*
* @deprecated Not part of RFC6265
*/
@Deprecated
void setDiscard(boolean discard);
/**
* @deprecated Use {@link #ports()} instead.
*/
@Deprecated
Set<Integer> getPorts();
/**
* Returns the ports that this {@link Cookie} can be accessed on.
*
* @return The {@link Set} of ports that this {@link Cookie} can use
*
* @deprecated Not part of RFC6265
*/
@Deprecated
Set<Integer> ports();
/**
* Sets the ports that this {@link Cookie} can be accessed on.
*
* @param ports The ports that this {@link Cookie} can be accessed on
*
* @deprecated Not part of RFC6265
*/
@Deprecated
void setPorts(int... ports);
/**
* Sets the ports that this {@link Cookie} can be accessed on.
*
* @param ports The {@link Iterable} collection of ports that this
* {@link Cookie} can be accessed on.
*
* @deprecated Not part of RFC6265
*/
@Deprecated
void setPorts(Iterable<Integer> ports);
}
| Cookie |
java | apache__camel | components/camel-netty-http/src/test/java/org/apache/camel/component/netty/http/NettyHttpRedeliveryTest.java | {
"start": 1266,
"end": 3099
} | class ____ extends BaseNettyTest {
private final CountDownLatch latch = new CountDownLatch(5);
@Test
public void testHttpRedelivery() throws Exception {
getMockEndpoint("mock:result").expectedBodiesReceived("Bye World");
assertTrue(latch.await(5, TimeUnit.SECONDS));
context.getRouteController().startRoute("bar");
MockEndpoint.assertIsSatisfied(context);
context.getRouteController().stopRoute("foo");
assertEquals(0, context.getInflightRepository().size());
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
onException(Exception.class)
.maximumRedeliveries(50).redeliveryDelay(100).onExceptionOccurred(
new Processor() {
@Override
public void process(Exchange exchange) {
// signal to start the route (after 5 attempts)
latch.countDown();
// and there is only 1 inflight
assertEquals(1, context.getInflightRepository().size());
}
});
from("timer:foo?repeatCount=1").routeId("foo")
.to("netty-http:http://0.0.0.0:{{port}}/bar?keepAlive=false&disconnect=true&connectTimeout=100ms")
.to("mock:result");
from("netty-http:http://0.0.0.0:{{port}}/bar").routeId("bar").autoStartup(false)
.setBody().constant("Bye World");
}
};
}
}
| NettyHttpRedeliveryTest |
java | apache__camel | core/camel-core/src/test/java/org/apache/camel/impl/converter/TypeConvertersTest.java | {
"start": 1056,
"end": 2701
} | class ____ extends ContextTestSupport {
private final MyConverters converters = new MyConverters();
@Test
public void testAdd() {
int before = context.getTypeConverterRegistry().size();
context.getTypeConverterRegistry().addTypeConverters(converters);
int after = context.getTypeConverterRegistry().size();
int delta = after - before;
assertEquals(2, delta, "There should be 2 more type converters");
Country country = context.getTypeConverter().convertTo(Country.class, "en");
assertNotNull(country);
assertEquals("England", country.getName());
String iso = context.getTypeConverter().convertTo(String.class, country);
assertNotNull(iso);
assertEquals("en", iso);
}
@Test
public void testStringToPrimitiveTypes() throws Exception {
assertEquals(Short.parseShort("1"), context.getTypeConverter().mandatoryConvertTo(short.class, "1"));
assertEquals(Integer.parseInt("1"), context.getTypeConverter().mandatoryConvertTo(int.class, "1"));
assertEquals(Long.parseLong("1"), context.getTypeConverter().mandatoryConvertTo(long.class, "1"));
assertEquals(Float.parseFloat("1.1"), context.getTypeConverter().mandatoryConvertTo(float.class, "1.1"));
assertEquals(Double.parseDouble("1.1"), context.getTypeConverter().mandatoryConvertTo(double.class, "1.1"));
assertEquals('a', context.getTypeConverter().mandatoryConvertTo(char.class, "a"));
assertEquals(Boolean.parseBoolean("true"), context.getTypeConverter().mandatoryConvertTo(boolean.class, "true"));
}
}
| TypeConvertersTest |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/insertordering/InsertOrderingTest.java | {
"start": 2401,
"end": 2729
} | class ____ implements BatchBuilder {
@Override
public Batch buildBatch(BatchKey key, Integer batchSize, Supplier<PreparedStatementGroup> statementGroupSupplier, JdbcCoordinator jdbcCoordinator) {
return new StatsBatch( key, batchSize, statementGroupSupplier.get(), jdbcCoordinator );
}
}
public static | StatsBatchBuilder |
java | spring-projects__spring-framework | spring-jms/src/main/java/org/springframework/jms/core/DefaultJmsClient.java | {
"start": 2942,
"end": 7067
} | class ____ implements OperationSpec {
private final JmsMessagingTemplate delegate;
private @Nullable JmsTemplate customTemplate;
public DefaultOperationSpec(Destination destination) {
this.delegate = newDelegate();
this.delegate.setDefaultDestination(destination);
}
public DefaultOperationSpec(String destinationName) {
this.delegate = newDelegate();
this.delegate.setDefaultDestinationName(destinationName);
}
private JmsTemplate enforceCustomTemplate(boolean qos) {
if (this.customTemplate == null) {
JmsOperations jmsOperations = DefaultJmsClient.this.jmsTemplate;
if (!(jmsOperations instanceof JmsAccessor original)) {
throw new IllegalStateException(
"Needs to be bound to a JmsAccessor for custom settings support: " + jmsOperations);
}
this.customTemplate = new JmsTemplate(original);
this.delegate.setJmsTemplate(this.customTemplate);
}
if (qos) {
this.customTemplate.setExplicitQosEnabled(true);
}
return this.customTemplate;
}
@Override
public OperationSpec withReceiveTimeout(long receiveTimeout) {
enforceCustomTemplate(false).setReceiveTimeout(receiveTimeout);
return this;
}
@Override
public OperationSpec withDeliveryDelay(long deliveryDelay) {
enforceCustomTemplate(false).setDeliveryDelay(deliveryDelay);
return this;
}
@Override
public OperationSpec withDeliveryPersistent(boolean persistent) {
enforceCustomTemplate(true).setDeliveryPersistent(persistent);
return this;
}
@Override
public OperationSpec withPriority(int priority) {
enforceCustomTemplate(true).setPriority(priority);
return this;
}
@Override
public OperationSpec withTimeToLive(long timeToLive) {
enforceCustomTemplate(true).setTimeToLive(timeToLive);
return this;
}
@Override
public void send(Message<?> message) throws MessagingException {
message = postProcessMessage(message);
this.delegate.send(message);
}
@Override
public void send(Object payload) throws MessagingException {
this.delegate.convertAndSend(payload, DefaultJmsClient.this.messagePostProcessor);
}
@Override
public void send(Object payload, Map<String, Object> headers) throws MessagingException {
this.delegate.convertAndSend(payload, headers, DefaultJmsClient.this.messagePostProcessor);
}
@Override
public Optional<Message<?>> receive() throws MessagingException {
return Optional.ofNullable(this.delegate.receive());
}
@Override
public <T> Optional<T> receive(Class<T> targetClass) throws MessagingException {
return Optional.ofNullable(this.delegate.receiveAndConvert(targetClass));
}
@Override
public Optional<Message<?>> receive(String messageSelector) throws MessagingException {
return Optional.ofNullable(this.delegate.receiveSelected(messageSelector));
}
@Override
public <T> Optional<T> receive(String messageSelector, Class<T> targetClass) throws MessagingException {
return Optional.ofNullable(this.delegate.receiveSelectedAndConvert(messageSelector, targetClass));
}
@Override
public Optional<Message<?>> sendAndReceive(Message<?> requestMessage) throws MessagingException {
requestMessage = postProcessMessage(requestMessage);
return Optional.ofNullable(this.delegate.sendAndReceive(requestMessage));
}
@Override
public <T> Optional<T> sendAndReceive(Object request, Class<T> targetClass) throws MessagingException {
return Optional.ofNullable(this.delegate.convertSendAndReceive(request, targetClass, DefaultJmsClient.this.messagePostProcessor));
}
@Override
public <T> Optional<T> sendAndReceive(Object request, Map<String, Object> headers, Class<T> targetClass)
throws MessagingException {
return Optional.ofNullable(this.delegate.convertSendAndReceive(request, headers, targetClass, DefaultJmsClient.this.messagePostProcessor));
}
private Message<?> postProcessMessage(Message<?> message) {
if (DefaultJmsClient.this.messagePostProcessor != null) {
return DefaultJmsClient.this.messagePostProcessor.postProcessMessage(message);
}
return message;
}
}
}
| DefaultOperationSpec |
java | quarkusio__quarkus | core/deployment/src/main/java/io/quarkus/deployment/dev/testing/JunitTestRunner.java | {
"start": 53561,
"end": 54659
} | class ____ implements PostDiscoveryFilter {
final boolean exclude;
final Pattern pattern;
private RegexFilter(boolean exclude, Pattern pattern) {
this.exclude = exclude;
this.pattern = pattern;
}
@Override
public FilterResult apply(TestDescriptor testDescriptor) {
if (testDescriptor.getSource().isPresent()) {
if (testDescriptor.getSource().get() instanceof MethodSource methodSource) {
String name = methodSource.getClassName();
if (pattern.matcher(name).matches()) {
return FilterResult.includedIf(!exclude);
}
return FilterResult.includedIf(exclude);
}
}
return FilterResult.included("not a method");
}
}
// https://maven.apache.org/surefire/maven-surefire-plugin/test-mojo.html#test
// org.apache.maven.surefire.api.testset.TestListResolver
// org.apache.maven.surefire.api.testset.ResolvedTest
private static | RegexFilter |
java | alibaba__nacos | sys/src/test/java/com/alibaba/nacos/sys/utils/MethodUtilTest.java | {
"start": 851,
"end": 1915
} | class ____ {
private static final Method DOUBLE_METHOD;
private static final Method LONG_METHOD;
static {
try {
DOUBLE_METHOD = InternalMethod.class.getMethod("getD");
LONG_METHOD = InternalMethod.class.getMethod("getL");
} catch (NoSuchMethodException e) {
throw new RuntimeException(e);
}
}
@Test
void invokeAndReturnDouble() {
InternalMethod internalMethod = new InternalMethod();
assertNotEquals(Double.NaN, MethodUtil.invokeAndReturnDouble(DOUBLE_METHOD, internalMethod), 0.000001d);
assertEquals(Double.NaN, MethodUtil.invokeAndReturnDouble(LONG_METHOD, internalMethod), 0.000001d);
}
@Test
void invokeAndReturnLong() {
InternalMethod internalMethod = new InternalMethod();
assertEquals(100L, MethodUtil.invokeAndReturnLong(LONG_METHOD, internalMethod));
assertNotEquals(100L, MethodUtil.invokeAndReturnLong(DOUBLE_METHOD, internalMethod));
}
public static | MethodUtilTest |
java | micronaut-projects__micronaut-core | inject-java/src/test/groovy/io/micronaut/inject/generics/inheritance/UserDaoClient.java | {
"start": 103,
"end": 240
} | class ____ extends DaoClient<User>{
public UserDaoClient(Dao<User> constructorDao) {
super(constructorDao);
}
}
| UserDaoClient |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/reservation/ReservationSystemUtil.java | {
"start": 2235,
"end": 7669
} | class ____ {
private ReservationSystemUtil() {
// not called
}
public static Resource toResource(ReservationRequest request) {
Resource resource = Resources.multiply(request.getCapability(),
(float) request.getNumContainers());
return resource;
}
public static Map<ReservationInterval, Resource> toResources(
Map<ReservationInterval, ReservationRequest> allocations) {
Map<ReservationInterval, Resource> resources =
new HashMap<ReservationInterval, Resource>();
for (Map.Entry<ReservationInterval, ReservationRequest> entry :
allocations.entrySet()) {
resources.put(entry.getKey(),
toResource(entry.getValue()));
}
return resources;
}
public static ReservationAllocationStateProto buildStateProto(
ReservationAllocation allocation) {
ReservationAllocationStateProto.Builder builder =
ReservationAllocationStateProto.newBuilder();
builder.setAcceptanceTime(allocation.getAcceptanceTime());
builder.setContainsGangs(allocation.containsGangs());
builder.setStartTime(allocation.getStartTime());
builder.setEndTime(allocation.getEndTime());
builder.setUser(allocation.getUser());
ReservationDefinitionProto definitionProto = convertToProtoFormat(
allocation.getReservationDefinition());
builder.setReservationDefinition(definitionProto);
for (Map.Entry<ReservationInterval, Resource> entry :
allocation.getAllocationRequests().entrySet()) {
ResourceAllocationRequestProto p =
ResourceAllocationRequestProto.newBuilder()
.setStartTime(entry.getKey().getStartTime())
.setEndTime(entry.getKey().getEndTime())
.setResource(convertToProtoFormat(entry.getValue()))
.build();
builder.addAllocationRequests(p);
}
ReservationAllocationStateProto allocationProto = builder.build();
return allocationProto;
}
private static ReservationDefinitionProto convertToProtoFormat(
ReservationDefinition reservationDefinition) {
return ((ReservationDefinitionPBImpl)reservationDefinition).getProto();
}
public static ResourceProto convertToProtoFormat(Resource e) {
return YarnProtos.ResourceProto.newBuilder()
.setMemory(e.getMemorySize())
.setVirtualCores(e.getVirtualCores())
.build();
}
public static Map<ReservationInterval, Resource> toAllocations(
List<ResourceAllocationRequestProto> allocationRequestsList) {
Map<ReservationInterval, Resource> allocations = new HashMap<>();
for (ResourceAllocationRequestProto proto : allocationRequestsList) {
allocations.put(
new ReservationInterval(proto.getStartTime(), proto.getEndTime()),
convertFromProtoFormat(proto.getResource()));
}
return allocations;
}
private static ResourcePBImpl convertFromProtoFormat(ResourceProto resource) {
return new ResourcePBImpl(resource);
}
public static ReservationDefinitionPBImpl convertFromProtoFormat(
ReservationDefinitionProto r) {
return new ReservationDefinitionPBImpl(r);
}
public static ReservationIdPBImpl convertFromProtoFormat(
ReservationIdProto r) {
return new ReservationIdPBImpl(r);
}
public static ReservationId toReservationId(
ReservationIdProto reservationId) {
return new ReservationIdPBImpl(reservationId);
}
public static InMemoryReservationAllocation toInMemoryAllocation(
String planName, ReservationId reservationId,
ReservationAllocationStateProto allocationState, Resource minAlloc,
ResourceCalculator planResourceCalculator) {
ReservationDefinition definition =
convertFromProtoFormat(
allocationState.getReservationDefinition());
Map<ReservationInterval, Resource> allocations = toAllocations(
allocationState.getAllocationRequestsList());
InMemoryReservationAllocation allocation =
new InMemoryReservationAllocation(reservationId, definition,
allocationState.getUser(), planName, allocationState.getStartTime(),
allocationState.getEndTime(), allocations, planResourceCalculator,
minAlloc, allocationState.getContainsGangs());
return allocation;
}
public static List<ReservationAllocationState>
convertAllocationsToReservationInfo(Set<ReservationAllocation> res,
boolean includeResourceAllocations) {
List<ReservationAllocationState> reservationInfo = new ArrayList<>();
Map<ReservationInterval, Resource> requests;
for (ReservationAllocation allocation : res) {
List<ResourceAllocationRequest> allocations = new ArrayList<>();
if (includeResourceAllocations) {
requests = allocation.getAllocationRequests();
for (Map.Entry<ReservationInterval, Resource> request :
requests.entrySet()) {
ReservationInterval interval = request.getKey();
allocations.add(ResourceAllocationRequest.newInstance(
interval.getStartTime(), interval.getEndTime(),
request.getValue()));
}
}
reservationInfo.add(ReservationAllocationState.newInstance(
allocation.getAcceptanceTime(), allocation.getUser(),
allocations, allocation.getReservationId(),
allocation.getReservationDefinition()));
}
return reservationInfo;
}
}
| ReservationSystemUtil |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestMultiThreadedHflush.java | {
"start": 1745,
"end": 2770
} | class ____ {
static final int blockSize = 1024*1024;
private static final int NUM_THREADS = 10;
private static final int WRITE_SIZE = 517;
private static final int NUM_WRITES_PER_THREAD = 1000;
private byte[] toWrite = null;
private final SampleQuantiles quantiles = new SampleQuantiles(
new Quantile[] {
new Quantile(0.50, 0.050),
new Quantile(0.75, 0.025), new Quantile(0.90, 0.010),
new Quantile(0.95, 0.005), new Quantile(0.99, 0.001) });
/*
* creates a file but does not close it
*/
private FSDataOutputStream createFile(FileSystem fileSys, Path name, int repl)
throws IOException {
FSDataOutputStream stm = fileSys.create(name, true, fileSys.getConf()
.getInt(CommonConfigurationKeys.IO_FILE_BUFFER_SIZE_KEY, 4096),
(short) repl, blockSize);
return stm;
}
private void initBuffer(int size) {
long seed = AppendTestUtil.nextLong();
toWrite = AppendTestUtil.randomBytes(seed, size);
}
private | TestMultiThreadedHflush |
java | redisson__redisson | redisson-micronaut/redisson-micronaut-30/src/test/java/org/redisson/micronaut/RedissonSessionTest.java | {
"start": 1215,
"end": 7055
} | class ____ implements ApplicationEventListener<AbstractSessionEvent> {
List<AbstractSessionEvent> events = new ArrayList<>();
@Override
public void onApplicationEvent(AbstractSessionEvent event) {
events.add(event);
}
public List<AbstractSessionEvent> getEvents() {
return events;
}
}
@Test
public void testWriteBehind() throws ExecutionException, InterruptedException {
Map<String, Object> map = new HashMap<>();
map.put("redisson.threads", "10");
map.put("micronaut.session.http.redisson.enabled", "true");
map.put("micronaut.session.http.redisson.updateMode", "WRITE_BEHIND");
map.put("redisson.singleServerConfig.address", "redis://127.0.0.1:6379");
ApplicationContext ac = ApplicationContext.run(map);
RedissonClient rc = ac.getBean(RedissonClient.class);
rc.getKeys().flushall();
RedissonSessionStore sessionStore = ac.getBean(RedissonSessionStore.class);
RedissonSession session = sessionStore.newSession();
session.put("key1", "oleg");
session.put("key2", new MyObject("myname"));
session.setMaxInactiveInterval(Duration.ofSeconds(30));
RedissonSession saved = sessionStore.save(session).get();
saved.remove("key2");
saved.put("key1", "alba");
RedissonSession s = sessionStore.findSession(saved.getId()).get().get();
assertThat(s.get("key1").get()).isEqualTo("alba");
assertThat(s.contains("key2")).isFalse();
ac.stop();
}
@Test
public void testSessionExpiration() throws ExecutionException, InterruptedException {
Map<String, Object> map = new HashMap<>();
map.put("redisson.threads", "10");
map.put("micronaut.session.http.redisson.enabled", "true");
map.put("redisson.singleServerConfig.address", "redis://127.0.0.1:6379");
ApplicationContext ac = ApplicationContext.run(map);
RedissonClient rc = ac.getBean(RedissonClient.class);
rc.getKeys().flushall();
RedissonSessionStore sessionStore = ac.getBean(RedissonSessionStore.class);
RedissonSession session = sessionStore.newSession();
session.put("username", "oleg");
session.put("foo", new MyObject("myname"));
session.setMaxInactiveInterval(Duration.ofSeconds(30));
RedissonSession saved = sessionStore.save(session).get();
testData(saved);
Thread.sleep(30500);
Optional<RedissonSession> noSession = sessionStore.findSession(saved.getId()).get();
assertThat(noSession).isEmpty();
Thread.sleep(10000);
assertThat(rc.getKeys().count()).isZero();
ac.stop();
}
@Test
public void testSessionCreate() throws ExecutionException, InterruptedException {
Map<String, Object> map = new HashMap<>();
map.put("redisson.threads", "10");
map.put("micronaut.session.http.redisson.enabled", "true");
map.put("redisson.singleServerConfig.address", "redis://127.0.0.1:6379");
ApplicationContext ac = ApplicationContext.run(map);
RedissonClient rc = ac.getBean(RedissonClient.class);
AppListener listener = ac.getBean(AppListener.class);
rc.getKeys().flushall();
RedissonSessionStore sessionStore = ac.getBean(RedissonSessionStore.class);
RedissonSession session = sessionStore.newSession();
session.put("username", "oleg");
session.put("foo", new MyObject("myname"));
RedissonSession saved = sessionStore.save(session).get();
testData(saved);
assertThat(listener.getEvents()).hasSize(1);
assertThat(listener.getEvents().get(0)).isInstanceOf(SessionCreatedEvent.class);
listener.getEvents().clear();
RedissonSession loaded = sessionStore.findSession(saved.getId()).get().get();
testData(loaded);
loaded.put("key", "value");
loaded.remove("username");
loaded.setLastAccessedTime(Instant.now());
loaded.setMaxInactiveInterval(Duration.ofMinutes(1));
sessionStore.save(loaded).get();
assertThat(listener.getEvents()).isEmpty();
loaded = sessionStore.findSession(saved.getId()).get().get();
assertThat(listener.getEvents()).isEmpty();
assertThat(loaded.contains("username")).isFalse();
assertThat(((MyObject) loaded.get("foo").get()).getName()).isEqualTo("myname");
assertThat(loaded.get("key").get()).isEqualTo("value");
assertThat(loaded.isExpired()).isFalse();
assertThat(loaded.getCreationTime().getEpochSecond()).isEqualTo(saved.getCreationTime().getEpochSecond());
assertThat(loaded.getMaxInactiveInterval()).isEqualTo(Duration.ofMinutes(1));
assertThat(loaded.getId()).isEqualTo(saved.getId());
Boolean deleted = sessionStore.deleteSession(saved.getId()).get();
assertThat(deleted).isTrue();
Thread.sleep(1500);
assertThat(listener.getEvents()).hasSize(1);
assertThat(listener.getEvents().get(0)).isInstanceOf(SessionDeletedEvent.class);
Optional<RedissonSession> noSession = sessionStore.findSession(saved.getId()).get();
assertThat(noSession).isEmpty();
Thread.sleep(11000);
assertThat(rc.getKeys().count()).isZero();
ac.stop();
}
private void testData(RedissonSession saved) {
assertThat(saved.get("username").get()).isEqualTo("oleg");
assertThat(((MyObject) saved.get("foo").get()).getName()).isEqualTo("myname");
assertThat(saved.isExpired()).isFalse();
assertThat(saved.getCreationTime()).isNotNull();
assertThat(saved.getMaxInactiveInterval()).isNotNull();
assertThat(saved.getId()).isNotNull();
}
}
| AppListener |
java | alibaba__nacos | plugin/environment/src/main/java/com/alibaba/nacos/plugin/environment/spi/CustomEnvironmentPluginService.java | {
"start": 794,
"end": 1371
} | interface ____ {
/**
* customValue interface.
*
* @param property property key value
* @return custom key value
*/
Map<String, Object> customValue(Map<String, Object> property);
/**
* propertyKey interface.
*
* @return propertyKey property Key
*/
Set<String> propertyKey();
/**
* order The larger the priority, the higher the priority.
*
* @return order
*/
Integer order();
/**
* pluginName.
*
* @return
*/
String pluginName();
}
| CustomEnvironmentPluginService |
java | quarkusio__quarkus | independent-projects/resteasy-reactive/server/vertx/src/test/java/org/jboss/resteasy/reactive/server/vertx/test/response/NullHeaderTestCase.java | {
"start": 671,
"end": 1122
} | class ____ {
@RegisterExtension
static ResteasyReactiveUnitTest runner = new ResteasyReactiveUnitTest()
.withApplicationRoot((jar) -> jar
.addClasses(NullFilter.class, NullResource.class));
@Test
void nullHeaderTest() {
when()
.get("/null")
.then().statusCode(200)
.header("nullHeader", "");
}
@Provider
public static | NullHeaderTestCase |
java | hibernate__hibernate-orm | hibernate-envers/src/main/java/org/hibernate/envers/configuration/internal/metadata/ComponentMetadataGenerator.java | {
"start": 1519,
"end": 5854
} | class ____ extends AbstractMetadataGenerator {
private final ValueMetadataGenerator valueGenerator;
ComponentMetadataGenerator(EnversMetadataBuildingContext metadataBuildingContext, ValueMetadataGenerator valueGenerator) {
super( metadataBuildingContext );
this.valueGenerator = valueGenerator;
}
@SuppressWarnings("unchecked")
public void addComponent(
AttributeContainer attributeContainer,
PropertyAuditingData propertyAuditingData,
Value value,
CompositeMapperBuilder mapper,
String entityName,
EntityMappingData mappingData,
boolean firstPass) {
final Component propComponent = (Component) value;
final EmbeddableInstantiator instantiator;
if ( propComponent.getCustomInstantiator() != null ) {
if ( !getMetadataBuildingContext().getBuildingOptions().isAllowExtensionsInCdi() ) {
instantiator = FallbackBeanInstanceProducer.INSTANCE.produceBeanInstance( propComponent.getCustomInstantiator() );
}
else {
instantiator =
getMetadataBuildingContext().getBootstrapContext().getManagedBeanRegistry()
.getBean( propComponent.getCustomInstantiator() )
.getBeanInstance();
}
}
else if ( propComponent.getTypeName() != null ) {
final Class<CompositeUserType<?>> userTypeClass = getMetadataBuildingContext().getBootstrapContext()
.getClassLoaderAccess()
.classForName( propComponent.getTypeName() );
if ( !getMetadataBuildingContext().getBuildingOptions().isAllowExtensionsInCdi() ) {
final CompositeUserType<?> compositeUserType = FallbackBeanInstanceProducer.INSTANCE.produceBeanInstance( userTypeClass );
//noinspection rawtypes
instantiator = new EmbeddableCompositeUserTypeInstantiator( (CompositeUserType) compositeUserType );
}
else {
final CompositeUserType<Object> compositeUserType = (CompositeUserType<Object>)
getMetadataBuildingContext().getBootstrapContext().getManagedBeanRegistry()
.getBean( userTypeClass )
.getBeanInstance();
instantiator = new EmbeddableCompositeUserTypeInstantiator( compositeUserType );
}
}
else if ( propComponent.getInstantiator() != null ) {
instantiator = EmbeddableInstantiatorPojoIndirecting.of(
propComponent.getPropertyNames(),
propComponent.getInstantiator(),
propComponent.getInstantiatorPropertyNames()
);
}
else if ( propComponent.getComponentClass() != null &&
propComponent.getComponentClass().isRecord() ) {
if ( propComponent.sortProperties() == null ) {
instantiator = new EmbeddableInstantiatorRecordStandard( propComponent.getComponentClass() );
}
else {
instantiator = EmbeddableInstantiatorRecordIndirecting.of(
propComponent.getComponentClass(),
propComponent.getPropertyNames()
);
}
}
else {
instantiator = null;
}
final CompositeMapperBuilder componentMapper = mapper.addComponent(
propertyAuditingData.resolvePropertyData(),
ClassLoaderAccessHelper.loadClass(
getMetadataBuildingContext(),
getClassNameForComponent( propComponent )
),
instantiator
);
// The property auditing data must be for a component.
final ComponentAuditingData componentAuditingData = (ComponentAuditingData) propertyAuditingData;
// Adding all properties of the component
propComponent.sortProperties();
for ( Property property : propComponent.getProperties() ) {
final PropertyAuditingData componentPropertyAuditingData =
componentAuditingData.getPropertyAuditingData( property.getName() );
// Checking if that property is audited
if ( componentPropertyAuditingData != null ) {
valueGenerator.addValue(
attributeContainer,
property.getValue(),
property.getPropertyAccessStrategy(),
componentMapper,
entityName,
mappingData,
componentPropertyAuditingData,
property.isInsertable(),
firstPass,
false
);
}
}
if ( !firstPass ) {
final EntityConfiguration owningEntityConfiguration = getAuditedEntityConfigurations().get( entityName );
owningEntityConfiguration.addToOneComponent( propertyAuditingData.getName(), componentAuditingData );
}
}
private String getClassNameForComponent(Component component) {
return component.isDynamic() ? Map.class.getCanonicalName() : component.getComponentClassName();
}
}
| ComponentMetadataGenerator |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/jpa/metamodel/EntityInterfaceExtendsMapTest.java | {
"start": 2774,
"end": 2873
} | interface ____ extends Map<String, BookEntity> {
}
@Entity( name = "LibraryEntity" )
static | Library |
java | elastic__elasticsearch | x-pack/plugin/snapshot-repo-test-kit/qa/s3/src/javaRestTest/java/org/elasticsearch/repositories/blobstore/testkit/analyze/AbstractS3RepositoryAnalysisRestTestCase.java | {
"start": 1178,
"end": 5891
} | class ____ extends S3HttpFixture {
RepositoryAnalysisHttpFixture(boolean enabled) {
super(enabled, "bucket", "base_path_integration_tests", fixedAccessKey("s3_test_access_key", regionSupplier, "s3"));
}
private volatile boolean repoAnalysisStarted;
@Override
protected HttpHandler createHandler() {
final var delegateHandler = asInstanceOf(S3HttpHandler.class, super.createHandler());
return exchange -> {
ensurePurposeParameterPresent(delegateHandler.parseRequest(exchange));
delegateHandler.handle(exchange);
};
}
private void ensurePurposeParameterPresent(S3HttpHandler.S3Request request) {
if (request.path().startsWith("/bucket/base_path_integration_tests/temp-analysis-")) {
repoAnalysisStarted = true;
}
if (repoAnalysisStarted == false) {
if (Regex.simpleMatch("/bucket/base_path_integration_tests/tests-*/master.dat", request.path())
|| Regex.simpleMatch("/bucket/base_path_integration_tests/tests-*/data-*.dat", request.path())
|| (request.isListObjectsRequest()
&& request.getQueryParamOnce("prefix").startsWith("base_path_integration_tests/tests-"))
|| (request.isMultiObjectDeleteRequest())) {
// verify repository is not part of repo analysis so will have different/missing x-purpose parameter
return;
}
if (request.isListObjectsRequest() && request.getQueryParamOnce("prefix").equals("base_path_integration_tests/index-")) {
// getRepositoryData looking for root index-N blob will have different/missing x-purpose parameter
return;
}
repoAnalysisStarted = true;
}
assertTrue(request.toString(), request.hasQueryParamOnce("x-purpose"));
assertEquals(request.toString(), "RepositoryAnalysis", request.getQueryParamOnce("x-purpose"));
}
}
protected static final String CLIENT_NAME = "repo_test_kit";
protected static ElasticsearchCluster buildCluster(S3HttpFixture s3HttpFixture, boolean enabled) {
final var clientPrefix = "s3.client." + CLIENT_NAME + ".";
return ElasticsearchCluster.local()
.distribution(DistributionType.DEFAULT)
.keystore(clientPrefix + "access_key", System.getProperty("s3AccessKey"))
.keystore(clientPrefix + "secret_key", System.getProperty("s3SecretKey"))
.setting(clientPrefix + "protocol", () -> "http", (n) -> enabled)
.setting(clientPrefix + "region", regionSupplier, (n) -> enabled)
.setting(clientPrefix + "add_purpose_custom_query_parameter", () -> randomFrom("true", "false"), n -> randomBoolean())
.setting(clientPrefix + "endpoint", s3HttpFixture::getAddress, (n) -> enabled)
.setting(
"repository_s3.compare_and_exchange.anti_contention_delay",
() -> randomFrom("1s" /* == default */, "1ms"),
n -> randomBoolean()
)
.setting("xpack.security.enabled", "false")
.setting("thread_pool.snapshot.max", "10")
.build();
}
@Override
protected Settings repositorySettings() {
final String bucket = System.getProperty("test.s3.bucket");
assertThat(bucket, not(blankOrNullString()));
final String basePath = System.getProperty("test.s3.base_path");
assertThat(basePath, not(blankOrNullString()));
return Settings.builder()
.put("client", CLIENT_NAME)
.put("bucket", bucket)
.put("base_path", basePath)
.put("delete_objects_max_size", between(1, 1000))
.put("buffer_size", ByteSizeValue.ofMb(5)) // so some uploads are multipart ones
.put("max_copy_size_before_multipart", ByteSizeValue.ofMb(5))
// verify we always set the x-purpose header even if disabled for other repository operations
.put(randomBooleanSetting("add_purpose_custom_query_parameter"))
// this parameter is ignored for repo analysis
.put(randomBooleanSetting("unsafely_incompatible_with_s3_conditional_writes"))
.build();
}
private static Settings randomBooleanSetting(String settingKey) {
return randomFrom(Settings.EMPTY, Settings.builder().put(settingKey, randomBoolean()).build());
}
@Override
protected String repositoryType() {
return "s3";
}
}
| RepositoryAnalysisHttpFixture |
java | google__guava | android/guava/src/com/google/common/collect/LinkedHashMultimap.java | {
"start": 10325,
"end": 18766
} | class ____ extends Sets.ImprovedAbstractSet<V> {
/*
* We currently use a fixed load factor of 1.0, a bit higher than normal to reduce memory
* consumption.
*/
@ParametricNullness private final K key;
@VisibleForTesting @Nullable ValueEntry<K, V>[] hashTable;
private int size = 0;
private int modCount = 0;
private @Nullable ValueEntry<K, V> firstEntry;
private @Nullable ValueEntry<K, V> lastEntry;
ValueSet(@ParametricNullness K key, int expectedValues) {
this.key = key;
// Round expected values up to a power of 2 to get the table size.
int tableSize = Hashing.closedTableSize(expectedValues, VALUE_SET_LOAD_FACTOR);
@SuppressWarnings({"rawtypes", "unchecked"})
@Nullable ValueEntry<K, V>[] hashTable = new @Nullable ValueEntry[tableSize];
this.hashTable = hashTable;
}
private void succeedsInValueSet(
@Nullable ValueEntry<K, V> pred, @Nullable ValueEntry<K, V> succ) {
if (pred == null) {
firstEntry = succ;
} else {
pred.successorInValueSet = succ;
}
if (succ == null) {
lastEntry = pred;
} else {
succ.predecessorInValueSet = pred;
}
}
private void deleteFromValueSet(ValueEntry<K, V> entry) {
succeedsInValueSet(entry.predecessorInValueSet, entry.successorInValueSet);
}
private void appendToValueSet(ValueEntry<K, V> newEntry) {
succeedsInValueSet(lastEntry, newEntry);
lastEntry = newEntry;
}
private int mask() {
return hashTable.length - 1;
}
@Override
public Iterator<V> iterator() {
return new Iterator<V>() {
@Nullable ValueEntry<K, V> nextEntry = firstEntry;
@Nullable ValueEntry<K, V> toRemove;
int expectedModCount = modCount;
private void checkForComodification() {
if (modCount != expectedModCount) {
throw new ConcurrentModificationException();
}
}
@Override
public boolean hasNext() {
checkForComodification();
return nextEntry != null;
}
@Override
@ParametricNullness
public V next() {
checkForComodification();
ValueEntry<K, V> entry = nextEntry;
if (entry == null) {
throw new NoSuchElementException();
}
V result = entry.getValue();
toRemove = entry;
nextEntry = entry.successorInValueSet;
return result;
}
@Override
public void remove() {
checkForComodification();
checkState(toRemove != null, "no calls to next() since the last call to remove()");
ValueSet.this.remove(toRemove.getValue());
expectedModCount = modCount;
toRemove = null;
}
};
}
@Override
public int size() {
return size;
}
@Override
public boolean contains(@Nullable Object o) {
int smearedHash = Hashing.smearedHash(o);
for (ValueEntry<K, V> entry = hashTable[smearedHash & mask()];
entry != null;
entry = entry.nextInValueBucket) {
if (entry.matchesValue(o, smearedHash)) {
return true;
}
}
return false;
}
@Override
public boolean add(@ParametricNullness V value) {
int smearedHash = Hashing.smearedHash(value);
int bucket = smearedHash & mask();
ValueEntry<K, V> rowHead = hashTable[bucket];
for (ValueEntry<K, V> entry = rowHead; entry != null; entry = entry.nextInValueBucket) {
if (entry.matchesValue(value, smearedHash)) {
return false;
}
}
ValueEntry<K, V> newEntry = new ValueEntry<>(key, value, smearedHash, rowHead);
appendToValueSet(newEntry);
multimapIterationChain.append(newEntry);
hashTable[bucket] = newEntry;
size++;
modCount++;
rehashIfNecessary();
return true;
}
    /**
     * Doubles the bucket table when the load factor is exceeded and rebuilds every bucket
     * chain by walking the insertion-order chain and prepending each entry into its new
     * bucket. Iteration order is unaffected (it lives in the value-set chain, not the
     * buckets).
     */
    private void rehashIfNecessary() {
      if (Hashing.needsResizing(size, hashTable.length, VALUE_SET_LOAD_FACTOR)) {
        // Local `hashTable` deliberately shadows the field so the unchecked array creation
        // can be annotated; the field is updated immediately below.
        @SuppressWarnings("unchecked")
        ValueEntry<K, V>[] hashTable =
            (ValueEntry<K, V>[]) new ValueEntry<?, ?>[this.hashTable.length * 2];
        this.hashTable = hashTable;
        int mask = hashTable.length - 1;
        for (ValueEntry<K, V> entry = firstEntry;
            entry != null;
            entry = entry.successorInValueSet) {
          int bucket = entry.smearedValueHash & mask;
          // Prepend: old nextInValueBucket links are overwritten for every entry.
          entry.nextInValueBucket = hashTable[bucket];
          hashTable[bucket] = entry;
        }
      }
    }
    /**
     * Removes {@code o} if present: unlinks it from its hash bucket chain, from this value
     * set's insertion-order chain, and from the multimap-wide iteration chain. Returns
     * {@code true} if the set was modified.
     */
    @CanIgnoreReturnValue
    @Override
    public boolean remove(@Nullable Object o) {
      int smearedHash = Hashing.smearedHash(o);
      int bucket = smearedHash & mask();
      // Track the previous bucket entry so the match can be unlinked from the chain.
      ValueEntry<K, V> prev = null;
      for (ValueEntry<K, V> entry = hashTable[bucket];
          entry != null;
          prev = entry, entry = entry.nextInValueBucket) {
        if (entry.matchesValue(o, smearedHash)) {
          if (prev == null) {
            // first entry in the bucket
            hashTable[bucket] = entry.nextInValueBucket;
          } else {
            prev.nextInValueBucket = entry.nextInValueBucket;
          }
          deleteFromValueSet(entry);
          multimapIterationChain.delete(entry);
          size--;
          modCount++;
          return true;
        }
      }
      return false;
    }
    /**
     * Removes every value: empties the bucket table, detaches each entry from the
     * multimap-wide iteration chain, and resets the insertion-order chain.
     */
    @Override
    public void clear() {
      Arrays.fill(hashTable, null);
      size = 0;
      // Entries must be walked (not just dropped) so the multimap iteration chain stays
      // consistent with this set being emptied.
      for (ValueEntry<K, V> entry = firstEntry; entry != null; entry = entry.successorInValueSet) {
        multimapIterationChain.delete(entry);
        // TODO(cpovirk): Also clear *InValueSet (after reading next) and nextInValueBucket?
      }
      firstEntry = null;
      lastEntry = null;
      modCount++;
    }
}
  /**
   * Returns an iterator over all multimap entries in the multimap-wide iteration chain's
   * order. Removal delegates to {@code LinkedHashMultimap.remove(key, value)} so every
   * internal structure stays consistent. Note: unlike the per-key value iterator, this one
   * performs no comodification check itself.
   */
  @Override
  Iterator<Entry<K, V>> entryIterator() {
    return new Iterator<Entry<K, V>>() {
      // Next entry to return, walking the multimap-wide chain.
      @Nullable ValueEntry<K, V> nextEntry = multimapIterationChain.firstEntry;
      // Entry returned by the most recent next(); null until next() and after remove().
      @Nullable ValueEntry<K, V> toRemove;
      @Override
      public boolean hasNext() {
        return nextEntry != null;
      }
      @Override
      public Entry<K, V> next() {
        ValueEntry<K, V> entry = nextEntry;
        if (entry == null) {
          throw new NoSuchElementException();
        }
        toRemove = entry;
        nextEntry = entry.successorInMultimap;
        return entry;
      }
      @Override
      public void remove() {
        checkState(toRemove != null, "no calls to next() since the last call to remove()");
        LinkedHashMultimap.this.remove(toRemove.getKey(), toRemove.getValue());
        toRemove = null;
      }
    };
  }
  /** Returns an iterator over all values, derived from {@link #entryIterator()}'s order. */
  @Override
  Iterator<V> valueIterator() {
    return Maps.valueIterator(entryIterator());
  }
  /**
   * @serialData the number of distinct keys, then each key, then the total number of
   *     entries, then each entry's key and value in iteration order
   */
  @GwtIncompatible
  @J2ktIncompatible
  private void writeObject(ObjectOutputStream stream) throws IOException {
    stream.defaultWriteObject();
    // Keys are written up front so readObject can pre-create each key's value collection.
    stream.writeInt(keySet().size());
    for (K key : keySet()) {
      stream.writeObject(key);
    }
    stream.writeInt(size());
    for (Entry<K, V> entry : entries()) {
      stream.writeObject(entry.getKey());
      stream.writeObject(entry.getValue());
    }
  }
@GwtIncompatible
@J2ktIncompatible
private void readObject(ObjectInputStream stream) throws IOException, ClassNotFoundException {
stream.defaultReadObject();
multimapIterationChain = new MultimapIterationChain<>();
valueSetCapacity = DEFAULT_VALUE_SET_CAPACITY;
int distinctKeys = stream.readInt();
Map<K, Collection<V>> map = Platform.newLinkedHashMapWithExpectedSize(12);
for (int i = 0; i < distinctKeys; i++) {
@SuppressWarnings("unchecked")
K key = (K) stream.readObject();
map.put(key, createCollection(key));
}
int entries = stream.readInt();
for (int i = 0; i < entries; i++) {
@SuppressWarnings("unchecked")
K key = (K) stream.readObject();
@SuppressWarnings("unchecked")
V value = (V) stream.readObject();
/*
* requireNonNull is safe for a properly serialized multimap: We've already inserted a
* collection for each key that we expect.
*/
requireNonNull(map.get(key)).add(value);
}
setMap(map);
}
private static final | ValueSet |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/idclass/IdClassSingleOneToOneTest.java | {
"start": 926,
"end": 1520
} | class ____ {
@Test
public void test(SessionFactoryScope scope) {
scope.getSessionFactory();
scope.inTransaction( session -> {
EntityA entityA = new EntityA(3);
EntityB entityB = new EntityB( entityA );
entityA.entityB = entityB;
session.persist( entityA );
session.persist( entityB );
assertEquals( new EntityBId(3),
session.getIdentifier( entityB ) );
} );
scope.inTransaction( session -> {
EntityB entityB = session.find( EntityB.class, new EntityBId(3) );
assertNotNull( entityB );
} );
}
@Entity( name = "EntityA" )
static | IdClassSingleOneToOneTest |
java | spring-projects__spring-boot | module/spring-boot-micrometer-tracing/src/test/java/org/springframework/boot/micrometer/tracing/autoconfigure/MicrometerTracingAutoConfigurationTests.java | {
"start": 10321,
"end": 10497
} | class ____ {
@Bean
Propagator propagator() {
return mock(Propagator.class);
}
}
@Configuration(proxyBeanMethods = false)
private static final | PropagatorConfiguration |
java | spring-projects__spring-framework | spring-test/src/test/java/org/springframework/test/web/servlet/assertj/MockMvcTesterIntegrationTests.java | {
"start": 15474,
"end": 21713
} | class ____ {
@Test
void hasFailedWithUnresolvedException() {
assertThat(mvc.get().uri("/error/1")).hasFailed();
}
@Test
void hasFailedWithResolvedException() {
assertThat(mvc.get().uri("/error/2")).hasFailed().hasStatus(HttpStatus.PAYMENT_REQUIRED);
}
@Test
void doesNotHaveFailedWithoutException() {
assertThat(mvc.get().uri("/greet")).doesNotHaveFailed();
}
@Test
void doesNotHaveFailedWithUnresolvedException() {
assertThatExceptionOfType(AssertionError.class)
.isThrownBy(() -> assertThat(mvc.get().uri("/error/1")).doesNotHaveFailed())
.withMessage("Expected request to succeed, but it failed");
}
@Test
void doesNotHaveFailedWithResolvedException() {
assertThatExceptionOfType(AssertionError.class)
.isThrownBy(() -> assertThat(mvc.get().uri("/error/2")).doesNotHaveFailed())
.withMessage("Expected request to succeed, but it failed");
}
@Test
void hasFailedWithoutException() {
assertThatExceptionOfType(AssertionError.class)
.isThrownBy(() -> assertThat(mvc.get().uri("/greet")).hasFailed())
.withMessage("Expected request to fail, but it succeeded");
}
@Test
void failureWithUnresolvedException() {
assertThat(mvc.get().uri("/error/1")).failure()
.isInstanceOf(ServletException.class)
.cause().isInstanceOf(IllegalStateException.class).hasMessage("Expected");
}
@Test
void failureWithResolvedException() {
assertThat(mvc.get().uri("/error/2")).failure()
.isInstanceOfSatisfying(ResponseStatusException.class, ex ->
assertThat(ex.getStatusCode()).isEqualTo(HttpStatus.PAYMENT_REQUIRED));
}
@Test
void failureWithoutException() {
assertThatExceptionOfType(AssertionError.class)
.isThrownBy(() -> assertThat(mvc.get().uri("/greet")).failure())
.withMessage("Expected request to fail, but it succeeded");
}
// Check that assertions fail immediately if request failed with unresolved exception
@Test
void assertAndApplyWithUnresolvedException() {
testAssertionFailureWithUnresolvableException(
result -> assertThat(result).apply(mvcResult -> {}));
}
@Test
void assertContentTypeWithUnresolvedException() {
testAssertionFailureWithUnresolvableException(
result -> assertThat(result).contentType());
}
@Test
void assertCookiesWithUnresolvedException() {
testAssertionFailureWithUnresolvableException(
result -> assertThat(result).cookies());
}
@Test
void assertFlashWithUnresolvedException() {
testAssertionFailureWithUnresolvableException(
result -> assertThat(result).flash());
}
@Test
void assertStatusWithUnresolvedException() {
testAssertionFailureWithUnresolvableException(
result -> assertThat(result).hasStatus(3));
}
@Test
void assertHeaderWithUnresolvedException() {
testAssertionFailureWithUnresolvableException(
result -> assertThat(result).headers());
}
@Test
void assertViewNameWithUnresolvedException() {
testAssertionFailureWithUnresolvableException(
result -> assertThat(result).hasViewName("test"));
}
@Test
void assertForwardedUrlWithUnresolvedException() {
testAssertionFailureWithUnresolvableException(
result -> assertThat(result).hasForwardedUrl("test"));
}
@Test
void assertRedirectedUrlWithUnresolvedException() {
testAssertionFailureWithUnresolvableException(
result -> assertThat(result).hasRedirectedUrl("test"));
}
@Test
void assertErrorMessageWithUnresolvedException() {
assertThatExceptionOfType(AssertionError.class)
.isThrownBy(() -> assertThat(mvc.get().uri("/error/message")).hasErrorMessage("invalid"))
.withMessageContainingAll("[Servlet error message]", "invalid", "expected error message");
}
@Test
void assertRequestWithUnresolvedException() {
testAssertionFailureWithUnresolvableException(
result -> assertThat(result).request());
}
@Test
void assertModelWithUnresolvedException() {
testAssertionFailureWithUnresolvableException(
result -> assertThat(result).model());
}
@Test
void assertBodyWithUnresolvedException() {
testAssertionFailureWithUnresolvableException(
result -> assertThat(result).body());
}
private void testAssertionFailureWithUnresolvableException(Consumer<MvcTestResult> assertions) {
MvcTestResult result = mvc.get().uri("/error/1").exchange();
assertThatExceptionOfType(AssertionError.class)
.isThrownBy(() -> assertions.accept(result))
.withMessageContainingAll("Request failed unexpectedly:",
ServletException.class.getName(), IllegalStateException.class.getName(),
"Expected");
}
}
@Test
void hasForwardUrl() {
assertThat(mvc.get().uri("/persons/John")).hasForwardedUrl("persons/index");
}
@Test
void hasRedirectUrl() {
assertThat(mvc.post().uri("/persons").param("name", "Andy")).hasStatus(HttpStatus.FOUND)
.hasRedirectedUrl("/persons/Andy");
}
@Test
void satisfiesAllowsAdditionalAssertions() {
assertThat(mvc.get().uri("/greet")).satisfies(result -> {
assertThat(result).isInstanceOf(MvcTestResult.class);
assertThat(result).hasStatusOk();
});
}
@Test
void resultMatcherCanBeReused() throws Exception {
MvcTestResult result = mvc.get().uri("/greet").exchange();
ResultMatcher matcher = mock(ResultMatcher.class);
assertThat(result).matches(matcher);
verify(matcher).match(result.getMvcResult());
}
@Test
void resultMatcherFailsWithDedicatedException() {
ResultMatcher matcher = result -> assertThat(result.getResponse().getStatus())
.isEqualTo(HttpStatus.NOT_FOUND.value());
assertThatExceptionOfType(AssertionError.class)
.isThrownBy(() -> assertThat(mvc.get().uri("/greet")).matches(matcher))
.withMessageContaining("expected: 404").withMessageContaining(" but was: 200");
}
@Test
void shouldApplyResultHandler() { // Spring RESTDocs example
AtomicBoolean applied = new AtomicBoolean();
assertThat(mvc.get().uri("/greet")).apply(result -> applied.set(true));
assertThat(applied).isTrue();
}
@Configuration
@EnableWebMvc
@Import({ TestController.class, PersonController.class, AsyncController.class,
MultipartController.class, SessionController.class, ErrorController.class })
static | ExceptionTests |
java | quarkusio__quarkus | extensions/netty/runtime/src/main/java/io/quarkus/netty/runtime/graal/NettySubstitutions.java | {
"start": 22381,
"end": 23879
} | class ____ {
@Alias
boolean strict;
@Substitute
protected EmbeddedChannel newContentDecompressor(ChannelHandlerContext ctx, CharSequence contentEncoding)
throws Http2Exception {
if (GZIP.contentEqualsIgnoreCase(contentEncoding) || X_GZIP.contentEqualsIgnoreCase(contentEncoding)) {
return new EmbeddedChannel(ctx.channel().id(), ctx.channel().metadata().hasDisconnect(),
ctx.channel().config(), ZlibCodecFactory.newZlibDecoder(ZlibWrapper.GZIP));
}
if (DEFLATE.contentEqualsIgnoreCase(contentEncoding) || X_DEFLATE.contentEqualsIgnoreCase(contentEncoding)) {
final ZlibWrapper wrapper = strict ? ZlibWrapper.ZLIB : ZlibWrapper.ZLIB_OR_NONE;
// To be strict, 'deflate' means ZLIB, but some servers were not implemented correctly.
return new EmbeddedChannel(ctx.channel().id(), ctx.channel().metadata().hasDisconnect(),
ctx.channel().config(), ZlibCodecFactory.newZlibDecoder(wrapper));
}
if (Brotli.isAvailable() && BR.contentEqualsIgnoreCase(contentEncoding)) {
return new EmbeddedChannel(ctx.channel().id(), ctx.channel().metadata().hasDisconnect(),
ctx.channel().config(), new BrotliDecoder());
}
// 'identity' or unsupported
return null;
}
}
@TargetClass(className = "io.netty.handler.ssl.SslHandler")
final | Target_io_netty_handler_codec_http2_DelegatingDecompressorFrameListener |
java | micronaut-projects__micronaut-core | context/src/main/java/io/micronaut/logging/impl/LogbackLoggingSystem.java | {
"start": 1250,
"end": 3141
} | class ____ implements LoggingSystem {
private static final String DEFAULT_LOGBACK_LOCATION = "logback.xml";
private final String logbackXmlLocation;
/**
* @param logbackExternalConfigLocation The location of the logback configuration file set via logback properties
* @param logbackXmlLocation The location of the logback configuration file set via micronaut properties
* @since 3.8.8
*/
public LogbackLoggingSystem(
@Nullable @Property(name = "logback.configurationFile") String logbackExternalConfigLocation,
@Nullable @Property(name = "logger.config") String logbackXmlLocation
) {
if (logbackExternalConfigLocation != null) {
this.logbackXmlLocation = logbackExternalConfigLocation;
} else if (logbackXmlLocation != null) {
this.logbackXmlLocation = logbackXmlLocation;
} else {
this.logbackXmlLocation = DEFAULT_LOGBACK_LOCATION;
}
}
@Override
public void setLogLevel(String name, LogLevel level) {
getLoggerContext().getLogger(name).setLevel(toLevel(level));
}
@Override
public void refresh() {
LoggerContext context = getLoggerContext();
context.reset();
LogbackUtils.configure(getClass().getClassLoader(), context, logbackXmlLocation);
}
/**
* @return The logback {@link LoggerContext}
*/
private static LoggerContext getLoggerContext() {
return (LoggerContext) LoggerFactory.getILoggerFactory();
}
/**
* @param logLevel The micronaut {@link LogLevel} to convert
* @return The converted logback {@link Level}
*/
private static Level toLevel(LogLevel logLevel) {
if (logLevel == LogLevel.NOT_SPECIFIED) {
return null;
} else {
return Level.valueOf(logLevel.name());
}
}
}
| LogbackLoggingSystem |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestConfigurationFieldsBase.java | {
"start": 13922,
"end": 14088
} | class ____
assertNotNull(xmlFilename, "XML file name is null");
assertNotNull(configurationClasses, "Configuration classes array is null");
// Create | members |
java | elastic__elasticsearch | x-pack/plugin/security/qa/smoke-test-all-realms/src/javaRestTest/java/org/elasticsearch/xpack/security/authc/FileRealmAuthIT.java | {
"start": 948,
"end": 4344
} | class ____ extends SecurityRealmSmokeTestCase {
// Declared in build.gradle
private static final String USERNAME = "security_test_user";
private static final String ANOTHER_USERNAME = "index_and_app_user";
private static final SecureString PASSWORD = new SecureString("security-test-password".toCharArray());
private static final String ROLE_NAME = "security_test_role";
public void testAuthenticationUsingFileRealm() throws IOException {
Map<String, Object> authenticate = super.authenticate(
RequestOptions.DEFAULT.toBuilder().addHeader("Authorization", UsernamePasswordToken.basicAuthHeaderValue(USERNAME, PASSWORD))
);
assertUsername(authenticate, USERNAME);
assertRealm(authenticate, "file", "file0");
assertRoles(authenticate, ROLE_NAME);
assertNoApiKeyInfo(authenticate, Authentication.AuthenticationType.REALM);
}
public void testAuthenticationUsingFileRealmAndNoSecurityIndex() throws IOException {
Map<String, Object> authenticate = super.authenticate(
RequestOptions.DEFAULT.toBuilder()
.addHeader("Authorization", UsernamePasswordToken.basicAuthHeaderValue(ANOTHER_USERNAME, PASSWORD))
);
try {
// create user to ensure the .security-7 index exists
createUser("dummy", new SecureString("longpassword".toCharArray()), List.of("whatever"));
// close the .security-7 to simulate making it unavailable
Request closeRequest = new Request(HttpPost.METHOD_NAME, TestRestrictedIndices.INTERNAL_SECURITY_MAIN_INDEX_7 + "/_close");
closeRequest.setOptions(
RequestOptions.DEFAULT.toBuilder()
.addHeader("Authorization", UsernamePasswordToken.basicAuthHeaderValue(ANOTHER_USERNAME, PASSWORD))
.setWarningsHandler(WarningsHandler.PERMISSIVE)
);
assertOK(client().performRequest(closeRequest));
// clear the authentication cache
Request clearCachesRequest = new Request(HttpPost.METHOD_NAME, "_security/realm/*/_clear_cache");
clearCachesRequest.setOptions(
RequestOptions.DEFAULT.toBuilder()
.addHeader("Authorization", UsernamePasswordToken.basicAuthHeaderValue(ANOTHER_USERNAME, PASSWORD))
);
assertOK(client().performRequest(clearCachesRequest));
// file-realm authentication still works when cache is cleared and .security-7 is out
assertUsername(authenticate, ANOTHER_USERNAME);
assertRealm(authenticate, "file", "file0");
assertRoles(authenticate, "all_index_privileges", "all_application_privileges");
assertNoApiKeyInfo(authenticate, Authentication.AuthenticationType.REALM);
} finally {
Request openRequest = new Request(HttpPost.METHOD_NAME, TestRestrictedIndices.INTERNAL_SECURITY_MAIN_INDEX_7 + "/_open");
openRequest.setOptions(
RequestOptions.DEFAULT.toBuilder()
.addHeader("Authorization", UsernamePasswordToken.basicAuthHeaderValue(ANOTHER_USERNAME, PASSWORD))
.setWarningsHandler(WarningsHandler.PERMISSIVE)
);
assertOK(client().performRequest(openRequest));
deleteUser("dummy");
}
}
}
| FileRealmAuthIT |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/inheritance/InheritanceToOneSubtypeJoinGroupByTest.java | {
"start": 3166,
"end": 3540
} | class ____ {
@ManyToOne
private EntityB b;
@ManyToOne
private EntityA a;
public WhitelistEntryPK() {
}
public EntityB getB() {
return b;
}
public void setB(EntityB b) {
this.b = b;
}
public EntityA getA() {
return a;
}
public void setA(EntityA a) {
this.a = a;
}
}
@Entity(name = "WhitelistEntry")
public static | WhitelistEntryPK |
java | quarkusio__quarkus | extensions/cache/deployment/src/test/java/io/quarkus/cache/test/runtime/ProgrammaticApiTest.java | {
"start": 10797,
"end": 11738
} | class ____ {
Cache constructorInjectedCache;
Cache methodInjectedCache;
public CachedService(@CacheName(CACHE_NAME_1) Cache cache) {
constructorInjectedCache = cache;
}
public Cache getConstructorInjectedCache() {
return constructorInjectedCache;
}
public Cache getMethodInjectedCache() {
return methodInjectedCache;
}
@Inject
public void setMethodInjectedCache(@CacheName(CACHE_NAME_1) Cache cache) {
methodInjectedCache = cache;
}
@CacheResult(cacheName = CACHE_NAME_1)
public String cachedMethod(Object key) {
return new String();
}
@CacheInvalidate(cacheName = CACHE_NAME_1)
public void invalidate(Object key) {
}
@CacheInvalidateAll(cacheName = CACHE_NAME_1)
public void invalidateAll() {
}
}
}
| CachedService |
java | apache__maven | impl/maven-core/src/test/java/org/apache/maven/lifecycle/internal/LifecycleExecutionPlanCalculatorTest.java | {
"start": 1574,
"end": 4010
} | class ____ extends AbstractCoreMavenComponentTestCase {
@Test
void testCalculateExecutionPlanWithGoalTasks() throws Exception {
MojoDescriptorCreator mojoDescriptorCreator = createMojoDescriptorCreator();
LifecycleExecutionPlanCalculator lifecycleExecutionPlanCalculator =
createExecutionPlaceCalculator(mojoDescriptorCreator);
final GoalTask goalTask1 = new GoalTask("compiler:compile");
final GoalTask goalTask2 = new GoalTask("surefire:test");
final TaskSegment taskSegment1 = new TaskSegment(false, goalTask1, goalTask2);
final MavenSession session1 = ProjectDependencyGraphStub.getMavenSession(ProjectDependencyGraphStub.A);
MavenExecutionPlan executionPlan = lifecycleExecutionPlanCalculator.calculateExecutionPlan(
session1, ProjectDependencyGraphStub.A, taskSegment1.getTasks());
assertEquals(2, executionPlan.size());
final GoalTask goalTask3 = new GoalTask("surefire:test");
final TaskSegment taskSegment2 = new TaskSegment(false, goalTask1, goalTask2, goalTask3);
MavenExecutionPlan executionPlan2 = lifecycleExecutionPlanCalculator.calculateExecutionPlan(
session1, ProjectDependencyGraphStub.A, taskSegment2.getTasks());
assertEquals(3, executionPlan2.size());
}
// Maybe also make one with LifeCycleTasks
public static LifecycleExecutionPlanCalculator createExecutionPlaceCalculator(
MojoDescriptorCreator mojoDescriptorCreator) throws ComponentLookupException {
LifecyclePluginResolver lifecyclePluginResolver = new LifecyclePluginResolver(new PluginVersionResolverStub());
return new DefaultLifecycleExecutionPlanCalculator(
new BuildPluginManagerStub(),
DefaultLifecyclesStub.createDefaultLifecycles(),
mojoDescriptorCreator,
lifecyclePluginResolver);
}
public static MojoDescriptorCreator createMojoDescriptorCreator() {
return new MojoDescriptorCreator(
new PluginVersionResolverStub(),
new BuildPluginManagerStub(),
new PluginPrefixResolverStub(),
new LifecyclePluginResolver(new PluginVersionResolverStub()));
}
@Override
protected String getProjectsDirectory() {
return "src/test/projects/lifecycle-executor";
}
}
| LifecycleExecutionPlanCalculatorTest |
java | netty__netty | handler/src/main/java/io/netty/handler/ssl/ApplicationProtocolConfig.java | {
"start": 4567,
"end": 4748
} | enum ____ {
NONE, NPN, ALPN, NPN_AND_ALPN
}
/**
* Defines the most common behaviors for the peer that selects the application protocol.
*/
public | Protocol |
java | quarkusio__quarkus | extensions/tls-registry/deployment/src/test/java/io/quarkus/tls/TooManyKeyStoreConfiguredPemAndP12Test.java | {
"start": 728,
"end": 1598
} | class ____ {
private static final String configuration = """
quarkus.tls.key-store.pem.0.cert=target/certs/test-formats.crt
quarkus.tls.key-store.pem.0.key=target/certs/test-formats.key
quarkus.tls.key-store.p12.path=target/certs/test-formats-keystore.p12
quarkus.tls.key-store.p12.password=password
""";
@RegisterExtension
static final QuarkusUnitTest config = new QuarkusUnitTest().setArchiveProducer(
() -> ShrinkWrap.create(JavaArchive.class)
.add(new StringAsset(configuration), "application.properties"))
.assertException(t -> {
assertThat(t).hasMessageContaining("PEM", "PKCS12");
});
@Test
void shouldNotBeCalled() {
fail("This test should not be called");
}
}
| TooManyKeyStoreConfiguredPemAndP12Test |
java | google__guava | android/guava/src/com/google/common/collect/MapMakerInternalMap.java | {
"start": 10499,
"end": 11660
} | enum ____ {
STRONG {
@Override
Equivalence<Object> defaultEquivalence() {
return Equivalence.equals();
}
},
WEAK {
@Override
Equivalence<Object> defaultEquivalence() {
return Equivalence.identity();
}
};
/**
* Returns the default equivalence strategy used to compare and hash keys or values referenced
* at this strength. This strategy will be used unless the user explicitly specifies an
* alternate strategy.
*/
abstract Equivalence<Object> defaultEquivalence();
}
/**
* A helper object for operating on {@link InternalEntry} instances in a type-safe and efficient
* manner.
*
* <p>For each of the four combinations of strong/weak key and strong/weak value, there are
* corresponding {@link InternalEntry}, {@link Segment}, and {@link InternalEntryHelper}
* implementations.
*
* @param <K> the type of the key in each entry
* @param <V> the type of the value in each entry
* @param <E> the type of the {@link InternalEntry} entry implementation
* @param <S> the type of the {@link Segment} entry implementation
*/
| Strength |
java | netty__netty | codec-http2/src/test/java/io/netty/handler/codec/http2/TestHeaderListener.java | {
"start": 1304,
"end": 1681
} | class ____ extends DefaultHttp2Headers {
private final List<HpackHeaderField> headers;
TestHeaderListener(List<HpackHeaderField> headers) {
this.headers = headers;
}
@Override
public TestHeaderListener add(CharSequence name, CharSequence value) {
headers.add(new HpackHeaderField(name, value));
return this;
}
}
| TestHeaderListener |
java | micronaut-projects__micronaut-core | inject/src/main/java/io/micronaut/context/BeanDefinitionRegistry.java | {
"start": 1254,
"end": 1382
} | interface ____ methods to find {@link BeanDefinition} instances.</p>
*
* @author Graeme Rocher
* @since 1.0
*/
public | containing |
java | quarkusio__quarkus | independent-projects/resteasy-reactive/server/runtime/src/main/java/org/jboss/resteasy/reactive/server/jaxrs/ResponseBuilderImpl.java | {
"start": 412,
"end": 1786
} | class ____ extends AbstractResponseBuilder {
@Override
public Response.ResponseBuilder location(URI location) {
if (location == null) {
metadata.remove(HttpHeaders.LOCATION);
return this;
}
metadata.putSingle(HttpHeaders.LOCATION, determineLocation(location));
return this;
}
@Override
public Response.ResponseBuilder contentLocation(URI location) {
if (location == null) {
metadata.remove(HttpHeaders.CONTENT_LOCATION);
return this;
}
metadata.putSingle(HttpHeaders.CONTENT_LOCATION, determineContentLocation(location));
return this;
}
@Override
protected AbstractResponseBuilder doClone() {
return new ResponseBuilderImpl();
}
//TODO: add the rest of static methods of Response if we need them
public static ResponseBuilderImpl withStatus(Response.Status status) {
return (ResponseBuilderImpl) new ResponseBuilderImpl().status(status);
}
public static ResponseBuilderImpl ok() {
return withStatus(Response.Status.OK);
}
public static ResponseBuilderImpl ok(Object entity) {
return (ResponseBuilderImpl) ok().entity(entity);
}
public static ResponseBuilderImpl noContent() {
return withStatus(Response.Status.NO_CONTENT);
}
}
| ResponseBuilderImpl |
java | elastic__elasticsearch | build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/transform/SerializableJsonNode.java | {
"start": 1050,
"end": 1685
} | class ____<T extends JsonNode> implements Serializable {
private Object value;
private Class<? extends JsonNode> type;
SerializableJsonNode(Object value, Class<? extends JsonNode> type) {
this.value = value;
this.type = type;
}
public static SerializableJsonNode of(Object value, Class<? extends JsonNode> type) {
return new SerializableJsonNode(value, type);
}
public T toJsonNode() {
YAMLFactory YAML_FACTORY = new YAMLFactory();
ObjectMapper MAPPER = new ObjectMapper(YAML_FACTORY);
return (T) MAPPER.convertValue(value, type);
}
}
| SerializableJsonNode |
java | elastic__elasticsearch | x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetCalendarEventsAction.java | {
"start": 1347,
"end": 4567
} | class ____ extends HandledTransportAction<
GetCalendarEventsAction.Request,
GetCalendarEventsAction.Response> {
private final JobResultsProvider jobResultsProvider;
private final JobConfigProvider jobConfigProvider;
@Inject
public TransportGetCalendarEventsAction(
TransportService transportService,
ActionFilters actionFilters,
JobResultsProvider jobResultsProvider,
JobConfigProvider jobConfigProvider
) {
super(
GetCalendarEventsAction.NAME,
transportService,
actionFilters,
GetCalendarEventsAction.Request::new,
EsExecutors.DIRECT_EXECUTOR_SERVICE
);
this.jobResultsProvider = jobResultsProvider;
this.jobConfigProvider = jobConfigProvider;
}
@Override
protected void doExecute(
Task task,
GetCalendarEventsAction.Request request,
ActionListener<GetCalendarEventsAction.Response> listener
) {
final String[] calendarId = Strings.splitStringByCommaToArray(request.getCalendarId());
checkCalendarExists(calendarId, listener.delegateFailureAndWrap((outerDelegate, r) -> {
ScheduledEventsQueryBuilder query = new ScheduledEventsQueryBuilder().start(request.getStart())
.end(request.getEnd())
.from(request.getPageParams().getFrom())
.size(request.getPageParams().getSize())
.calendarIds(calendarId);
ActionListener<QueryPage<ScheduledEvent>> eventsListener = outerDelegate.delegateFailureAndWrap(
(l, events) -> l.onResponse(new GetCalendarEventsAction.Response(events))
);
if (request.getJobId() != null) {
jobConfigProvider.getJob(request.getJobId(), null, ActionListener.wrap(jobBuilder -> {
Job job = jobBuilder.build();
jobResultsProvider.scheduledEventsForJob(request.getJobId(), job.getGroups(), query, eventsListener);
}, jobNotFound -> {
// is the request Id a group?
jobConfigProvider.groupExists(request.getJobId(), eventsListener.delegateFailureAndWrap((delegate, groupExists) -> {
if (groupExists) {
jobResultsProvider.scheduledEventsForJob(null, Collections.singletonList(request.getJobId()), query, delegate);
} else {
delegate.onFailure(ExceptionsHelper.missingJobException(request.getJobId()));
}
}));
}));
} else {
jobResultsProvider.scheduledEvents(query, eventsListener);
}
}));
}
private void checkCalendarExists(String[] calendarId, ActionListener<Boolean> listener) {
if (Strings.isAllOrWildcard(calendarId)) {
listener.onResponse(true);
return;
}
jobResultsProvider.calendars(
CalendarQueryBuilder.builder().calendarIdTokens(calendarId),
listener.delegateFailureAndWrap((l, c) -> l.onResponse(true))
);
}
}
| TransportGetCalendarEventsAction |
java | quarkusio__quarkus | integration-tests/test-extension/extension/deployment/src/test/java/io/quarkus/config/BuildTimeRunTimeConfigTest.java | {
"start": 881,
"end": 3296
} | class ____ {
@RegisterExtension
static final QuarkusDevModeTest TEST = new QuarkusDevModeTest()
.setArchiveProducer(() -> {
try {
String props = new String(FileUtil.readFileContents(
BuildTimeRunTimeConfigTest.class.getClassLoader().getResourceAsStream("application.properties")),
StandardCharsets.UTF_8);
return ShrinkWrap.create(JavaArchive.class)
.addClasses(DevBean.class)
.addAsResource(new StringAsset(props + "\n" +
"quarkus.application.name=my-app\n" +
"quarkus.application.version=${quarkus.http.ssl.client-auth}"),
"application.properties");
} catch (IOException e) {
throw new RuntimeException(e);
}
}).setLogRecordPredicate(logRecord -> !logRecord.getMessage().contains("but it is build time fixed to"));
@Test
void buildTimeRunTimeConfig() {
// A combination of QuarkusUnitTest and QuarkusProdModeTest tests ordering may mess with the port leaving it in
// 8081 and QuarkusDevModeTest does not change to the right port.
RestAssured.port = -1;
RestAssured.when().get("/application").then()
.statusCode(200)
.body(is("my-app"));
RestAssured.when().get("/tls").then()
.statusCode(200)
.body(is("false"));
RestAssured.when().get("/source/quarkus.application.name").then()
.statusCode(200)
.body(is("BuildTime RunTime Fixed"));
TEST.modifyResourceFile("application.properties", s -> s + "\n" +
"quarkus.application.name=modified-app\n" +
"quarkus.tls.trust-all=true\n");
RestAssured.when().get("/application").then()
.statusCode(200)
.body(is("modified-app"));
RestAssured.when().get("/tls").then()
.statusCode(200)
.body(is("true"));
RestAssured.when().get("/source/quarkus.application.name").then()
.statusCode(200)
.body(is("BuildTime RunTime Fixed"));
}
@ApplicationScoped
public static | BuildTimeRunTimeConfigTest |
java | apache__kafka | metadata/src/main/java/org/apache/kafka/metadata/authorizer/StandardAuthorizerData.java | {
"start": 21795,
"end": 22596
} | class ____ implements MatchingRule {
private static final SuperUserRule INSTANCE = new SuperUserRule();
@Override
public AuthorizationResult result() {
return ALLOWED;
}
@Override
public String toString() {
return "SuperUser";
}
}
private record DefaultRule(AuthorizationResult result) implements MatchingRule {
@Override
public String toString() {
return result == ALLOWED ? "DefaultAllow" : "DefaultDeny";
}
}
private record MatchingAclRule(StandardAcl acl, AuthorizationResult result) implements MatchingRule {
@Override
public String toString() {
return "MatchingAcl(acl=" + acl + ")";
}
}
private static | SuperUserRule |
java | spring-projects__spring-framework | spring-context/src/main/java/org/springframework/context/ApplicationListener.java | {
"start": 863,
"end": 1659
} | interface ____ the
* Observer design pattern.
*
* <p>An {@code ApplicationListener} can generically declare the event type that
* it is interested in. When registered with a Spring {@code ApplicationContext},
* events will be filtered accordingly, with the listener getting invoked for
* matching event objects only.
*
* @author Rod Johnson
* @author Juergen Hoeller
* @param <E> the specific {@code ApplicationEvent} subclass to listen to
* @see org.springframework.context.ApplicationEvent
* @see org.springframework.context.event.ApplicationEventMulticaster
* @see org.springframework.context.event.SmartApplicationListener
* @see org.springframework.context.event.GenericApplicationListener
* @see org.springframework.context.event.EventListener
*/
@FunctionalInterface
public | for |
java | apache__kafka | clients/src/main/java/org/apache/kafka/clients/admin/SupportedVersionRange.java | {
"start": 968,
"end": 2725
} | class ____ {
private final short minVersion;
private final short maxVersion;
/**
* Raises an exception unless the following conditions are met:
* 0 <= minVersion <= maxVersion.
*
* @param minVersion The minimum version value.
* @param maxVersion The maximum version value.
*
* @throws IllegalArgumentException Raised when the condition described above is not met.
*/
public SupportedVersionRange(final short minVersion, final short maxVersion) {
if (minVersion < 0 || maxVersion < 0 || maxVersion < minVersion) {
throw new IllegalArgumentException(
String.format(
"Expected 0 <= minVersion <= maxVersion but received minVersion:%d, maxVersion:%d.",
minVersion,
maxVersion));
}
this.minVersion = minVersion;
this.maxVersion = maxVersion;
}
public short minVersion() {
return minVersion;
}
public short maxVersion() {
return maxVersion;
}
@Override
public boolean equals(Object other) {
if (this == other) {
return true;
}
if (other == null || getClass() != other.getClass()) {
return false;
}
final SupportedVersionRange that = (SupportedVersionRange) other;
return this.minVersion == that.minVersion && this.maxVersion == that.maxVersion;
}
@Override
public int hashCode() {
return Objects.hash(minVersion, maxVersion);
}
@Override
public String toString() {
return String.format("SupportedVersionRange[min_version:%d, max_version:%d]", minVersion, maxVersion);
}
}
| SupportedVersionRange |
java | spring-projects__spring-data-jpa | spring-data-jpa/src/main/java/org/springframework/data/jpa/provider/HibernateUtils.java | {
"start": 1369,
"end": 3098
} | class ____ {
private HibernateUtils() {}
/**
* Return the query string of the underlying native Hibernate query.
*
* @param query
* @return
*/
public @Nullable static String getHibernateQuery(Object query) {
try {
// Try the new Hibernate implementation first
if (query instanceof SqmQuery sqmQuery) {
String hql = sqmQuery.getQueryString();
if (!hql.equals("<criteria>")) {
return hql;
}
return sqmQuery.getSqmStatement().toHqlString();
}
// Try the new Hibernate implementation first
if (query instanceof NamedSqmQueryMemento<?> sqmQuery) {
String hql = sqmQuery.getHqlString();
if (!hql.equals("<criteria>")) {
return hql;
}
return sqmQuery.getSqmStatement().toHqlString();
}
if (query instanceof NamedNativeQueryMemento<?> nativeQuery) {
return nativeQuery.getSqlString();
}
// Couple of cases in which this still breaks, see HHH-15389
} catch (RuntimeException o_O) {}
// Try the old way, as it still works in some cases (haven't investigated in which exactly)
if (query instanceof Query<?> hibernateQuery) {
return hibernateQuery.getQueryString();
} else {
throw new IllegalArgumentException("Don't know how to extract the query string from " + query);
}
}
public static boolean isNativeQuery(Object query) {
// Try the new Hibernate implementation first
if (query instanceof SqmQuery) {
return false;
}
if (query instanceof NativeQuery<?>) {
return true;
}
// Try the new Hibernate implementation first
if (query instanceof NamedSqmQueryMemento<?>) {
return false;
}
if (query instanceof NamedNativeQueryMemento<?>) {
return true;
}
return false;
}
}
| HibernateUtils |
java | mapstruct__mapstruct | processor/src/test/java/org/mapstruct/ap/test/selection/qualifier/named/KeyWordMapper.java | {
"start": 608,
"end": 1295
} | class ____ {
private static final Map<String, String> EN_GER = ImmutableMap.<String, String>builder()
.put( "magnificent", "Großartig" )
.put( "evergreen", "Evergreen" )
.put( "classic", "Klassiker" )
.put( "box office flop", "Kasse Flop" )
.build();
public static final KeyWordMapper INSTANCE = Mappers.getMapper( KeyWordMapper.class );
@IterableMapping( dateFormat = "", qualifiedByName = "EnglishToGerman" )
abstract List<String> mapKeyWords( List<String> keyWords );
@Named( "EnglishToGerman" )
public String mapKeyWord( String keyword ) {
return EN_GER.get( keyword );
}
}
| KeyWordMapper |
java | apache__camel | core/camel-core/src/test/java/org/apache/camel/support/processor/DefaultExchangeFormatterTest.java | {
"start": 1197,
"end": 3623
} | class ____ {
private DefaultCamelContext camelContext;
private Exchange exchange;
private DefaultExchangeFormatter exchangeFormatter;
@BeforeEach
public void setUp() {
camelContext = new DefaultCamelContext();
Message message = new DefaultMessage(camelContext);
message.setBody("This is the message body");
exchange = new DefaultExchange(camelContext);
exchange.setIn(message);
exchangeFormatter = new DefaultExchangeFormatter();
}
@Test
public void testDefaultFormat() {
String formattedExchange = exchangeFormatter.format(exchange);
assertTrue(formattedExchange.contains("This is the message body"));
}
@Test
/*
* The formatted exchange without limitation is Exchange[BodyType: String, Body: This is the message body]
* The "Exchange[", the "...", and the "]" do not count here, but the leading
* ", " that is removed later does count...
*/
public void testFormatWithMaxCharsParameter() {
exchangeFormatter.setMaxChars(60);
String formattedExchange = exchangeFormatter.format(exchange);
assertEquals(47 + "Exchange[...]".length() - ", ".length(), formattedExchange.length());
}
@Test
/*
* This limitation is really the length of the printed message body, not the
* one of the message
*/
public void testFormatWithBodyMaxChars() {
camelContext.getGlobalOptions().put(Exchange.LOG_DEBUG_BODY_MAX_CHARS, "7");
String formattedExchange = exchangeFormatter.format(exchange);
assertFalse(formattedExchange.contains("This is "));
assertTrue(formattedExchange.contains("This is"));
camelContext.getGlobalOptions().remove(Exchange.LOG_DEBUG_BODY_MAX_CHARS);
}
@Test
/*
* These two limitations will first truncate the message body and then the
* total message.
*/
public void testFormatWithBoth() {
camelContext.getGlobalOptions().put(Exchange.LOG_DEBUG_BODY_MAX_CHARS, "7");
exchangeFormatter.setMaxChars(60);
String formattedExchange = exchangeFormatter.format(exchange);
assertEquals(60 + "Exchange[...]".length() - ", ".length(), formattedExchange.length());
assertFalse(formattedExchange.contains("This is "));
camelContext.getGlobalOptions().remove(Exchange.LOG_DEBUG_BODY_MAX_CHARS);
}
}
| DefaultExchangeFormatterTest |
java | ReactiveX__RxJava | src/main/java/io/reactivex/rxjava3/internal/operators/maybe/MaybeTakeUntilMaybe.java | {
"start": 3824,
"end": 4736
} | class ____<U>
extends AtomicReference<Disposable> implements MaybeObserver<U> {
private static final long serialVersionUID = -1266041316834525931L;
final TakeUntilMainMaybeObserver<?, U> parent;
TakeUntilOtherMaybeObserver(TakeUntilMainMaybeObserver<?, U> parent) {
this.parent = parent;
}
@Override
public void onSubscribe(Disposable d) {
DisposableHelper.setOnce(this, d);
}
@Override
public void onSuccess(Object value) {
parent.otherComplete();
}
@Override
public void onError(Throwable e) {
parent.otherError(e);
}
@Override
public void onComplete() {
parent.otherComplete();
}
}
}
}
| TakeUntilOtherMaybeObserver |
java | google__dagger | hilt-compiler/main/java/dagger/hilt/processor/internal/Processors.java | {
"start": 2699,
"end": 2887
} | class ____ {
public static final String CONSTRUCTOR_NAME = "<init>";
public static final String STATIC_INITIALIZER_NAME = "<clinit>";
/** Generates the aggregating metadata | Processors |
java | spring-projects__spring-boot | buildpack/spring-boot-buildpack-platform/src/main/java/org/springframework/boot/buildpack/platform/io/DefaultOwner.java | {
"start": 788,
"end": 1152
} | class ____ implements Owner {
private final long uid;
private final long gid;
DefaultOwner(long uid, long gid) {
this.uid = uid;
this.gid = gid;
}
@Override
public long getUid() {
return this.uid;
}
@Override
public long getGid() {
return this.gid;
}
@Override
public String toString() {
return this.uid + "/" + this.gid;
}
}
| DefaultOwner |
java | spring-projects__spring-security | config/src/test/java/org/springframework/security/config/annotation/method/configuration/PrePostMethodSecurityConfigurationTests.java | {
"start": 76895,
"end": 77128
} | class ____ {
String name;
public Passenger(String name) {
this.name = name;
}
@PreAuthorize("hasAuthority('airplane:read')")
public String getName() {
return this.name;
}
}
@EnableMethodSecurity
static | Passenger |
java | mockito__mockito | mockito-core/src/test/java/org/mockitousage/annotation/MockInjectionUsingConstructorTest.java | {
"start": 5134,
"end": 5631
} | class ____ {
@InjectMocks TimeUnit f;
}
assertThatThrownBy(
() -> {
openMocks(new TestCase());
})
.isInstanceOf(MockitoException.class)
.hasMessageContaining(
"Cannot instantiate @InjectMocks field named 'f'! Cause: the type 'TimeUnit' is an enum");
}
@Test
public void injectMocksMustFailWithAbstractClass() {
| TestCase |
java | quarkusio__quarkus | extensions/resteasy-classic/resteasy/deployment/src/test/java/io/quarkus/resteasy/test/security/AuthenticationFailedExceptionHeaderTest.java | {
"start": 2319,
"end": 2472
} | class ____ {
@GET
public String hello() {
return "hello";
}
}
@ApplicationScoped
public static | HelloResource |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotRequestBuilder.java | {
"start": 890,
"end": 4699
} | class ____ extends MasterNodeOperationRequestBuilder<
CreateSnapshotRequest,
CreateSnapshotResponse,
CreateSnapshotRequestBuilder> {
/**
* Constructs a new create snapshot request builder with specified repository and snapshot names
*/
public CreateSnapshotRequestBuilder(ElasticsearchClient client, TimeValue masterNodeTimeout, String repository, String snapshot) {
super(client, TransportCreateSnapshotAction.TYPE, new CreateSnapshotRequest(masterNodeTimeout, repository, snapshot));
}
/**
* Sets the snapshot name
*
* @param snapshot snapshot name
* @return this builder
*/
public CreateSnapshotRequestBuilder setSnapshot(String snapshot) {
request.snapshot(snapshot);
return this;
}
/**
* Sets the repository name
*
* @param repository repository name
* @return this builder
*/
public CreateSnapshotRequestBuilder setRepository(String repository) {
request.repository(repository);
return this;
}
/**
* Sets a list of indices that should be included into the snapshot
* <p>
* The list of indices supports multi-index syntax. For example: "+test*" ,"-test42" will index all indices with
* prefix "test" except index "test42". Aliases are supported. An empty list or {"_all"} will snapshot all open
* indices in the cluster.
*
* @return this builder
*/
public CreateSnapshotRequestBuilder setIndices(String... indices) {
request.indices(indices);
return this;
}
/**
* Specifies the indices options. Like what type of requested indices to ignore. For example indices that don't exist.
*
* @param indicesOptions the desired behaviour regarding indices options
* @return this request
*/
public CreateSnapshotRequestBuilder setIndicesOptions(IndicesOptions indicesOptions) {
request.indicesOptions(indicesOptions);
return this;
}
/**
* If set to true the request should wait for the snapshot completion before returning.
*
* @param waitForCompletion true if
* @return this builder
*/
public CreateSnapshotRequestBuilder setWaitForCompletion(boolean waitForCompletion) {
request.waitForCompletion(waitForCompletion);
return this;
}
/**
* If set to true the request should snapshot indices with unavailable shards
*
* @param partial true if request should snapshot indices with unavailable shards
* @return this builder
*/
public CreateSnapshotRequestBuilder setPartial(boolean partial) {
request.partial(partial);
return this;
}
/**
* Set to true if snapshot should include global cluster state
*
* @param includeGlobalState true if snapshot should include global cluster state
* @return this builder
*/
public CreateSnapshotRequestBuilder setIncludeGlobalState(boolean includeGlobalState) {
request.includeGlobalState(includeGlobalState);
return this;
}
/**
* Provide a list of features whose state indices should be included in the snapshot
*
* @param featureStates A list of feature names
* @return this builder
*/
public CreateSnapshotRequestBuilder setFeatureStates(String... featureStates) {
request.featureStates(featureStates);
return this;
}
/**
* Provide a map of user metadata that should be included in the snapshot metadata.
*
* @param metadata user metadata map
* @return this builder
*/
public CreateSnapshotRequestBuilder setUserMetadata(@Nullable Map<String, Object> metadata) {
request.userMetadata(metadata);
return this;
}
}
| CreateSnapshotRequestBuilder |
java | apache__flink | flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/serde/ContextResolvedTableSerdeTest.java | {
"start": 18065,
"end": 22415
} | class ____ {
private final SerdeContext ctx =
serdeContext(
TableConfigOptions.CatalogPlanCompilation.SCHEMA,
TableConfigOptions.CatalogPlanRestore.IDENTIFIER);
@Test
void withPermanentTable() throws Exception {
final Tuple2<JsonNode, ContextResolvedTable> result =
serDe(ctx, PERMANENT_PLAN_CONTEXT_RESOLVED_TABLE);
assertThatJsonContains(result.f0, FIELD_NAME_IDENTIFIER);
assertThatJsonContains(result.f0, FIELD_NAME_CATALOG_TABLE);
assertThatJsonDoesNotContain(
result.f0,
FIELD_NAME_CATALOG_TABLE,
ResolvedCatalogTableJsonSerializer.OPTIONS);
assertThatJsonDoesNotContain(
result.f0,
FIELD_NAME_CATALOG_TABLE,
ResolvedCatalogTableJsonSerializer.COMMENT);
assertThat(result.f1.isPermanent()).isTrue();
assertThat(result.f1.getCatalog()).containsSame(CATALOG);
assertThat(result.f1.getIdentifier()).isEqualTo(PERMANENT_TABLE_IDENTIFIER);
assertThat(result.f1.getResolvedSchema()).isEqualTo(CATALOG_TABLE_RESOLVED_SCHEMA);
assertThat(result.f1.getResolvedTable().getOptions()).isEqualTo(CATALOG_OPTIONS);
}
@Test
void withDifferentSchema() throws Exception {
final ResolvedSchema resolvedSchema =
new ResolvedSchema(
Arrays.asList(
Column.physical("a", DataTypes.STRING()),
Column.physical("b", DataTypes.STRING()),
Column.physical("c", DataTypes.STRING())),
Collections.emptyList(),
null,
Collections.singletonList(
DefaultIndex.newIndex(
"idx", Collections.singletonList("a"))));
final ContextResolvedTable spec =
ContextResolvedTable.permanent(
PERMANENT_TABLE_IDENTIFIER,
CATALOG,
new ResolvedCatalogTable(
CatalogTable.newBuilder()
.schema(
Schema.newBuilder()
.fromResolvedSchema(resolvedSchema)
.build())
.comment("my comment")
.partitionKeys(PARTITION_KEYS)
.options(PLAN_OPTIONS)
.build(),
resolvedSchema));
final byte[] actualSerialized = createJsonObjectWriter(ctx).writeValueAsBytes(spec);
assertThatThrownBy(
() ->
createJsonObjectReader(ctx)
.readValue(
actualSerialized,
ContextResolvedTable.class))
.satisfies(
anyCauseMatches(
TableException.class,
ContextResolvedTableJsonDeserializer.schemaNotMatching(
PERMANENT_TABLE_IDENTIFIER,
resolvedSchema,
CATALOG_TABLE_RESOLVED_SCHEMA)
.getMessage()));
}
}
@Nested
@DisplayName("and CatalogPlanRestore == ALL")
| TestRestoreIdentifier |
java | micronaut-projects__micronaut-core | inject-java/src/main/java/io/micronaut/annotation/processing/visitor/JavaClassElement.java | {
"start": 4042,
"end": 35312
} | class ____<T> {}"
final List<? extends TypeMirror> typeArguments;
private final boolean isTypeVariable;
private List<PropertyElement> beanProperties;
private String simpleName;
private String name;
private String packageName;
@Nullable
private Map<String, ClassElement> resolvedTypeArguments;
@Nullable
private Map<String, Map<String, ClassElement>> resolvedAllTypeArguments;
@Nullable
private ClassElement resolvedSuperType;
@Nullable
private List<ClassElement> resolvedInterfaces;
private boolean hasErrorousInterface;
private final JavaEnclosedElementsQuery enclosedElementsQuery = new JavaEnclosedElementsQuery(false);
private final JavaEnclosedElementsQuery sourceEnclosedElementsQuery = new JavaEnclosedElementsQuery(true);
@Nullable
private ElementAnnotationMetadata elementTypeAnnotationMetadata;
@Nullable
private ClassElement theType;
@Nullable
private AnnotationMetadata annotationMetadata;
/**
* @param nativeType The native type
* @param annotationMetadataFactory The annotation metadata factory
* @param visitorContext The visitor context
*/
@Internal
public JavaClassElement(JavaNativeElement.Class nativeType, ElementAnnotationMetadataFactory annotationMetadataFactory, JavaVisitorContext visitorContext) {
this(nativeType, annotationMetadataFactory, visitorContext, null, null, 0, false, null);
}
/**
* @param nativeType The native type
* @param annotationMetadataFactory The annotation metadata factory
* @param visitorContext The visitor context
* @param typeArguments The declared type arguments
* @param resolvedTypeArguments The resolvedTypeArguments
*/
JavaClassElement(JavaNativeElement.Class nativeType,
ElementAnnotationMetadataFactory annotationMetadataFactory,
JavaVisitorContext visitorContext,
List<? extends TypeMirror> typeArguments,
@Nullable
Map<String, ClassElement> resolvedTypeArguments) {
this(nativeType, annotationMetadataFactory, visitorContext, typeArguments, resolvedTypeArguments, 0, false, null);
}
/**
* @param nativeType The native type
* @param annotationMetadataFactory The annotation metadata factory
* @param visitorContext The visitor context
* @param typeArguments The declared type arguments
* @param resolvedTypeArguments The resolvedTypeArguments
* @param doc The optional documentation
*/
JavaClassElement(JavaNativeElement.Class nativeType,
ElementAnnotationMetadataFactory annotationMetadataFactory,
JavaVisitorContext visitorContext,
List<? extends TypeMirror> typeArguments,
@Nullable
Map<String, ClassElement> resolvedTypeArguments,
String doc) {
this(nativeType, annotationMetadataFactory, visitorContext, typeArguments, resolvedTypeArguments, 0, false, doc);
}
/**
* @param nativeType The native type
* @param annotationMetadataFactory The annotation metadata factory
* @param visitorContext The visitor context
* @param typeArguments The declared type arguments
* @param resolvedTypeArguments The resolvedTypeArguments
* @param arrayDimensions The number of array dimensions
*/
JavaClassElement(JavaNativeElement.Class nativeType,
ElementAnnotationMetadataFactory annotationMetadataFactory,
JavaVisitorContext visitorContext,
List<? extends TypeMirror> typeArguments,
@Nullable
Map<String, ClassElement> resolvedTypeArguments,
int arrayDimensions) {
this(nativeType, annotationMetadataFactory, visitorContext, typeArguments, resolvedTypeArguments, arrayDimensions, false, null);
}
/**
* @param nativeType The native type
* @param annotationMetadataFactory The annotation metadata factory
* @param visitorContext The visitor context
* @param typeArguments The declared type arguments
* @param resolvedTypeArguments The resolvedTypeArguments
* @param arrayDimensions The number of array dimensions
* @param doc The optional documentation
*/
JavaClassElement(JavaNativeElement.Class nativeType,
ElementAnnotationMetadataFactory annotationMetadataFactory,
JavaVisitorContext visitorContext,
List<? extends TypeMirror> typeArguments,
@Nullable
Map<String, ClassElement> resolvedTypeArguments,
int arrayDimensions,
String doc) {
this(nativeType, annotationMetadataFactory, visitorContext, typeArguments, resolvedTypeArguments, arrayDimensions, false, doc);
}
/**
* @param nativeType The {@link TypeElement}
* @param annotationMetadataFactory The annotation metadata factory
* @param visitorContext The visitor context
* @param typeArguments The declared type arguments
* @param resolvedTypeArguments The resolvedTypeArguments
* @param arrayDimensions The number of array dimensions
* @param isTypeVariable Is the type a type variable
* @param doc The optional documentation
*/
JavaClassElement(JavaNativeElement.Class nativeType,
ElementAnnotationMetadataFactory annotationMetadataFactory,
JavaVisitorContext visitorContext,
@Nullable List<? extends TypeMirror> typeArguments,
@Nullable
Map<String, ClassElement> resolvedTypeArguments,
int arrayDimensions,
boolean isTypeVariable,
@Nullable
String doc) {
super(nativeType, annotationMetadataFactory, visitorContext);
this.classElement = nativeType.element();
this.typeArguments = typeArguments;
this.resolvedTypeArguments = resolvedTypeArguments;
this.arrayDimensions = arrayDimensions;
this.isTypeVariable = isTypeVariable;
this.doc = doc;
}
@Override
public Optional<String> getDocumentation(boolean parse) {
return !parse || doc == null ? super.getDocumentation(parse) : Optional.of(doc);
}
@Override
public String getCanonicalName() {
return classElement.getQualifiedName().toString();
}
@Override
public boolean hasUnresolvedTypes(UnresolvedTypeKind... kind) {
List<? extends TypeMirror> interfaces = this.classElement.getInterfaces();
for (UnresolvedTypeKind unresolvedTypeKind : kind) {
switch (unresolvedTypeKind) {
case INTERFACE -> {
for (TypeMirror anInterface : interfaces) {
if (anInterface.getKind() == TypeKind.ERROR) {
return true;
}
}
}
case SUPERCLASS -> {
TypeMirror superclass = this.classElement.getSuperclass();
if (superclass.getKind() == TypeKind.ERROR) {
return true;
}
}
default -> {
// no-op
}
}
}
return false;
}
@Override
public JavaNativeElement.@NonNull Class getNativeType() {
return (JavaNativeElement.Class) super.getNativeType();
}
@Override
protected JavaClassElement copyThis() {
return new JavaClassElement(getNativeType(), elementAnnotationMetadataFactory, visitorContext, typeArguments, resolvedTypeArguments, arrayDimensions);
}
@NonNull
@Override
public ClassElement withTypeArguments(Map<String, ClassElement> newTypeArguments) {
return new JavaClassElement(getNativeType(), elementAnnotationMetadataFactory, visitorContext, typeArguments, newTypeArguments, arrayDimensions);
}
@Override
public ClassElement withAnnotationMetadata(AnnotationMetadata annotationMetadata) {
return (ClassElement) super.withAnnotationMetadata(annotationMetadata);
}
@Override
protected MutableAnnotationMetadataDelegate<?> getAnnotationMetadataToWrite() {
if (getNativeType().typeMirror() == null) {
return super.getAnnotationMetadataToWrite();
}
return getTypeAnnotationMetadata();
}
@NonNull
@Override
public AnnotationMetadata getAnnotationMetadata() {
if (presetAnnotationMetadata != null) {
return presetAnnotationMetadata;
}
if (annotationMetadata == null) {
if (getNativeType().typeMirror() == null) {
annotationMetadata = super.getAnnotationMetadata();
} else {
annotationMetadata = new AnnotationMetadataHierarchy(true, super.getAnnotationMetadata(), getTypeAnnotationMetadata());
}
}
return annotationMetadata;
}
@Override
protected boolean hasNullMarked() {
return getPackage().hasStereotype(NullMarked.class) || hasStereotype(NullMarked.class);
}
@NonNull
@Override
public MutableAnnotationMetadataDelegate<AnnotationMetadata> getTypeAnnotationMetadata() {
if (elementTypeAnnotationMetadata == null) {
elementTypeAnnotationMetadata = elementAnnotationMetadataFactory.buildTypeAnnotations(this);
}
return elementTypeAnnotationMetadata;
}
@Override
public boolean isTypeVariable() {
return isTypeVariable;
}
@Override
public String toString() {
return getName();
}
@Override
public boolean isInner() {
return classElement.getNestingKind().isNested();
}
@Override
public boolean isRecord() {
return JavaModelUtils.isRecord(classElement);
}
@Override
public boolean isPrimitive() {
return ClassUtils.getPrimitiveType(getName()).isPresent();
}
@Override
public Collection<ClassElement> getInterfaces() {
if (resolvedInterfaces == null) {
if (!visitorContext.isVisitUnresolvedInterfaces()) {
resolvedInterfaces = classElement.getInterfaces().stream()
.filter(this::onlyAvailable)
.map(mirror -> newClassElement(mirror, getTypeArguments())).toList();
hasErrorousInterface = classElement.getInterfaces().size() != resolvedInterfaces.size();
} else {
resolvedInterfaces = classElement.getInterfaces().stream()
.map(mirror -> newClassElement(mirror, getTypeArguments())).toList();
hasErrorousInterface = false;
}
} else if (hasErrorousInterface && visitorContext.isVisitUnresolvedInterfaces()) {
resolvedInterfaces = classElement.getInterfaces().stream()
.map(mirror -> newClassElement(mirror, getTypeArguments())).toList();
hasErrorousInterface = false;
}
return resolvedInterfaces;
}
private boolean onlyAvailable(TypeMirror mirror) {
return !(mirror instanceof DeclaredType declaredType) || declaredType.getKind() != TypeKind.ERROR;
}
@Override
public Optional<ClassElement> getSuperType() {
if (resolvedSuperType == null) {
final TypeMirror superclass = classElement.getSuperclass();
if (superclass == null) {
return Optional.empty();
}
final Element element = visitorContext.getTypes().asElement(superclass);
if (element instanceof TypeElement superElement) {
if (Object.class.getName().equals(superElement.getQualifiedName().toString())) {
return Optional.empty();
}
resolvedSuperType = newClassElement(superclass, getTypeArguments());
}
}
return Optional.ofNullable(resolvedSuperType);
}
@Override
public boolean isAbstract() {
return classElement.getModifiers().contains(Modifier.ABSTRACT);
}
@Override
public boolean isInterface() {
return JavaModelUtils.isInterface(classElement);
}
@Override
public @NonNull List<PropertyElement> getBeanProperties() {
if (beanProperties == null) {
beanProperties = getBeanProperties(PropertyElementQuery.of(this));
}
return Collections.unmodifiableList(beanProperties);
}
@Override
public @NonNull List<PropertyElement> getBeanProperties(@NonNull PropertyElementQuery propertyElementQuery) {
if (isRecord()) {
return AstBeanPropertiesUtils.resolveBeanProperties(propertyElementQuery,
this,
this::getRecordMethods,
this::getRecordFields,
true,
Collections.emptySet(),
methodElement -> Optional.empty(),
methodElement -> Optional.empty(),
this::mapToPropertyElement);
}
Function<MethodElement, Optional<String>> customReaderPropertyNameResolver = methodElement -> Optional.empty();
Function<MethodElement, Optional<String>> customWriterPropertyNameResolver = methodElement -> Optional.empty();
if (isKotlinClass(getNativeType().element())) {
Set<String> isProperties = getEnclosedElements(ElementQuery.ALL_METHODS)
.stream()
.map(io.micronaut.inject.ast.Element::getName)
.filter(method -> method.startsWith(PREFIX_IS))
.collect(Collectors.toSet());
if (!isProperties.isEmpty()) {
customReaderPropertyNameResolver = methodElement -> {
String methodName = methodElement.getSimpleName();
if (methodName.startsWith(PREFIX_IS)) {
return Optional.of(methodName);
}
return Optional.empty();
};
customWriterPropertyNameResolver = methodElement -> {
String methodName = methodElement.getSimpleName();
String propertyName = NameUtils.getPropertyNameForSetter(methodName);
String isPropertyName = PREFIX_IS + NameUtils.capitalize(propertyName);
if (isProperties.contains(isPropertyName)) {
return Optional.of(isPropertyName);
}
return Optional.empty();
};
}
}
return AstBeanPropertiesUtils.resolveBeanProperties(propertyElementQuery,
this,
() -> getEnclosedElements(ElementQuery.ALL_METHODS),
() -> getEnclosedElements(ElementQuery.ALL_FIELDS),
false,
Collections.emptySet(),
customReaderPropertyNameResolver,
customWriterPropertyNameResolver,
this::mapToPropertyElement);
}
private JavaPropertyElement mapToPropertyElement(AstBeanPropertiesUtils.BeanPropertyData value) {
AnnotationMetadata propertyAnnotationMetadata = null;
if (isRecord()) {
for (Element enclosedElement : classElement.getEnclosedElements()) {
if (JavaModelUtils.isRecordComponent(enclosedElement) && enclosedElement instanceof RecordComponentElement recordComponentElement) {
if (recordComponentElement.getSimpleName().toString().equals(value.propertyName)) {
propertyAnnotationMetadata = visitorContext.getAnnotationMetadataBuilder().build(recordComponentElement);
break;
}
}
}
}
return new JavaPropertyElement(
JavaClassElement.this,
value.type,
value.readAccessKind == null ? null : value.getter,
value.writeAccessKind == null ? null : value.setter,
value.field,
propertyAnnotationMetadata,
elementAnnotationMetadataFactory,
value.propertyName,
value.readAccessKind == null ? PropertyElement.AccessKind.METHOD : PropertyElement.AccessKind.valueOf(value.readAccessKind.name()),
value.writeAccessKind == null ? PropertyElement.AccessKind.METHOD : PropertyElement.AccessKind.valueOf(value.writeAccessKind.name()),
value.isExcluded,
visitorContext,
findPropertyDoc(value));
}
@Nullable
private String findPropertyDoc(AstBeanPropertiesUtils.BeanPropertyData value) {
if (isRecord()) {
try {
String docComment = visitorContext.getElements().getDocComment(getNativeType().element());
if (docComment != null) {
Javadoc javadoc = StaticJavaParser.parseJavadoc(docComment);
for (JavadocBlockTag t : javadoc.getBlockTags()) {
if (t.getType() == JavadocBlockTag.Type.PARAM && t.getName().map(n -> n.equals(value.propertyName)).orElse(false)) {
return t.getContent().toText();
}
}
}
} catch (Exception ignore) {
// Ignore
}
}
return null;
}
private List<MethodElement> getRecordMethods() {
var recordComponents = new HashSet<String>();
var methodElements = new ArrayList<MethodElement>();
for (Element enclosedElement : classElement.getEnclosedElements()) {
if (JavaModelUtils.isRecordComponent(enclosedElement) || enclosedElement instanceof ExecutableElement) {
if (enclosedElement.getKind() == ElementKind.CONSTRUCTOR) {
continue;
}
String name = enclosedElement.getSimpleName().toString();
if (enclosedElement instanceof ExecutableElement executableElement) {
if (recordComponents.contains(name)) {
methodElements.add(
new JavaMethodElement(
JavaClassElement.this,
new JavaNativeElement.Method(executableElement),
elementAnnotationMetadataFactory,
visitorContext)
);
}
} else if (enclosedElement instanceof VariableElement) {
recordComponents.add(name);
}
}
}
return methodElements;
}
private List<FieldElement> getRecordFields() {
var fieldElements = new ArrayList<FieldElement>();
for (Element enclosedElement : classElement.getEnclosedElements()) {
if (!JavaModelUtils.isRecordComponent(enclosedElement) && enclosedElement instanceof VariableElement variableElement) {
fieldElements.add(
new JavaFieldElement(
JavaClassElement.this,
new JavaNativeElement.Variable(variableElement),
elementAnnotationMetadataFactory,
visitorContext)
);
}
}
return fieldElements;
}
private boolean isKotlinClass(Element element) {
return element.getAnnotationMirrors().stream().anyMatch(am -> am.getAnnotationType().asElement().toString().equals(KOTLIN_METADATA));
}
@NonNull
@Override
public <T extends io.micronaut.inject.ast.Element> List<T> getEnclosedElements(@NonNull ElementQuery<T> query) {
return enclosedElementsQuery.getEnclosedElements(this, query);
}
/**
 * This method will produce the elements just like {@link #getEnclosedElements(ElementQuery)},
 * but the elements are constructed as source elements via
 * {@link io.micronaut.inject.ast.ElementFactory#newSourceMethodElement(ClassElement, Object, ElementAnnotationMetadataFactory)}.
 *
 * @param query The query to filter the enclosed elements
 * @param <T>   The element type
 * @return The list of matching source elements
 */
public final <T extends io.micronaut.inject.ast.Element> List<T> getSourceEnclosedElements(@NonNull ElementQuery<T> query) {
    // Uses a dedicated query instance so source-element construction does not
    // interfere with the regular (non-source) enclosed-elements resolution.
    return sourceEnclosedElementsQuery.getEnclosedElements(this, query);
}
@Override
public boolean isArray() {
    // This element models an array type whenever at least one array dimension is tracked.
    return arrayDimensions > 0;
}
@Override
public int getArrayDimensions() {
    // Zero means "not an array"; see isArray().
    return arrayDimensions;
}
@Override
public ClassElement withArrayDimensions(int arrayDimensions) {
    if (arrayDimensions == this.arrayDimensions) {
        // No change requested; reuse this immutable instance.
        return this;
    }
    JavaNativeElement.Class nativeType = getNativeType();
    // When exactly one dimension is being removed and the backing mirror is an
    // array type, unwrap the mirror to its component type so the native element
    // stays consistent with the new dimension count.
    if (this.arrayDimensions - 1 == arrayDimensions && nativeType.typeMirror() instanceof ArrayType array) {
        nativeType = new JavaNativeElement.Class(nativeType.element(), array.getComponentType(), nativeType.owner());
    }
    return new JavaClassElement(nativeType, elementAnnotationMetadataFactory, visitorContext, typeArguments, resolvedTypeArguments, arrayDimensions, false, doc);
}
@Override
public @NonNull String getSimpleName() {
    // Lazily computed and cached. Recomputation is idempotent, so no
    // synchronization is required for the benign race on the cache field.
    String resolved = simpleName;
    if (resolved == null) {
        resolved = JavaModelUtils.getClassNameWithoutPackage(classElement);
        simpleName = resolved;
    }
    return resolved;
}
@Override
public @NonNull String getName() {
    // Lazily computed and cached; recomputation is idempotent so the
    // unsynchronized cache read/write is benign.
    String resolved = name;
    if (resolved == null) {
        resolved = JavaModelUtils.getClassName(classElement);
        name = resolved;
    }
    return resolved;
}
@Override
public String getPackageName() {
    // Lazily computed and cached, mirroring getName()/getSimpleName().
    String resolved = packageName;
    if (resolved == null) {
        resolved = JavaModelUtils.getPackageName(classElement);
        packageName = resolved;
    }
    return resolved;
}
@Override
public PackageElement getPackage() {
    // Walk outward through enclosing elements until the package level is reached.
    Element enclosing = classElement.getEnclosingElement();
    while (enclosing != null && enclosing.getKind() != ElementKind.PACKAGE) {
        enclosing = enclosing.getEnclosingElement();
    }
    if (!(enclosing instanceof javax.lang.model.element.PackageElement packageElement)) {
        // No package found (e.g. synthetic/unnamed cases) — fall back to the default package.
        return PackageElement.DEFAULT_PACKAGE;
    }
    return new JavaPackageElement(
        packageElement,
        elementAnnotationMetadataFactory,
        visitorContext
    );
}
@Override
public boolean isAssignable(String type) {
    if (getName().equals(type)) {
        // Trivially assignable to itself.
        return true;
    }
    // Resolve the candidate by name; if it is unknown to the compilation unit,
    // assignability cannot be established.
    TypeElement candidate = visitorContext.getElements().getTypeElement(type);
    return candidate != null && isAssignable(candidate);
}
@Override
public boolean isAssignable(ClassElement type) {
    if (equals(type)) {
        return true; // identical element
    }
    // Java-backed, non-primitive elements can be compared via their native type
    // elements for a precise javax.lang.model assignability check.
    if (!type.isPrimitive() && type instanceof JavaClassElement javaClassElement) {
        return isAssignable(javaClassElement.getNativeType().element());
    }
    // Primitives and foreign ClassElement implementations fall back to a name-based check.
    return isAssignable(type.getName());
}
@Override
public Optional<ClassElement> getOptionalValueType() {
    // Map each supported Optional flavour to the element type it wraps.
    if (isAssignable(Optional.class)) {
        // A raw Optional (no type argument) wraps Object.
        return getFirstTypeArgument().or(() -> visitorContext.getClassElement(Object.class));
    }
    if (isAssignable(OptionalInt.class)) {
        return visitorContext.getClassElement(Integer.class);
    }
    if (isAssignable(OptionalLong.class)) {
        return visitorContext.getClassElement(Long.class);
    }
    if (isAssignable(OptionalDouble.class)) {
        return visitorContext.getClassElement(Double.class);
    }
    return Optional.empty();
}
/**
 * Performs the underlying javax.lang.model assignability check between this
 * class and the given type element, comparing erased types so that generic
 * parameterization does not affect the result.
 *
 * @param otherElement the target type element
 * @return {@code true} if this type is assignable to the other type
 */
private boolean isAssignable(TypeElement otherElement) {
    Types typeUtils = visitorContext.getTypes();
    return typeUtils.isAssignable(
        typeUtils.erasure(classElement.asType()),
        typeUtils.erasure(otherElement.asType())
    );
}
@NonNull
@Override
@SuppressWarnings("java:S1119")
public Optional<MethodElement> getPrimaryConstructor() {
    if (JavaModelUtils.isRecord(classElement)) {
        // Records: an explicit static creator always takes precedence.
        Optional<MethodElement> staticCreator = findStaticCreator();
        if (staticCreator.isPresent()) {
            return staticCreator;
        }
        if (isInner() && !isStatic()) {
            // only static inner classes can be constructed
            return Optional.empty();
        }
        List<ConstructorElement> constructors = getAccessibleConstructors();
        // Next preference: a constructor explicitly annotated for injection or creation.
        Optional<ConstructorElement> annotatedConstructor = constructors.stream()
            .filter(c -> c.hasStereotype(AnnotationUtil.INJECT) || c.hasStereotype(Creator.class))
            .findFirst();
        if (annotatedConstructor.isPresent()) {
            return annotatedConstructor.map(c -> c);
        }
        // with records the record constructor is always the last constructor
        List<? extends RecordComponentElement> recordComponents = classElement.getRecordComponents();
        // Look for the canonical constructor: the one whose parameters match the
        // record components positionally, compared by type name.
        constructorSearch:
        for (ConstructorElement constructor : constructors) {
            ParameterElement[] parameters = constructor.getParameters();
            if (parameters.length == recordComponents.size()) {
                for (int i = 0; i < parameters.length; i++) {
                    ParameterElement parameter = parameters[i];
                    RecordComponentElement rce = recordComponents.get(i);
                    String parameterTypeString = parameter.getType().getName();
                    TypeMirror recordType = rce.asType();
                    // Prefer the resolved element's name; fall back to the raw mirror
                    // string for types without a corresponding element (e.g. primitives).
                    Element element = visitorContext.getTypes().asElement(recordType);
                    String recordTypeString = element == null ? recordType.toString() : element.toString();
                    if (!parameterTypeString.equals(recordTypeString)) {
                        // types don't match, continue searching constructors
                        continue constructorSearch;
                    }
                }
                return Optional.of(constructor);
            }
        }
        if (constructors.isEmpty()) {
            // constructor not accessible
            return Optional.empty();
        } else {
            // Fall back to the last accessible constructor (see note above on ordering).
            return Optional.of(constructors.get(constructors.size() - 1));
        }
    }
    // Non-record types use the default resolution from the interface.
    return ArrayableClassElement.super.getPrimaryConstructor();
}
@Override
public @NonNull List<MethodElement> getAccessibleStaticCreators() {
    // Standard lookup first: static creator methods declared on this type.
    var staticCreators = new ArrayList<>(ArrayableClassElement.super.getAccessibleStaticCreators());
    if (!staticCreators.isEmpty()) {
        return staticCreators;
    }
    // Kotlin support: fall back to a @Creator method declared on the companion
    // object, provided it is accessible and actually returns this type.
    return visitorContext.getClassElement(getName() + "$Companion", elementAnnotationMetadataFactory)
        .filter(io.micronaut.inject.ast.Element::isStatic)
        .flatMap(typeElement -> typeElement.getEnclosedElements(ElementQuery.ALL_METHODS
            .annotated(am -> am.hasStereotype(Creator.class))).stream().findFirst()
        )
        .filter(method -> !method.isPrivate() && method.getReturnType().equals(this))
        .stream().toList();
}
@Override
public Optional<ClassElement> getEnclosingType() {
    // Only inner classes have an enclosing type.
    if (!isInner()) {
        return Optional.empty();
    }
    Element enclosing = this.classElement.getEnclosingElement();
    if (enclosing instanceof TypeElement typeElement) {
        ClassElement owner = visitorContext.getElementFactory().newClassElement(
            typeElement,
            elementAnnotationMetadataFactory
        );
        return Optional.of(owner);
    }
    return Optional.empty();
}
@NonNull
@Override
public List<ClassElement> getBoundGenericTypes() {
    if (typeArguments == null) {
        return Collections.emptyList();
    }
    // Resolve each type-argument mirror against this element's type-argument map.
    var bound = new ArrayList<ClassElement>(typeArguments.size());
    for (var mirror : typeArguments) {
        bound.add(newClassElement(mirror, getTypeArguments()));
    }
    return Collections.unmodifiableList(bound);
}
@NonNull
@Override
public List<? extends GenericPlaceholderElement> getDeclaredGenericPlaceholders() {
    // We want the *declared* type variables, so an empty generics map is passed
    // instead of this instance's bound type arguments.
    var placeholders = new ArrayList<GenericPlaceholderElement>();
    for (var typeParameter : classElement.getTypeParameters()) {
        placeholders.add((GenericPlaceholderElement) newClassElement(typeParameter.asType(), Collections.emptyMap()));
    }
    return Collections.unmodifiableList(placeholders);
}
@NonNull
@Override
public ClassElement getRawClassElement() {
    // Rebuild the element without generic information, preserving array depth.
    ClassElement raw = visitorContext.getElementFactory()
        .newClassElement(classElement, elementAnnotationMetadataFactory);
    return raw.withArrayDimensions(getArrayDimensions());
}
@NonNull
@Override
public ClassElement withTypeArguments(@NonNull Collection<ClassElement> typeArguments) {
    // Pair each declared type parameter with the supplied argument, by position.
    // Surplus parameters or arguments (size mismatch) are silently ignored,
    // matching the behaviour of the map-based overload being delegated to.
    var argumentsByName = new LinkedHashMap<String, ClassElement>();
    Iterator<? extends TypeParameterElement> parameterIterator = classElement.getTypeParameters().iterator();
    Iterator<? extends ClassElement> argumentIterator = typeArguments.iterator();
    while (parameterIterator.hasNext() && argumentIterator.hasNext()) {
        ClassElement argument = argumentIterator.next();
        // Arguments backed by a reflective Class are re-resolved into proper
        // elements when the visitor context can locate them.
        if (argument.getNativeType() instanceof Class<?> reflectClass) {
            argument = visitorContext.getClassElement(reflectClass).orElse(argument);
        }
        argumentsByName.put(parameterIterator.next().getSimpleName().toString(), argument);
    }
    return withTypeArguments(argumentsByName);
}
@Override
@NonNull
public Map<String, ClassElement> getTypeArguments() {
    // Lazily resolve and cache the type-parameter-name -> argument mapping.
    if (resolvedTypeArguments == null) {
        resolvedTypeArguments = resolveTypeArguments(classElement, typeArguments);
    }
    return resolvedTypeArguments;
}
@NonNull
@Override
public Map<String, Map<String, ClassElement>> getAllTypeArguments() {
    // Lazily compute and cache the full per-supertype type-argument map;
    // the actual resolution is delegated to the default interface implementation.
    if (resolvedAllTypeArguments == null) {
        resolvedAllTypeArguments = ArrayableClassElement.super.getAllTypeArguments();
    }
    return resolvedAllTypeArguments;
}
@Override
public @NonNull ClassElement getType() {
    // Lazily computed and cached.
    if (theType == null) {
        if (getNativeType().typeMirror() == null) {
            // No mirror to strip: this instance already represents the plain type.
            theType = this;
        } else {
            // Strip the type mirror
            // This should eliminate type annotations
            theType = new JavaClassElement(new JavaNativeElement.Class(getNativeType().element()), elementAnnotationMetadataFactory, visitorContext, typeArguments, resolvedTypeArguments, arrayDimensions);
        }
    }
    return theType;
}
private final | List |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/annotations/idmanytoone/BasketItems.java | {
"start": 542,
"end": 1349
} | class ____ implements Serializable {
private static final long serialVersionUID = -4580497316918713849L;
@Id
@ManyToOne(cascade={ CascadeType.MERGE, CascadeType.PERSIST, CascadeType.REFRESH })
@JoinColumn(name="basketDatetime", referencedColumnName="basketDatetime")
@JoinColumn(name="customerID", referencedColumnName="customerID")
private ShoppingBaskets shoppingBaskets;
@Column(name="cost", nullable=false)
@Id
private Double cost;
public void setCost(double value) {
setCost(new Double(value));
}
public void setCost(Double value) {
this.cost = value;
}
public Double getCost() {
return cost;
}
public void setShoppingBaskets(ShoppingBaskets value) {
this.shoppingBaskets = value;
}
public ShoppingBaskets getShoppingBaskets() {
return shoppingBaskets;
}
}
| BasketItems |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/java/org/apache/hadoop/hdfs/server/federation/store/StateStoreUnavailableException.java | {
"start": 1043,
"end": 1230
} | class ____ extends IOException {
private static final long serialVersionUID = 1L;
public StateStoreUnavailableException(String msg) {
super(msg);
}
} | StateStoreUnavailableException |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/indices/recovery/RecoveryFilesInfoRequest.java | {
"start": 759,
"end": 2960
} | class ____ extends RecoveryTransportRequest {
List<String> phase1FileNames;
List<Long> phase1FileSizes;
List<String> phase1ExistingFileNames;
List<Long> phase1ExistingFileSizes;
int totalTranslogOps;
public RecoveryFilesInfoRequest(StreamInput in) throws IOException {
super(in);
int size = in.readVInt();
phase1FileNames = new ArrayList<>(size);
for (int i = 0; i < size; i++) {
phase1FileNames.add(in.readString());
}
size = in.readVInt();
phase1FileSizes = new ArrayList<>(size);
for (int i = 0; i < size; i++) {
phase1FileSizes.add(in.readVLong());
}
size = in.readVInt();
phase1ExistingFileNames = new ArrayList<>(size);
for (int i = 0; i < size; i++) {
phase1ExistingFileNames.add(in.readString());
}
size = in.readVInt();
phase1ExistingFileSizes = new ArrayList<>(size);
for (int i = 0; i < size; i++) {
phase1ExistingFileSizes.add(in.readVLong());
}
totalTranslogOps = in.readVInt();
}
RecoveryFilesInfoRequest(
long recoveryId,
long requestSeqNo,
ShardId shardId,
List<String> phase1FileNames,
List<Long> phase1FileSizes,
List<String> phase1ExistingFileNames,
List<Long> phase1ExistingFileSizes,
int totalTranslogOps
) {
super(requestSeqNo, recoveryId, shardId);
this.phase1FileNames = phase1FileNames;
this.phase1FileSizes = phase1FileSizes;
this.phase1ExistingFileNames = phase1ExistingFileNames;
this.phase1ExistingFileSizes = phase1ExistingFileSizes;
this.totalTranslogOps = totalTranslogOps;
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
out.writeStringCollection(phase1FileNames);
out.writeCollection(phase1FileSizes, StreamOutput::writeVLong);
out.writeStringCollection(phase1ExistingFileNames);
out.writeCollection(phase1ExistingFileSizes, StreamOutput::writeVLong);
out.writeVInt(totalTranslogOps);
}
}
| RecoveryFilesInfoRequest |
java | apache__spark | sql/hive/src/test/java/org/apache/spark/sql/hive/test/Complex.java | {
"start": 25590,
"end": 32462
} | class ____ extends StandardScheme<Complex> {
public void read(org.apache.thrift.protocol.TProtocol iprot, Complex struct) throws org.apache.thrift.TException {
org.apache.thrift.protocol.TField schemeField;
iprot.readStructBegin();
while (true)
{
schemeField = iprot.readFieldBegin();
if (schemeField.type == org.apache.thrift.protocol.TType.STOP) {
break;
}
switch (schemeField.id) {
case 1: // AINT
if (schemeField.type == org.apache.thrift.protocol.TType.I32) {
struct.aint = iprot.readI32();
struct.setAintIsSet(true);
} else {
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
break;
case 2: // A_STRING
if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
struct.aString = iprot.readString();
struct.setAStringIsSet(true);
} else {
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
break;
case 3: // LINT
if (schemeField.type == org.apache.thrift.protocol.TType.LIST) {
{
org.apache.thrift.protocol.TList _list0 = iprot.readListBegin();
struct.lint = new ArrayList<>(_list0.size);
for (int _i1 = 0; _i1 < _list0.size; ++_i1)
{
int _elem2; // required
_elem2 = iprot.readI32();
struct.lint.add(_elem2);
}
iprot.readListEnd();
}
struct.setLintIsSet(true);
} else {
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
break;
case 4: // L_STRING
if (schemeField.type == org.apache.thrift.protocol.TType.LIST) {
{
org.apache.thrift.protocol.TList _list3 = iprot.readListBegin();
struct.lString = new ArrayList<>(_list3.size);
for (int _i4 = 0; _i4 < _list3.size; ++_i4)
{
String _elem5; // required
_elem5 = iprot.readString();
struct.lString.add(_elem5);
}
iprot.readListEnd();
}
struct.setLStringIsSet(true);
} else {
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
break;
case 5: // LINT_STRING
if (schemeField.type == org.apache.thrift.protocol.TType.LIST) {
{
org.apache.thrift.protocol.TList _list6 = iprot.readListBegin();
struct.lintString = new ArrayList<>(_list6.size);
for (int _i7 = 0; _i7 < _list6.size; ++_i7)
{
IntString _elem8; // required
_elem8 = new IntString();
_elem8.read(iprot);
struct.lintString.add(_elem8);
}
iprot.readListEnd();
}
struct.setLintStringIsSet(true);
} else {
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
break;
case 6: // M_STRING_STRING
if (schemeField.type == org.apache.thrift.protocol.TType.MAP) {
{
org.apache.thrift.protocol.TMap _map9 = iprot.readMapBegin();
struct.mStringString = new HashMap<String,String>(2*_map9.size);
for (int _i10 = 0; _i10 < _map9.size; ++_i10)
{
String _key11; // required
String _val12; // required
_key11 = iprot.readString();
_val12 = iprot.readString();
struct.mStringString.put(_key11, _val12);
}
iprot.readMapEnd();
}
struct.setMStringStringIsSet(true);
} else {
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
break;
default:
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
iprot.readFieldEnd();
}
iprot.readStructEnd();
struct.validate();
}
public void write(org.apache.thrift.protocol.TProtocol oprot, Complex struct) throws org.apache.thrift.TException {
struct.validate();
oprot.writeStructBegin(STRUCT_DESC);
oprot.writeFieldBegin(AINT_FIELD_DESC);
oprot.writeI32(struct.aint);
oprot.writeFieldEnd();
if (struct.aString != null) {
oprot.writeFieldBegin(A_STRING_FIELD_DESC);
oprot.writeString(struct.aString);
oprot.writeFieldEnd();
}
if (struct.lint != null) {
oprot.writeFieldBegin(LINT_FIELD_DESC);
{
oprot.writeListBegin(new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.I32, struct.lint.size()));
for (int _iter13 : struct.lint)
{
oprot.writeI32(_iter13);
}
oprot.writeListEnd();
}
oprot.writeFieldEnd();
}
if (struct.lString != null) {
oprot.writeFieldBegin(L_STRING_FIELD_DESC);
{
oprot.writeListBegin(new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRING, struct.lString.size()));
for (String _iter14 : struct.lString)
{
oprot.writeString(_iter14);
}
oprot.writeListEnd();
}
oprot.writeFieldEnd();
}
if (struct.lintString != null) {
oprot.writeFieldBegin(LINT_STRING_FIELD_DESC);
{
oprot.writeListBegin(new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRUCT, struct.lintString.size()));
for (IntString _iter15 : struct.lintString)
{
_iter15.write(oprot);
}
oprot.writeListEnd();
}
oprot.writeFieldEnd();
}
if (struct.mStringString != null) {
oprot.writeFieldBegin(M_STRING_STRING_FIELD_DESC);
{
oprot.writeMapBegin(new org.apache.thrift.protocol.TMap(org.apache.thrift.protocol.TType.STRING, org.apache.thrift.protocol.TType.STRING, struct.mStringString.size()));
for (Map.Entry<String, String> _iter16 : struct.mStringString.entrySet())
{
oprot.writeString(_iter16.getKey());
oprot.writeString(_iter16.getValue());
}
oprot.writeMapEnd();
}
oprot.writeFieldEnd();
}
oprot.writeFieldStop();
oprot.writeStructEnd();
}
}
private static | ComplexStandardScheme |
java | apache__spark | common/network-common/src/main/java/org/apache/spark/network/protocol/RpcFailure.java | {
"start": 975,
"end": 2172
} | class ____ extends AbstractMessage implements ResponseMessage {
public final long requestId;
public final String errorString;
public RpcFailure(long requestId, String errorString) {
this.requestId = requestId;
this.errorString = errorString;
}
@Override
public Message.Type type() { return Type.RpcFailure; }
@Override
public int encodedLength() {
return 8 + Encoders.Strings.encodedLength(errorString);
}
@Override
public void encode(ByteBuf buf) {
buf.writeLong(requestId);
Encoders.Strings.encode(buf, errorString);
}
public static RpcFailure decode(ByteBuf buf) {
long requestId = buf.readLong();
String errorString = Encoders.Strings.decode(buf);
return new RpcFailure(requestId, errorString);
}
@Override
public int hashCode() {
return Objects.hash(requestId, errorString);
}
@Override
public boolean equals(Object other) {
if (other instanceof RpcFailure o) {
return requestId == o.requestId && errorString.equals(o.errorString);
}
return false;
}
@Override
public String toString() {
return "RpcFailure[requestId=" + requestId + ",errorString=" + errorString + "]";
}
}
| RpcFailure |
java | spring-projects__spring-framework | spring-aop/src/main/java/org/springframework/aop/target/dynamic/AbstractRefreshableTargetSource.java | {
"start": 1349,
"end": 4137
} | class ____ implements TargetSource, Refreshable {
/** Logger available to subclasses. */
protected final Log logger = LogFactory.getLog(getClass());
@SuppressWarnings("NullAway.Init")
protected Object targetObject;
private long refreshCheckDelay = -1;
private long lastRefreshCheck = -1;
private long lastRefreshTime = -1;
private long refreshCount = 0;
/**
* Set the delay between refresh checks, in milliseconds.
* Default is -1, indicating no refresh checks at all.
* <p>Note that an actual refresh will only happen when
* {@link #requiresRefresh()} returns {@code true}.
*/
public void setRefreshCheckDelay(long refreshCheckDelay) {
this.refreshCheckDelay = refreshCheckDelay;
}
@Override
public synchronized Class<?> getTargetClass() {
if (this.targetObject == null) {
refresh();
}
return this.targetObject.getClass();
}
@Override
public final synchronized @Nullable Object getTarget() {
if ((refreshCheckDelayElapsed() && requiresRefresh()) || this.targetObject == null) {
refresh();
}
return this.targetObject;
}
@Override
public final synchronized void refresh() {
logger.debug("Attempting to refresh target");
this.targetObject = freshTarget();
this.refreshCount++;
this.lastRefreshTime = System.currentTimeMillis();
logger.debug("Target refreshed successfully");
}
@Override
public synchronized long getRefreshCount() {
return this.refreshCount;
}
@Override
public synchronized long getLastRefreshTime() {
return this.lastRefreshTime;
}
private boolean refreshCheckDelayElapsed() {
if (this.refreshCheckDelay < 0) {
return false;
}
long currentTimeMillis = System.currentTimeMillis();
if (this.lastRefreshCheck < 0 || currentTimeMillis - this.lastRefreshCheck > this.refreshCheckDelay) {
// Going to perform a refresh check - update the timestamp.
this.lastRefreshCheck = currentTimeMillis;
logger.debug("Refresh check delay elapsed - checking whether refresh is required");
return true;
}
return false;
}
/**
* Determine whether a refresh is required.
* Invoked for each refresh check, after the refresh check delay has elapsed.
* <p>The default implementation always returns {@code true}, triggering
* a refresh every time the delay has elapsed. To be overridden by subclasses
* with an appropriate check of the underlying target resource.
* @return whether a refresh is required
*/
protected boolean requiresRefresh() {
return true;
}
/**
* Obtain a fresh target object.
* <p>Only invoked if a refresh check has found that a refresh is required
* (that is, {@link #requiresRefresh()} has returned {@code true}).
* @return the fresh target object
*/
protected abstract Object freshTarget();
}
| AbstractRefreshableTargetSource |
java | quarkusio__quarkus | integration-tests/smallrye-metrics/src/main/java/io/quarkus/it/metrics/inheritance/InheritanceMetricsExtended.java | {
"start": 124,
"end": 235
} | class ____ extends InheritanceMetricsBase {
public void anotherMethod() {
}
}
| InheritanceMetricsExtended |
java | alibaba__fastjson | src/test/java/com/alibaba/json/bvt/issue_2200/Issue2224.java | {
"start": 498,
"end": 7596
} | class ____ extends TestCase {
//support inherit with other parameterized type
public void test_for_issue() {
String json = "[{\"idNo\":\"123456\",\"name\":\"tom\"},{\"idNo\":\"123457\",\"name\":\"jack\"}]";
PersonCollection personCollection = JSON.parseObject(json, PersonCollection.class);
assertNotNull(personCollection);
assertEquals(2, personCollection.size());
assertEquals("tom", personCollection.get("123456").getName());
assertEquals("jack", personCollection.get("123457").getName());
String json2 = JSON.toJSONString(personCollection);
assertNotNull(json2);
}
//support inherit with other parameterized type and item type is generic
public void test_for_issue_2() {
String json = "[[{\"idNo\":\"123\",\"name\":\"张三\"},{\"idNo\":\"124\",\"name\":\"张三\"}],[{\"idNo\":\"223\",\"name\":\"李四\"},{\"idNo\":\"224\",\"name\":\"李四\"}]]";
PersonGroupedCollection personCollection = JSON.parseObject(json, PersonGroupedCollection.class);
assertNotNull(personCollection);
assertEquals(2, personCollection.size());
assertEquals(2, personCollection.get("张三").size());
assertEquals("123", personCollection.get("张三").get(0).getIdNo());
assertEquals("张三", personCollection.get("张三").get(0).getName());
assertEquals("124", personCollection.get("张三").get(1).getIdNo());
assertEquals("张三", personCollection.get("张三").get(1).getName());
assertEquals(2, personCollection.get("李四").size());
assertEquals("223", personCollection.get("李四").get(0).getIdNo());
assertEquals("李四", personCollection.get("李四").get(0).getName());
assertEquals("224", personCollection.get("李四").get(1).getIdNo());
assertEquals("李四", personCollection.get("李四").get(1).getName());
String json2 = JSON.toJSONString(personCollection);
assertNotNull(json2);
}
//support inherit with other parameterized type and item type is bean array
public void test_for_issue_3() {
String json = "[[{\"idNo\":\"123\",\"name\":\"张三\"},{\"idNo\":\"124\",\"name\":\"张三\"}],[{\"idNo\":\"223\",\"name\":\"李四\"},{\"idNo\":\"224\",\"name\":\"李四\"}]]";
ArrayPersonGroupedCollection personCollection = JSON.parseObject(json, ArrayPersonGroupedCollection.class);
assertNotNull(personCollection);
assertEquals(2, personCollection.size());
assertEquals(2, personCollection.get("张三").length);
assertEquals("123", personCollection.get("张三")[0].getIdNo());
assertEquals("张三", personCollection.get("张三")[0].getName());
assertEquals("124", personCollection.get("张三")[1].getIdNo());
assertEquals("张三", personCollection.get("张三")[1].getName());
assertEquals(2, personCollection.get("李四").length);
assertEquals("223", personCollection.get("李四")[0].getIdNo());
assertEquals("李四", personCollection.get("李四")[0].getName());
assertEquals("224", personCollection.get("李四")[1].getIdNo());
assertEquals("李四", personCollection.get("李四")[1].getName());
String json2 = JSON.toJSONString(personCollection);
assertNotNull(json2);
}
//support inherit with other parameterized type and item type is generic array
public void test_for_issue_4() {
String json = "[[{\"idNo\":\"123\",\"name\":\"张三\"},{\"idNo\":\"124\",\"name\":\"张三\"}],[{\"idNo\":\"223\",\"name\":\"李四\"},{\"idNo\":\"224\",\"name\":\"李四\"}]]";
MAPersonGroupedCollection personCollection = JSON.parseObject(json, MAPersonGroupedCollection.class);
assertNotNull(personCollection);
assertEquals(2, personCollection.size());
assertEquals(2, personCollection.get("张三").length);
assertEquals("123", personCollection.get("张三")[0].get("idNo"));
assertEquals("张三", personCollection.get("张三")[0].get("name"));
assertEquals("124", personCollection.get("张三")[1].get("idNo"));
assertEquals("张三", personCollection.get("张三")[1].get("name"));
assertEquals(2, personCollection.get("李四").length);
assertEquals("223", personCollection.get("李四")[0].get("idNo"));
assertEquals("李四", personCollection.get("李四")[0].get("name"));
assertEquals("224", personCollection.get("李四")[1].get("idNo"));
assertEquals("李四", personCollection.get("李四")[1].get("name"));
String json2 = JSON.toJSONString(personCollection);
assertNotNull(json2);
}
//support inherit with other parameterized type and item type is generic array contains array
public void test_for_issue_5() {
String json = "[[{\"idNo\":[\"123\",\"123x\"],\"name\":[\"张三\",\"张三一\"]},{\"idNo\":[\"124\",\"124x\"],\"name\":[\"张三\",\"张三一\"]}],[{\"idNo\":[\"223\",\"223y\"],\"name\":[\"李四\",\"李小四\"]},{\"idNo\":[\"224\",\"224y\"],\"name\":[\"李四\",\"李小四\"]}]]";
MA2PersonGroupedCollection personCollection = JSON.parseObject(json, MA2PersonGroupedCollection.class);
assertNotNull(personCollection);
assertEquals(2, personCollection.size());
assertEquals(2, personCollection.get("张三").length);
assertEquals(2, personCollection.get("张三")[0].get("idNo").length);
assertEquals("123", personCollection.get("张三")[0].get("idNo")[0]);
assertEquals("123x", personCollection.get("张三")[0].get("idNo")[1]);
assertEquals(2, personCollection.get("张三")[0].get("name").length);
assertEquals("张三", personCollection.get("张三")[0].get("name")[0]);
assertEquals("张三一", personCollection.get("张三")[0].get("name")[1]);
assertEquals(2, personCollection.get("张三")[1].get("idNo").length);
assertEquals("124", personCollection.get("张三")[1].get("idNo")[0]);
assertEquals("124x", personCollection.get("张三")[1].get("idNo")[1]);
assertEquals(2, personCollection.get("张三")[1].get("name").length);
assertEquals("张三", personCollection.get("张三")[1].get("name")[0]);
assertEquals("张三一", personCollection.get("张三")[1].get("name")[1]);
assertEquals(2, personCollection.get("李四").length);
assertEquals(2, personCollection.get("李四")[0].get("idNo").length);
assertEquals("223", personCollection.get("李四")[0].get("idNo")[0]);
assertEquals("223y", personCollection.get("李四")[0].get("idNo")[1]);
assertEquals(2, personCollection.get("李四")[0].get("name").length);
assertEquals("李四", personCollection.get("李四")[0].get("name")[0]);
assertEquals("李小四", personCollection.get("李四")[0].get("name")[1]);
assertEquals(2, personCollection.get("李四")[1].get("idNo").length);
assertEquals("224", personCollection.get("李四")[1].get("idNo")[0]);
assertEquals("224y", personCollection.get("李四")[1].get("idNo")[1]);
assertEquals(2, personCollection.get("李四")[1].get("name").length);
assertEquals("李四", personCollection.get("李四")[1].get("name")[0]);
assertEquals("李小四", personCollection.get("李四")[1].get("name")[1]);
String json2 = JSON.toJSONString(personCollection);
assertNotNull(json2);
}
}
| Issue2224 |
java | elastic__elasticsearch | x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plugin/EsqlMediaTypeParserTests.java | {
"start": 1072,
"end": 10581
} | class ____ extends ESTestCase {
public void testPlainTextDetection() {
MediaType text = getResponseMediaType(reqWithAccept("text/plain"), createTestInstance(false));
assertThat(text, is(PLAIN_TEXT));
}
public void testCsvDetection() {
MediaType text = getResponseMediaType(reqWithAccept("text/csv"), createTestInstance(false));
assertThat(text, is(CSV));
text = getResponseMediaType(reqWithAccept("text/csv; delimiter=x"), createTestInstance(false));
assertThat(text, is(CSV));
}
public void testTsvDetection() {
MediaType text = getResponseMediaType(reqWithAccept("text/tab-separated-values"), createTestInstance(false));
assertThat(text, is(TSV));
}
public void testMediaTypeDetectionWithParameters() {
assertThat(getResponseMediaType(reqWithAccept("text/plain; charset=utf-8"), createTestInstance(false)), is(PLAIN_TEXT));
assertThat(getResponseMediaType(reqWithAccept("text/plain; header=present"), createTestInstance(false)), is(PLAIN_TEXT));
assertThat(
getResponseMediaType(reqWithAccept("text/plain; charset=utf-8; header=present"), createTestInstance(false)),
is(PLAIN_TEXT)
);
assertThat(getResponseMediaType(reqWithAccept("text/csv; charset=utf-8"), createTestInstance(false)), is(CSV));
assertThat(getResponseMediaType(reqWithAccept("text/csv; header=present"), createTestInstance(false)), is(CSV));
assertThat(getResponseMediaType(reqWithAccept("text/csv; charset=utf-8; header=present"), createTestInstance(false)), is(CSV));
assertThat(getResponseMediaType(reqWithAccept("text/tab-separated-values; charset=utf-8"), createTestInstance(false)), is(TSV));
assertThat(getResponseMediaType(reqWithAccept("text/tab-separated-values; header=present"), createTestInstance(false)), is(TSV));
assertThat(
getResponseMediaType(reqWithAccept("text/tab-separated-values; charset=utf-8; header=present"), createTestInstance(false)),
is(TSV)
);
}
public void testInvalidFormat() {
IllegalArgumentException e = expectThrows(
IllegalArgumentException.class,
() -> getResponseMediaType(reqWithAccept("text/garbage"), createTestInstance(false))
);
assertEquals(e.getMessage(), "Invalid request content type: Accept=[text/garbage], Content-Type=[application/json], format=[null]");
}
public void testColumnarWithAcceptText() {
var accept = randomFrom("text/plain", "text/csv", "text/tab-separated-values");
IllegalArgumentException e = expectThrows(
IllegalArgumentException.class,
() -> getResponseMediaType(reqWithAccept(accept), createTestInstance(true))
);
assertEquals(e.getMessage(), "Invalid use of [columnar] argument: cannot be used in combination with [txt, csv, tsv] formats");
}
public void testIncludeCCSMetadataWithAcceptText() {
var accept = randomFrom("text/plain", "text/csv", "text/tab-separated-values");
IllegalArgumentException e = expectThrows(
IllegalArgumentException.class,
() -> getResponseMediaType(reqWithAccept(accept), createTestInstance(false, true, false))
);
assertEquals(
"Invalid use of [include_ccs_metadata] argument: cannot be used in combination with [txt, csv, tsv] formats",
e.getMessage()
);
}
public void testIncludeExecutionMetadataWithAcceptText() {
var accept = randomFrom("text/plain", "text/csv", "text/tab-separated-values");
IllegalArgumentException e = expectThrows(
IllegalArgumentException.class,
() -> getResponseMediaType(reqWithAccept(accept), createCpsTestInstance(false, true, false))
);
assertEquals(
"Invalid use of [include_execution_metadata] argument: cannot be used in combination with [txt, csv, tsv] formats",
e.getMessage()
);
}
public void testColumnarWithParamText() {
IllegalArgumentException e = expectThrows(
IllegalArgumentException.class,
() -> getResponseMediaType(reqWithParams(Map.of("format", randomFrom("txt", "csv", "tsv"))), createTestInstance(true))
);
assertEquals(e.getMessage(), "Invalid use of [columnar] argument: cannot be used in combination with [txt, csv, tsv] formats");
}
public void testIncludeCCSMetadataWithNonJSONMediaTypesInParams() {
{
RestRequest restRequest = reqWithParams(Map.of("format", randomFrom("txt", "csv", "tsv")));
IllegalArgumentException e = expectThrows(
IllegalArgumentException.class,
() -> getResponseMediaType(restRequest, createTestInstance(false, true, false))
);
assertEquals(
"Invalid use of [include_ccs_metadata] argument: cannot be used in combination with [txt, csv, tsv] formats",
e.getMessage()
);
}
{
// check that no exception is thrown for the XContent types
RestRequest restRequest = reqWithParams(Map.of("format", randomFrom("SMILE", "YAML", "CBOR", "JSON")));
MediaType responseMediaType = getResponseMediaType(restRequest, createTestInstance(true, true, false));
assertNotNull(responseMediaType);
}
}
public void testIncludeExecutionMetadataWithNonJSONMediaTypesInParams() {
{
RestRequest restRequest = reqWithParams(Map.of("format", randomFrom("txt", "csv", "tsv")));
IllegalArgumentException e = expectThrows(
IllegalArgumentException.class,
() -> getResponseMediaType(restRequest, createCpsTestInstance(false, true, false))
);
assertEquals(
"Invalid use of [include_execution_metadata] argument: cannot be used in combination with [txt, csv, tsv] formats",
e.getMessage()
);
}
{
// check that no exception is thrown for the XContent types
RestRequest restRequest = reqWithParams(Map.of("format", randomFrom("SMILE", "YAML", "CBOR", "JSON")));
MediaType responseMediaType = getResponseMediaType(restRequest, createCpsTestInstance(true, true, false));
assertNotNull(responseMediaType);
}
}
public void testProfileWithNonJSONMediaTypesInParams() {
{
RestRequest restRequest = reqWithParams(Map.of("format", randomFrom("txt", "csv", "tsv")));
IllegalArgumentException e = expectThrows(
IllegalArgumentException.class,
() -> getResponseMediaType(restRequest, createTestInstance(false, false, true))
);
assertEquals("Invalid use of [profile] argument: cannot be used in combination with [txt, csv, tsv] formats", e.getMessage());
}
{
// check that no exception is thrown for the XContent types
RestRequest restRequest = reqWithParams(Map.of("format", randomFrom("SMILE", "YAML", "CBOR", "JSON")));
MediaType responseMediaType = getResponseMediaType(restRequest, createTestInstance(true, false, true));
assertNotNull(responseMediaType);
}
}
public void testNoFormat() {
IllegalArgumentException e = expectThrows(
IllegalArgumentException.class,
() -> getResponseMediaType(emptyRequest(), createTestInstance(false))
);
assertEquals(e.getMessage(), "Invalid request content type: Accept=[null], Content-Type=[null], format=[null]");
}
public void testNoContentType() {
RestRequest fakeRestRequest = emptyRequest();
assertThat(getResponseMediaType(fakeRestRequest, CSV), is(CSV));
assertThat(getResponseMediaType(fakeRestRequest, JSON), is(JSON));
}
private static RestRequest reqWithAccept(String acceptHeader) {
return new FakeRestRequest.Builder(NamedXContentRegistry.EMPTY).withHeaders(
Map.of("Content-Type", Collections.singletonList("application/json"), "Accept", Collections.singletonList(acceptHeader))
).build();
}
private static RestRequest reqWithParams(Map<String, String> params) {
return new FakeRestRequest.Builder(NamedXContentRegistry.EMPTY).withHeaders(
Map.of("Content-Type", Collections.singletonList("application/json"))
).withParams(params).build();
}
private static RestRequest emptyRequest() {
return new FakeRestRequest.Builder(NamedXContentRegistry.EMPTY).build();
}
protected EsqlQueryRequest createTestInstance(boolean columnar) {
var request = new EsqlQueryRequest();
request.columnar(columnar);
return request;
}
protected EsqlQueryRequest createTestInstance(boolean columnar, boolean includeCCSMetadata, boolean profile) {
var request = createTestInstance(columnar);
request.includeCCSMetadata(includeCCSMetadata);
request.profile(profile);
return request;
}
protected EsqlQueryRequest createCpsTestInstance(boolean columnar, boolean includeExecutionMetadata, boolean profile) {
var request = createTestInstance(columnar);
request.includeExecutionMetadata(includeExecutionMetadata);
request.profile(profile);
return request;
}
}
| EsqlMediaTypeParserTests |
java | quarkusio__quarkus | extensions/oidc-common/runtime/src/main/java/io/quarkus/oidc/common/runtime/config/OidcClientCommonConfigBuilder.java | {
"start": 13099,
"end": 15218
} | class ____<T> {
private record ProviderImpl(Optional<String> name, Optional<String> keyringName,
Optional<String> key) implements Provider {
}
private final Function<Provider, T> providerSetter;
private Optional<String> name;
private Optional<String> keyringName;
private Optional<String> key;
private ProviderBuilder(Function<Provider, T> providerSetter, Provider provider) {
this.providerSetter = providerSetter;
this.name = provider.name();
this.keyringName = provider.keyringName();
this.key = provider.key();
}
public ProviderBuilder() {
this.providerSetter = null;
this.name = Optional.empty();
this.keyringName = Optional.empty();
this.key = Optional.empty();
}
/**
* @param name {@link Provider#name()}
* @return this builder
*/
public ProviderBuilder<T> name(String name) {
this.name = Optional.ofNullable(name);
return this;
}
/**
* @param keyringName {@link Provider#keyringName()}
* @return this builder
*/
public ProviderBuilder<T> keyringName(String keyringName) {
this.keyringName = Optional.ofNullable(keyringName);
return this;
}
/**
* @param key {@link Provider#key()}
* @return this builder
*/
public ProviderBuilder<T> key(String key) {
this.key = Optional.ofNullable(key);
return this;
}
/**
* Builds {@link Provider}.
*
* @return T builder
*/
public T end() {
Objects.requireNonNull(providerSetter);
return providerSetter.apply(build());
}
/**
* Builds {@link Provider}.
*
* @return Provider
*/
public Provider build() {
return new ProviderImpl(name, keyringName, key);
}
}
public static final | ProviderBuilder |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ChRootedFs.java | {
"start": 4374,
"end": 14693
} | class ____ internal we can ignore
* this issue but if we were to make it external then this needs
* to be resolved.
*/
// Handle the two cases:
// scheme:/// and scheme://authority/
myUri = new URI(myFs.getUri().toString() +
(myFs.getUri().getAuthority() == null ? "" : Path.SEPARATOR) +
chRootPathPart.toUri().getPath().substring(1));
super.checkPath(theRoot);
}
@Override
public URI getUri() {
return myUri;
}
/**
*
* Strip out the root from the path.
*
* @param p - fully qualified path p
* @return - the remaining path without the beginning /
*/
public String stripOutRoot(final Path p) {
try {
checkPath(p);
} catch (IllegalArgumentException e) {
throw new RuntimeException("Internal Error - path " + p +
" should have been with URI" + myUri);
}
String pathPart = p.toUri().getPath();
return (pathPart.length() == chRootPathPartString.length()) ?
"" : pathPart.substring(chRootPathPartString.length() +
(chRootPathPart.isRoot() ? 0 : 1));
}
@Override
public Path getHomeDirectory() {
return myFs.getHomeDirectory();
}
@Override
public Path getInitialWorkingDirectory() {
/*
* 3 choices here: return null or / or strip out the root out of myFs's
* inital wd.
* Only reasonable choice for initialWd for chrooted fds is null
*/
return null;
}
public Path getResolvedQualifiedPath(final Path f)
throws FileNotFoundException {
return myFs.makeQualified(
new Path(chRootPathPartString + f.toUri().toString()));
}
@Override
public FSDataOutputStream createInternal(final Path f,
final EnumSet<CreateFlag> flag, final FsPermission absolutePermission,
final int bufferSize, final short replication, final long blockSize,
final Progressable progress, final ChecksumOpt checksumOpt,
final boolean createParent) throws IOException, UnresolvedLinkException {
return myFs.createInternal(fullPath(f), flag,
absolutePermission, bufferSize,
replication, blockSize, progress, checksumOpt, createParent);
}
@Override
public boolean delete(final Path f, final boolean recursive)
throws IOException, UnresolvedLinkException {
return myFs.delete(fullPath(f), recursive);
}
@Override
public BlockLocation[] getFileBlockLocations(final Path f, final long start,
final long len) throws IOException, UnresolvedLinkException {
return myFs.getFileBlockLocations(fullPath(f), start, len);
}
@Override
public FileChecksum getFileChecksum(final Path f)
throws IOException, UnresolvedLinkException {
return myFs.getFileChecksum(fullPath(f));
}
@Override
public FileStatus getFileStatus(final Path f)
throws IOException, UnresolvedLinkException {
return myFs.getFileStatus(fullPath(f));
}
public void access(Path path, FsAction mode) throws AccessControlException,
FileNotFoundException, UnresolvedLinkException, IOException {
myFs.access(fullPath(path), mode);
}
@Override
public FileStatus getFileLinkStatus(final Path f)
throws IOException, UnresolvedLinkException {
return myFs.getFileLinkStatus(fullPath(f));
}
@Override
public FsStatus getFsStatus() throws IOException {
return myFs.getFsStatus();
}
@Override
@Deprecated
public FsServerDefaults getServerDefaults() throws IOException {
return myFs.getServerDefaults();
}
@Override
public FsServerDefaults getServerDefaults(final Path f) throws IOException {
return myFs.getServerDefaults(fullPath(f));
}
@Override
public int getUriDefaultPort() {
return myFs.getUriDefaultPort();
}
@Override
public FileStatus[] listStatus(final Path f)
throws IOException, UnresolvedLinkException {
return myFs.listStatus(fullPath(f));
}
@Override
public RemoteIterator<FileStatus> listStatusIterator(final Path f)
throws IOException, UnresolvedLinkException {
return myFs.listStatusIterator(fullPath(f));
}
@Override
public RemoteIterator<LocatedFileStatus> listLocatedStatus(final Path f)
throws IOException, UnresolvedLinkException {
return myFs.listLocatedStatus(fullPath(f));
}
@Override
public void mkdir(final Path dir, final FsPermission permission,
final boolean createParent) throws IOException, UnresolvedLinkException {
myFs.mkdir(fullPath(dir), permission, createParent);
}
@Override
public FSDataInputStream open(final Path f, final int bufferSize)
throws IOException, UnresolvedLinkException {
return myFs.open(fullPath(f), bufferSize);
}
@Override
public boolean truncate(final Path f, final long newLength)
throws IOException, UnresolvedLinkException {
return myFs.truncate(fullPath(f), newLength);
}
@Override
public void renameInternal(final Path src, final Path dst)
throws IOException, UnresolvedLinkException {
// note fullPath will check that paths are relative to this FileSystem.
// Hence both are in same file system and a rename is valid
myFs.renameInternal(fullPath(src), fullPath(dst));
}
@Override
public void renameInternal(final Path src, final Path dst,
final boolean overwrite)
throws IOException, UnresolvedLinkException {
// note fullPath will check that paths are relative to this FileSystem.
// Hence both are in same file system and a rename is valid
myFs.renameInternal(fullPath(src), fullPath(dst), overwrite);
}
@Override
public void setOwner(final Path f, final String username,
final String groupname)
throws IOException, UnresolvedLinkException {
myFs.setOwner(fullPath(f), username, groupname);
}
@Override
public void setPermission(final Path f, final FsPermission permission)
throws IOException, UnresolvedLinkException {
myFs.setPermission(fullPath(f), permission);
}
@Override
public boolean setReplication(final Path f, final short replication)
throws IOException, UnresolvedLinkException {
return myFs.setReplication(fullPath(f), replication);
}
@Override
public void setTimes(final Path f, final long mtime, final long atime)
throws IOException, UnresolvedLinkException {
myFs.setTimes(fullPath(f), mtime, atime);
}
@Override
public void modifyAclEntries(Path path, List<AclEntry> aclSpec)
throws IOException {
myFs.modifyAclEntries(fullPath(path), aclSpec);
}
@Override
public void removeAclEntries(Path path, List<AclEntry> aclSpec)
throws IOException {
myFs.removeAclEntries(fullPath(path), aclSpec);
}
@Override
public void removeDefaultAcl(Path path) throws IOException {
myFs.removeDefaultAcl(fullPath(path));
}
@Override
public void removeAcl(Path path) throws IOException {
myFs.removeAcl(fullPath(path));
}
@Override
public void setAcl(Path path, List<AclEntry> aclSpec) throws IOException {
myFs.setAcl(fullPath(path), aclSpec);
}
@Override
public AclStatus getAclStatus(Path path) throws IOException {
return myFs.getAclStatus(fullPath(path));
}
@Override
public void setXAttr(Path path, String name, byte[] value,
EnumSet<XAttrSetFlag> flag) throws IOException {
myFs.setXAttr(fullPath(path), name, value, flag);
}
@Override
public byte[] getXAttr(Path path, String name) throws IOException {
return myFs.getXAttr(fullPath(path), name);
}
@Override
public Map<String, byte[]> getXAttrs(Path path) throws IOException {
return myFs.getXAttrs(fullPath(path));
}
@Override
public Map<String, byte[]> getXAttrs(Path path, List<String> names)
throws IOException {
return myFs.getXAttrs(fullPath(path), names);
}
@Override
public List<String> listXAttrs(Path path) throws IOException {
return myFs.listXAttrs(fullPath(path));
}
@Override
public void removeXAttr(Path path, String name) throws IOException {
myFs.removeXAttr(fullPath(path), name);
}
@Override
public Path createSnapshot(Path path, String name) throws IOException {
return myFs.createSnapshot(fullPath(path), name);
}
@Override
public void renameSnapshot(Path path, String snapshotOldName,
String snapshotNewName) throws IOException {
myFs.renameSnapshot(fullPath(path), snapshotOldName, snapshotNewName);
}
@Override
public void deleteSnapshot(Path snapshotDir, String snapshotName)
throws IOException {
myFs.deleteSnapshot(fullPath(snapshotDir), snapshotName);
}
@Override
public void satisfyStoragePolicy(final Path path) throws IOException {
myFs.satisfyStoragePolicy(path);
}
@Override
public void setStoragePolicy(Path path, String policyName)
throws IOException {
myFs.setStoragePolicy(fullPath(path), policyName);
}
@Override
public void unsetStoragePolicy(final Path src)
throws IOException {
myFs.unsetStoragePolicy(fullPath(src));
}
@Override
public BlockStoragePolicySpi getStoragePolicy(final Path src)
throws IOException {
return myFs.getStoragePolicy(src);
}
@Override
public Collection<? extends BlockStoragePolicySpi> getAllStoragePolicies()
throws IOException {
return myFs.getAllStoragePolicies();
}
@Override
public void setVerifyChecksum(final boolean verifyChecksum)
throws IOException, UnresolvedLinkException {
myFs.setVerifyChecksum(verifyChecksum);
}
@Override
public boolean supportsSymlinks() {
return myFs.supportsSymlinks();
}
@Override
public void createSymlink(final Path target, final Path link,
final boolean createParent) throws IOException, UnresolvedLinkException {
/*
* We leave the link alone:
* If qualified or link relative then of course it is okay.
* If absolute (ie / relative) then the link has to be resolved
* relative to the changed root.
*/
myFs.createSymlink(fullPath(target), link, createParent);
}
@Override
public Path getLinkTarget(final Path f) throws IOException {
return myFs.getLinkTarget(fullPath(f));
}
@Override
public List<Token<?>> getDelegationTokens(String renewer) throws IOException {
return myFs.getDelegationTokens(renewer);
}
}
| is |
java | elastic__elasticsearch | x-pack/plugin/identity-provider/qa/idp-rest-tests/src/javaRestTest/java/org/elasticsearch/xpack/idp/IdpRestTestCase.java | {
"start": 1678,
"end": 8918
} | class ____ extends ESRestTestCase {
@Override
protected Settings restAdminSettings() {
String token = basicAuthHeaderValue("admin_user", new SecureString("admin-password".toCharArray()));
return Settings.builder().put(ThreadContext.PREFIX + ".Authorization", token).build();
}
@Override
protected Settings restClientSettings() {
String token = basicAuthHeaderValue("idp_admin", new SecureString("idp-password".toCharArray()));
return Settings.builder().put(ThreadContext.PREFIX + ".Authorization", token).build();
}
protected User createUser(String username, SecureString password, String role) throws IOException {
final User user = new User(
username,
new String[] { role },
username + " in " + getTestName(),
username + "@test.example.com",
Map.of(),
true
);
final String endpoint = "/_security/user/" + username;
final Request request = new Request(HttpPut.METHOD_NAME, endpoint);
final String body = Strings.format("""
{
"username": "%s",
"full_name": "%s",
"email": "%s",
"password": "%s",
"roles": [ "%s" ]
}
""", user.principal(), user.fullName(), user.email(), password.toString(), role);
request.setJsonEntity(body);
request.addParameters(Map.of("refresh", "true"));
request.setOptions(RequestOptions.DEFAULT);
adminClient().performRequest(request);
return user;
}
protected void deleteUser(String username) throws IOException {
final String endpoint = "/_security/user/" + username;
final Request request = new Request(HttpDelete.METHOD_NAME, endpoint);
request.addParameters(Map.of("refresh", "true"));
request.setOptions(RequestOptions.DEFAULT);
adminClient().performRequest(request);
}
protected void createRole(
String name,
Collection<String> clusterPrivileges,
Collection<RoleDescriptor.IndicesPrivileges> indicesPrivileges,
Collection<RoleDescriptor.ApplicationResourcePrivileges> applicationPrivileges
) throws IOException {
final RoleDescriptor descriptor = new RoleDescriptor(
name,
clusterPrivileges.toArray(String[]::new),
indicesPrivileges.toArray(RoleDescriptor.IndicesPrivileges[]::new),
applicationPrivileges.toArray(RoleDescriptor.ApplicationResourcePrivileges[]::new),
null,
null,
Map.of(),
Map.of()
);
final String body = Strings.toString(descriptor);
final Request request = new Request(HttpPut.METHOD_NAME, "/_security/role/" + name);
request.setJsonEntity(body);
adminClient().performRequest(request);
}
protected void deleteRole(String name) throws IOException {
final Request request = new Request(HttpDelete.METHOD_NAME, "/_security/role/" + name);
adminClient().performRequest(request);
}
protected void createApplicationPrivileges(String applicationName, Map<String, Collection<String>> privileges) throws IOException {
final ByteArrayOutputStream bos = new ByteArrayOutputStream();
final XContentBuilder builder = new XContentBuilder(XContentType.JSON.xContent(), bos);
builder.startObject();
builder.startObject(applicationName);
for (var entry : privileges.entrySet()) {
builder.startObject(entry.getKey());
builder.stringListField(ApplicationPrivilegeDescriptor.Fields.ACTIONS.getPreferredName(), entry.getValue());
builder.endObject();
}
builder.endObject();
builder.endObject();
builder.flush();
final Request request = new Request(HttpPost.METHOD_NAME, "/_security/privilege/");
request.setJsonEntity(bos.toString(StandardCharsets.UTF_8));
adminClient().performRequest(request);
}
protected void setUserPassword(String username, SecureString password) throws IOException {
final String endpoint = "/_security/user/" + username + "/_password";
final Request request = new Request(HttpPost.METHOD_NAME, endpoint);
final String body = Strings.format("""
{
"password": "%s"
}
""", password.toString());
request.setJsonEntity(body);
request.setOptions(RequestOptions.DEFAULT);
adminClient().performRequest(request);
}
protected SamlServiceProviderIndex.DocumentVersion createServiceProvider(String entityId, Map<String, Object> body) throws IOException {
// so that we don't hit [SERVICE_UNAVAILABLE/1/state not recovered / initialized]
ensureGreen("");
final Request request = new Request("PUT", "/_idp/saml/sp/" + encode(entityId) + "?refresh=" + RefreshPolicy.IMMEDIATE.getValue());
final String entity = Strings.toString(JsonXContent.contentBuilder().map(body));
request.setJsonEntity(entity);
final Response response = client().performRequest(request);
final Map<String, Object> map = entityAsMap(response);
assertThat(ObjectPath.eval("service_provider.entity_id", map), equalTo(entityId));
assertThat(ObjectPath.eval("service_provider.enabled", map), equalTo(true));
final Object docId = ObjectPath.eval("document._id", map);
final Object seqNo = ObjectPath.eval("document._seq_no", map);
final Object primaryTerm = ObjectPath.eval("document._primary_term", map);
assertThat(docId, instanceOf(String.class));
assertThat(seqNo, instanceOf(Number.class));
assertThat(primaryTerm, instanceOf(Number.class));
return new SamlServiceProviderIndex.DocumentVersion((String) docId, asLong(primaryTerm), asLong(seqNo));
}
protected void checkIndexDoc(SamlServiceProviderIndex.DocumentVersion docVersion) throws IOException {
final Request request = new Request("GET", SamlServiceProviderIndex.INDEX_NAME + "/_doc/" + docVersion.id);
final Response response = adminClient().performRequest(request);
final Map<String, Object> map = entityAsMap(response);
assertThat(map.get("_index"), equalTo(SamlServiceProviderIndex.INDEX_NAME));
assertThat(map.get("_id"), equalTo(docVersion.id));
assertThat(asLong(map.get("_seq_no")), equalTo(docVersion.seqNo));
assertThat(asLong(map.get("_primary_term")), equalTo(docVersion.primaryTerm));
}
protected Long asLong(Object val) {
if (val == null) {
return null;
}
if (val instanceof Long) {
return (Long) val;
}
if (val instanceof Number) {
return ((Number) val).longValue();
}
if (val instanceof String) {
return Long.parseLong((String) val);
}
throw new IllegalArgumentException("Value [" + val + "] of type [" + val.getClass() + "] is not a Long");
}
protected String encode(String param) {
return URLEncoder.encode(param, StandardCharsets.UTF_8);
}
}
| IdpRestTestCase |
java | apache__kafka | clients/src/main/java/org/apache/kafka/common/security/oauthbearer/JwtValidator.java | {
"start": 2505,
"end": 2998
} | interface ____ extends OAuthBearerConfigurable {
/**
* Accepts an OAuth JWT access token in base-64 encoded format, validates, and returns an
* OAuthBearerToken.
*
* @param accessToken Non-<code>null</code> JWT access token
*
* @return {@link OAuthBearerToken}
*
* @throws JwtValidatorException Thrown on errors performing validation of given token
*/
OAuthBearerToken validate(String accessToken) throws JwtValidatorException;
}
| JwtValidator |
java | spring-projects__spring-framework | spring-web/src/main/java/org/springframework/web/util/WhatWgUrlParser.java | {
"start": 82639,
"end": 93315
} | class ____ implements IpAddress {
private final int[] pieces;
private final String string;
private Ipv6Address(int[] pieces) {
Assert.state(pieces.length == 8, "Invalid amount of IPv6 pieces");
this.pieces = pieces;
this.string = serialize(pieces);
}
/**
* The IPv6 parser takes a scalar value string input and then runs these steps.
* They return failure or an IPv6 address.
*/
public static Ipv6Address parse(String input) {
// Let address be a new IPv6 address whose IPv6 pieces are all 0.
int[] address = new int[8];
// Let pieceIndex be 0.
int pieceIndex = 0;
// Let compress be null.
Integer compress = null;
// Let pointer be a pointer for input.
int pointer = 0;
int inputLength = input.length();
int c = (inputLength > 0) ? input.codePointAt(0) : EOF;
// If c is U+003A (:), then:
if (c == ':') {
// If remaining does not start with U+003A (:),
// IPv6-invalid-compression validation error, return failure.
if (inputLength > 1 && input.codePointAt(1) != ':') {
throw new InvalidUrlException("IPv6 address begins with improper compression.");
}
// Increase pointer by 2.
pointer += 2;
// Increase pieceIndex by 1 and then set compress to pieceIndex.
pieceIndex++;
compress = pieceIndex;
}
c = (pointer < inputLength) ? input.codePointAt(pointer) : EOF;
// While c is not the EOF code point:
while (c != EOF) {
// If pieceIndex is 8, IPv6-too-many-pieces validation error, return failure.
if (pieceIndex == 8) {
throw new InvalidUrlException("IPv6 address contains more than 8 pieces.");
}
// If c is U+003A (:), then:
if (c == ':') {
// If compress is non-null, IPv6-multiple-compression validation error, return failure.
if (compress != null) {
throw new InvalidUrlException("IPv6 address is compressed in more than one spot.");
}
// Increase pointer and pieceIndex by 1, set compress to pieceIndex, and then continue.
pointer++;
pieceIndex++;
compress = pieceIndex;
c = (pointer < inputLength) ? input.codePointAt(pointer) : EOF;
continue;
}
// Let value and length be 0.
int value = 0;
int length = 0;
// While length is less than 4 and c is an ASCII hex digit,
// set value to value × 0x10 + c interpreted as hexadecimal number,
// and increase pointer and length by 1.
while (length < 4 && isAsciiHexDigit(c)) {
int cHex = Character.digit(c, 16);
value = (value * 0x10) + cHex;
pointer++;
length++;
c = (pointer < inputLength) ? input.codePointAt(pointer) : EOF;
}
// If c is U+002E (.), then:
if (c == '.') {
// If length is 0, IPv4-in-IPv6-invalid-code-point validation error, return failure.
if (length == 0) {
throw new InvalidUrlException(
"IPv6 address with IPv4 address syntax: IPv4 part is empty.");
}
// Decrease pointer by length.
pointer -= length;
// If pieceIndex is greater than 6,
// IPv4-in-IPv6-too-many-pieces validation error, return failure.
if (pieceIndex > 6) {
throw new InvalidUrlException(
"IPv6 address with IPv4 address syntax: IPv6 address has more than 6 pieces.");
}
// Let numbersSeen be 0.
int numbersSeen = 0;
c = (pointer < inputLength) ? input.codePointAt(pointer) : EOF;
// While c is not the EOF code point:
while (c != EOF) {
// Let ipv4Piece be null.
Integer ipv4Piece = null;
// If numbersSeen is greater than 0, then:
if (numbersSeen > 0) {
// If c is a U+002E (.) and numbersSeen is less than 4, then increase pointer by 1.
if (c =='.' && numbersSeen < 4) {
pointer++;
c = (pointer < inputLength) ? input.codePointAt(pointer) : EOF;
}
// Otherwise, IPv4-in-IPv6-invalid-code-point validation error, return failure.
else {
throw new InvalidUrlException(
"IPv6 address with IPv4 address syntax: " +
"IPv4 part is empty or contains a non-ASCII digit.");
}
}
// If c is not an ASCII digit,
// IPv4-in-IPv6-invalid-code-point validation error, return failure.
if (!isAsciiDigit(c)) {
throw new InvalidUrlException(
"IPv6 address with IPv4 address syntax: IPv4 part contains a non-ASCII digit.");
}
// While c is an ASCII digit:
while (isAsciiDigit(c)) {
// Let number be c interpreted as decimal number.
int number = Character.digit(c, 10);
// If ipv4Piece is null, then set ipv4Piece to number.
if (ipv4Piece == null) {
ipv4Piece = number;
}
// Otherwise, if ipv4Piece is 0,
// IPv4-in-IPv6-invalid-code-point validation error, return failure.
else if (ipv4Piece == 0) {
throw new InvalidUrlException(
"IPv6 address with IPv4 address syntax: IPv4 part contains a non-ASCII digit.");
}
// Otherwise, set ipv4Piece to ipv4Piece × 10 + number.
else {
ipv4Piece = ipv4Piece * 10 + number;
}
// If ipv4Piece is greater than 255,
// IPv4-in-IPv6-out-of-range-part validation error, return failure.
if (ipv4Piece > 255) {
throw new InvalidUrlException(
"IPv6 address with IPv4 address syntax: IPv4 part exceeds 255.");
}
// Increase pointer by 1.
pointer++;
c = (pointer < inputLength) ? input.codePointAt(pointer) : EOF;
}
// Set address[pieceIndex] to address[pieceIndex] × 0x100 + ipv4Piece.
address[pieceIndex] = address[pieceIndex] * 0x100 + (ipv4Piece != null ? ipv4Piece : 0);
// Increase numbersSeen by 1.
numbersSeen++;
// If numbersSeen is 2 or 4, then increase pieceIndex by 1.
if (numbersSeen == 2 || numbersSeen == 4) {
pieceIndex++;
}
c = (pointer < inputLength) ? input.codePointAt(pointer) : EOF;
}
// If numbersSeen is not 4,
// IPv4-in-IPv6-too-few-parts validation error, return failure.
if (numbersSeen != 4) {
throw new InvalidUrlException(
"IPv6 address with IPv4 address syntax: IPv4 address contains too few parts.");
}
// Break.
break;
}
// Otherwise, if c is U+003A (:):
else if (c == ':') {
// Increase pointer by 1.
pointer++;
c = (pointer < inputLength) ? input.codePointAt(pointer) : EOF;
// If c is the EOF code point, IPv6-invalid-code-point validation error, return failure.
if (c == EOF) {
throw new InvalidUrlException("IPv6 address unexpectedly ends.");
}
}
// Otherwise, if c is not the EOF code point,
// IPv6-invalid-code-point validation error, return failure.
else if (c != EOF) {
throw new InvalidUrlException(
"IPv6 address contains \"" + Character.toString(c) + "\", which is " +
"neither an ASCII hex digit nor a ':'.");
}
// Set address[pieceIndex] to value.
address[pieceIndex] = value;
// Increase pieceIndex by 1.
pieceIndex++;
}
// If compress is non-null, then:
if (compress != null) {
// Let swaps be pieceIndex − compress.
int swaps = pieceIndex - compress;
// Set pieceIndex to 7.
pieceIndex = 7;
// While pieceIndex is not 0 and swaps is greater than 0,
// swap address[pieceIndex] with address[compress + swaps − 1], and
// then decrease both pieceIndex and swaps by 1.
while (pieceIndex != 0 && swaps > 0) {
int tmp = address[pieceIndex];
address[pieceIndex] = address[compress + swaps - 1];
address[compress + swaps - 1] = tmp;
pieceIndex--;
swaps--;
}
}
// Otherwise, if compress is null and pieceIndex is not 8,
// IPv6-too-few-pieces validation error, return failure.
else if (pieceIndex != 8) {
throw new InvalidUrlException("An uncompressed IPv6 address contains fewer than 8 pieces.");
}
// Return address.
return new Ipv6Address(address);
}
/**
* The IPv6 serializer takes an IPv6 address {@code address} and
* then runs these steps. They return an ASCII string.
*/
private static String serialize(int[] address) {
// Let output be the empty string.
StringBuilder output = new StringBuilder();
// Let compress be an index to the first IPv6 piece in
// the first longest sequences of address’s IPv6 pieces that are 0.
int compress = longestSequenceOf0Pieces(address);
// Let ignore0 be false.
boolean ignore0 = false;
// For each pieceIndex in the range 0 to 7, inclusive:
for (int pieceIndex = 0; pieceIndex <= 7; pieceIndex++) {
// If ignore0 is true and address[pieceIndex] is 0, then continue.
if (ignore0 && address[pieceIndex] == 0) {
continue;
}
// Otherwise, if ignore0 is true, set ignore0 to false.
else if (ignore0) {
ignore0 = false;
}
// If compress is pieceIndex, then:
if (compress == pieceIndex) {
// Let separator be "::" if pieceIndex is 0, and U+003A (:) otherwise.
String separator = (pieceIndex == 0) ? "::" : ":";
// Append separator to output.
output.append(separator);
// Set ignore0 to true and continue.
ignore0 = true;
continue;
}
// Append address[pieceIndex], represented as
// the shortest possible lowercase hexadecimal number, to output.
output.append(Integer.toHexString(address[pieceIndex]));
// If pieceIndex is not 7, then append U+003A (:) to output.
if (pieceIndex != 7) {
output.append(':');
}
}
// Return output.
return output.toString();
}
private static int longestSequenceOf0Pieces(int[] pieces) {
int longestStart = -1;
int longestLength = -1;
int start = -1;
for (int i = 0; i < pieces.length + 1; i++) {
if (i < pieces.length && pieces[i] == 0) {
if (start < 0) {
start = i;
}
}
else if (start >= 0) {
int length = i - start;
if (length > longestLength) {
longestStart = start;
longestLength = length;
}
start = -1;
}
}
// If there is no sequence of address’s IPv6 pieces
// that are 0 that is longer than 1, then set compress to null.
if (longestLength > 1) {
return longestStart;
}
else {
return -1;
}
}
@Override
public boolean equals(Object obj) {
if (obj == this) {
return true;
}
else if (obj instanceof Ipv6Address other) {
return Arrays.equals(this.pieces, other.pieces);
}
else {
return false;
}
}
@Override
public int hashCode() {
return Arrays.hashCode(this.pieces);
}
@Override
public String toString() {
return this.string;
}
}
sealed | Ipv6Address |
java | spring-projects__spring-boot | module/spring-boot-security/src/test/java/org/springframework/boot/security/autoconfigure/actuate/web/servlet/ManagementWebSecurityAutoConfigurationTests.java | {
"start": 10715,
"end": 11012
} | class ____ extends AnnotationConfigServletWebApplicationContext
implements WebServerApplicationContext {
@Override
public @Nullable WebServer getWebServer() {
return null;
}
@Override
public String getServerNamespace() {
return "server";
}
}
}
| MockWebServerApplicationContext |
java | apache__camel | components/camel-jetty/src/test/java/org/apache/camel/component/jetty/JettyWithXPathChoiceTest.java | {
"start": 1069,
"end": 2394
} | class ____ extends BaseJettyTest {
protected MockEndpoint x;
protected MockEndpoint y;
protected MockEndpoint z;
@Test
public void testSendToFirstWhen() throws Exception {
String body = "<one/>";
expectsMessageCount(0, y, z);
sendBody(body);
MockEndpoint.assertIsSatisfied(context);
x.reset();
y.reset();
z.reset();
body = "<two/>";
expectsMessageCount(0, x, z);
sendBody(body);
MockEndpoint.assertIsSatisfied(context);
}
private void sendBody(String body) {
template.sendBody("http://localhost:{{port}}/myworld", body);
}
@Override
public void doPostSetup() throws Exception {
x = getMockEndpoint("mock:x");
y = getMockEndpoint("mock:y");
z = getMockEndpoint("mock:z");
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
public void configure() {
from("jetty:http://localhost:{{port}}/myworld")
// use stream caching
.streamCaching().choice().when().xpath("/one").to("mock:x").when().xpath("/two").to("mock:y")
.otherwise().to("mock:z").end();
}
};
}
}
| JettyWithXPathChoiceTest |
java | spring-projects__spring-framework | spring-beans/src/test/java/org/springframework/beans/propertyeditors/CustomEditorTests.java | {
"start": 59875,
"end": 60190
} | class ____ extends PropertyEditorSupport {
@Override
public void setAsText(String text) {
TestBean tb = new TestBean();
StringTokenizer st = new StringTokenizer(text, "_");
tb.setName(st.nextToken());
tb.setAge(Integer.parseInt(st.nextToken()));
setValue(tb);
}
}
private static | TestBeanEditor |
java | spring-projects__spring-security | config/src/test/java/org/springframework/security/config/annotation/web/configurers/ExceptionHandlingConfigurerAccessDeniedHandlerTests.java | {
"start": 4387,
"end": 5184
} | class ____ {
AccessDeniedHandler teapotDeniedHandler = (request, response, exception) -> response
.setStatus(HttpStatus.I_AM_A_TEAPOT.value());
@Bean
SecurityFilterChain filterChain(HttpSecurity http) throws Exception {
// @formatter:off
http
.authorizeHttpRequests((authorize) -> authorize
.anyRequest().denyAll()
)
.exceptionHandling((exceptionHandling) -> exceptionHandling
.defaultAccessDeniedHandlerFor(
this.teapotDeniedHandler,
pathPattern("/hello/**")
)
.defaultAccessDeniedHandlerFor(
new AccessDeniedHandlerImpl(),
AnyRequestMatcher.INSTANCE
)
);
return http.build();
// @formatter:on
}
}
@Configuration
@EnableWebSecurity
static | RequestMatcherBasedAccessDeniedHandlerInLambdaConfig |
java | apache__kafka | clients/src/test/java/org/apache/kafka/common/metrics/KafkaMetricTest.java | {
"start": 1248,
"end": 3159
} | class ____ {
private static final MetricName METRIC_NAME = new MetricName("name", "group", "description", Collections.emptyMap());
@Test
public void testIsMeasurable() {
Measurable metricValueProvider = (config, now) -> 0;
KafkaMetric metric = new KafkaMetric(new Object(), METRIC_NAME, metricValueProvider, new MetricConfig(), new MockTime());
assertTrue(metric.isMeasurable());
assertEquals(metricValueProvider, metric.measurable());
}
@Test
public void testIsMeasurableWithGaugeProvider() {
Gauge<Double> metricValueProvider = (config, now) -> 0.0;
KafkaMetric metric = new KafkaMetric(new Object(), METRIC_NAME, metricValueProvider, new MetricConfig(), new MockTime());
assertFalse(metric.isMeasurable());
assertThrows(IllegalStateException.class, metric::measurable);
}
@Test
public void testMeasurableValueReturnsZeroWhenNotMeasurable() {
MockTime time = new MockTime();
MetricConfig config = new MetricConfig();
Gauge<Integer> gauge = (c, now) -> 7;
KafkaMetric metric = new KafkaMetric(new Object(), METRIC_NAME, gauge, config, time);
assertEquals(0.0d, metric.measurableValue(time.milliseconds()), 0.0d);
}
@Test
public void testKafkaMetricAcceptsNonMeasurableNonGaugeProvider() {
MetricValueProvider<String> provider = (config, now) -> "metric value provider";
KafkaMetric metric = new KafkaMetric(new Object(), METRIC_NAME, provider, new MetricConfig(), new MockTime());
Object value = metric.metricValue();
assertEquals("metric value provider", value);
}
@Test
public void testConstructorWithNullProvider() {
assertThrows(NullPointerException.class, () ->
new KafkaMetric(new Object(), METRIC_NAME, null, new MetricConfig(), new MockTime())
);
}
}
| KafkaMetricTest |
java | apache__flink | flink-rpc/flink-rpc-core/src/main/java/org/apache/flink/runtime/rpc/RpcEndpoint.java | {
"start": 4125,
"end": 12511
} | class ____ implements RpcGateway, AutoCloseableAsync {
protected final Logger log = LoggerFactory.getLogger(getClass());
// ------------------------------------------------------------------------
/** RPC service to be used to start the RPC server and to obtain rpc gateways. */
private final RpcService rpcService;
/** Unique identifier for this rpc endpoint. */
private final String endpointId;
/** Interface to access the underlying rpc server. */
protected final RpcServer rpcServer;
/**
* A reference to the endpoint's main thread, if the current method is called by the main
* thread.
*/
final AtomicReference<Thread> currentMainThread = new AtomicReference<>(null);
/**
* The main thread executor to be used to execute future callbacks in the main thread of the
* executing rpc server.
*/
private final MainThreadExecutor mainThreadExecutor;
/**
* Register endpoint closeable resource to the registry and close them when the server is
* stopped.
*/
private final CloseableRegistry resourceRegistry;
/**
* Indicates whether the RPC endpoint is started and not stopped or being stopped.
*
* <p>IMPORTANT: the running state is not thread safe and can be used only in the main thread of
* the rpc endpoint.
*/
private boolean isRunning;
/**
* Initializes the RPC endpoint.
*
* @param rpcService The RPC server that dispatches calls to this RPC endpoint.
* @param endpointId Unique identifier for this endpoint
*/
protected RpcEndpoint(
RpcService rpcService, String endpointId, Map<String, String> loggingContext) {
this.rpcService = checkNotNull(rpcService, "rpcService");
this.endpointId = checkNotNull(endpointId, "endpointId");
this.rpcServer = rpcService.startServer(this, loggingContext);
this.resourceRegistry = new CloseableRegistry();
this.mainThreadExecutor =
new MainThreadExecutor(rpcServer, this::validateRunsInMainThread, endpointId);
registerResource(this.mainThreadExecutor);
}
/**
* Initializes the RPC endpoint.
*
* @param rpcService The RPC server that dispatches calls to this RPC endpoint.
* @param endpointId Unique identifier for this endpoint
*/
protected RpcEndpoint(final RpcService rpcService, final String endpointId) {
this(rpcService, endpointId, Collections.emptyMap());
}
/**
* Initializes the RPC endpoint with a random endpoint id.
*
* @param rpcService The RPC server that dispatches calls to this RPC endpoint.
*/
protected RpcEndpoint(final RpcService rpcService) {
this(rpcService, UUID.randomUUID().toString());
}
/**
* Returns the rpc endpoint's identifier.
*
* @return Rpc endpoint's identifier.
*/
public String getEndpointId() {
return endpointId;
}
/**
* Returns whether the RPC endpoint is started and not stopped or being stopped.
*
* @return whether the RPC endpoint is started and not stopped or being stopped.
*/
protected boolean isRunning() {
validateRunsInMainThread();
return isRunning;
}
// ------------------------------------------------------------------------
// Start & shutdown & lifecycle callbacks
// ------------------------------------------------------------------------
/**
* Triggers start of the rpc endpoint. This tells the underlying rpc server that the rpc
* endpoint is ready to process remote procedure calls.
*/
public final void start() {
rpcServer.start();
}
/**
* Internal method which is called by the RpcService implementation to start the RpcEndpoint.
*
* @throws Exception indicating that the rpc endpoint could not be started. If an exception
* occurs, then the rpc endpoint will automatically terminate.
*/
public final void internalCallOnStart() throws Exception {
validateRunsInMainThread();
isRunning = true;
onStart();
}
/**
* User overridable callback which is called from {@link #internalCallOnStart()}.
*
* <p>This method is called when the RpcEndpoint is being started. The method is guaranteed to
* be executed in the main thread context and can be used to start the rpc endpoint in the
* context of the rpc endpoint's main thread.
*
* <p>IMPORTANT: This method should never be called directly by the user.
*
* @throws Exception indicating that the rpc endpoint could not be started. If an exception
* occurs, then the rpc endpoint will automatically terminate.
*/
protected void onStart() throws Exception {}
/**
* Triggers stop of the rpc endpoint. This tells the underlying rpc server that the rpc endpoint
* is no longer ready to process remote procedure calls.
*/
protected final void stop() {
rpcServer.stop();
}
/**
* Internal method which is called by the RpcService implementation to stop the RpcEndpoint.
*
* @return Future which is completed once all post stop actions are completed. If an error
* occurs this future is completed exceptionally
*/
public final CompletableFuture<Void> internalCallOnStop() {
validateRunsInMainThread();
CompletableFuture<Void> stopFuture = new CompletableFuture<>();
try {
resourceRegistry.close();
stopFuture.complete(null);
} catch (IOException e) {
stopFuture.completeExceptionally(
new RuntimeException("Close resource registry fail", e));
}
stopFuture = CompletableFuture.allOf(stopFuture, onStop());
isRunning = false;
return stopFuture;
}
/**
* Register the given closeable resource to {@link CloseableRegistry}.
*
* @param closeableResource the given closeable resource
*/
protected void registerResource(Closeable closeableResource) {
try {
resourceRegistry.registerCloseable(closeableResource);
} catch (IOException e) {
throw new RuntimeException(
"Registry closeable resource " + closeableResource + " fail", e);
}
}
/**
* Unregister the given closeable resource from {@link CloseableRegistry}.
*
* @param closeableResource the given closeable resource
* @return true if the given resource unregister successful, otherwise false
*/
protected boolean unregisterResource(Closeable closeableResource) {
return resourceRegistry.unregisterCloseable(closeableResource);
}
/**
* User overridable callback which is called from {@link #internalCallOnStop()}.
*
* <p>This method is called when the RpcEndpoint is being shut down. The method is guaranteed to
* be executed in the main thread context and can be used to clean up internal state.
*
* <p>IMPORTANT: This method should never be called directly by the user.
*
* @return Future which is completed once all post stop actions are completed. If an error
* occurs this future is completed exceptionally
*/
protected CompletableFuture<Void> onStop() {
return CompletableFuture.completedFuture(null);
}
/**
* Triggers the shut down of the rpc endpoint. The shut down is executed asynchronously.
*
* <p>In order to wait on the completion of the shut down, obtain the termination future via
* {@link #getTerminationFuture()}} and wait on its completion.
*/
@Override
public final CompletableFuture<Void> closeAsync() {
rpcService.stopServer(rpcServer);
return getTerminationFuture();
}
// ------------------------------------------------------------------------
// Basic RPC endpoint properties
// ------------------------------------------------------------------------
/**
* Returns a self gateway of the specified type which can be used to issue asynchronous calls
* against the RpcEndpoint.
*
* <p>IMPORTANT: The self gateway type must be implemented by the RpcEndpoint. Otherwise the
* method will fail.
*
* @param selfGatewayType | RpcEndpoint |
java | spring-projects__spring-boot | core/spring-boot/src/test/java/org/springframework/boot/context/properties/bind/BindableRuntimeHintsRegistrarTests.java | {
"start": 21689,
"end": 22004
} | class ____ {
private @Nullable InheritedNested inheritedNested;
public @Nullable InheritedNested getInheritedNested() {
return this.inheritedNested;
}
public void setInheritedNested(@Nullable InheritedNested inheritedNested) {
this.inheritedNested = inheritedNested;
}
public static | BaseProperties |
java | elastic__elasticsearch | x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/DebertaTokenizationResult.java | {
"start": 2018,
"end": 5787
} | class ____ implements TokenizationResult.TokensBuilder {
private final int clsTokenId;
private final int sepTokenId;
private final boolean withSpecialTokens;
protected final Stream.Builder<IntStream> tokenIds;
protected final Stream.Builder<IntStream> tokenMap;
protected int seqPairOffset = 0;
DebertaTokensBuilder(int clsTokenId, int sepTokenId, boolean withSpecialTokens) {
this.clsTokenId = clsTokenId;
this.sepTokenId = sepTokenId;
this.withSpecialTokens = withSpecialTokens;
this.tokenIds = Stream.builder();
this.tokenMap = Stream.builder();
}
@Override
public TokensBuilder addSequence(List<Integer> tokenIds, List<Integer> tokenMap) {
// DeBERTa-v2 single sequence: [CLS] X [SEP]
if (withSpecialTokens) {
this.tokenIds.add(IntStream.of(clsTokenId));
this.tokenMap.add(IntStream.of(SPECIAL_TOKEN_POSITION));
}
this.tokenIds.add(tokenIds.stream().mapToInt(Integer::valueOf));
this.tokenMap.add(tokenMap.stream().mapToInt(Integer::valueOf));
if (withSpecialTokens) {
this.tokenIds.add(IntStream.of(sepTokenId));
this.tokenMap.add(IntStream.of(SPECIAL_TOKEN_POSITION));
}
return this;
}
@Override
public TokensBuilder addSequencePair(
List<Integer> tokenId1s,
List<Integer> tokenMap1,
List<Integer> tokenId2s,
List<Integer> tokenMap2
) {
if (tokenId1s.isEmpty() || tokenId2s.isEmpty()) {
throw new IllegalArgumentException("Both sequences must have at least one token");
}
// DeBERTa-v2 pair of sequences: [CLS] A [SEP] B [SEP]
if (withSpecialTokens) {
tokenIds.add(IntStream.of(clsTokenId));
tokenMap.add(IntStream.of(SPECIAL_TOKEN_POSITION));
}
tokenIds.add(tokenId1s.stream().mapToInt(Integer::valueOf));
tokenMap.add(tokenMap1.stream().mapToInt(Integer::valueOf));
int previouslyFinalMap = tokenMap1.get(tokenMap1.size() - 1);
if (withSpecialTokens) {
tokenIds.add(IntStream.of(sepTokenId));
tokenMap.add(IntStream.of(SPECIAL_TOKEN_POSITION));
}
tokenIds.add(tokenId2s.stream().mapToInt(Integer::valueOf));
tokenMap.add(tokenMap2.stream().mapToInt(i -> i + previouslyFinalMap));
if (withSpecialTokens) {
tokenIds.add(IntStream.of(sepTokenId));
tokenMap.add(IntStream.of(SPECIAL_TOKEN_POSITION));
}
seqPairOffset = withSpecialTokens ? tokenId1s.size() + 2 : tokenId1s.size();
return this;
}
@Override
public Tokens build(
List<String> input,
boolean truncated,
List<List<? extends DelimitedToken>> allTokens,
int spanPrev,
int seqId
) {
return new Tokens(
input,
allTokens,
truncated,
tokenIds.build().flatMapToInt(Function.identity()).toArray(),
tokenMap.build().flatMapToInt(Function.identity()).toArray(),
spanPrev,
seqId,
seqPairOffset
);
}
@Override
public Tokens build(String input, boolean truncated, List<? extends DelimitedToken> allTokens, int spanPrev, int seqId) {
return TokensBuilder.super.build(input, truncated, allTokens, spanPrev, seqId);
}
}
}
| DebertaTokensBuilder |
java | apache__avro | lang/java/perf/src/main/java/org/apache/avro/perf/test/generic/GenericTest.java | {
"start": 2869,
"end": 3886
} | class ____ extends BasicState {
private final Schema schema;
private GenericRecord[] testData;
private Encoder encoder;
public TestStateEncode() {
super();
this.schema = new Schema.Parser().parse(RECORD_SCHEMA);
}
/**
* Setup the trial data.
*
* @throws IOException Could not setup test data
*/
@Setup(Level.Trial)
public void doSetupTrial() throws Exception {
this.encoder = super.newEncoder(false, getNullOutputStream());
this.testData = new GenericRecord[getBatchSize()];
final Random r = super.getRandom();
for (int i = 0; i < testData.length; i++) {
final GenericRecord rec = new GenericData.Record(schema);
rec.put(0, r.nextDouble());
rec.put(1, r.nextDouble());
rec.put(2, r.nextDouble());
rec.put(3, r.nextInt());
rec.put(4, r.nextInt());
rec.put(5, r.nextInt());
testData[i] = rec;
}
}
}
@State(Scope.Thread)
public static | TestStateEncode |
java | spring-projects__spring-framework | spring-context-indexer/src/test/java/org/springframework/context/index/sample/type/SampleSmartRepo.java | {
"start": 719,
"end": 838
} | interface ____ to demonstrate that no
* duplicate stereotypes are generated.
*
* @author Stephane Nicoll
*/
public | used |
java | quarkusio__quarkus | test-framework/junit5/src/main/java/io/quarkus/test/junit/nativeimage/ClassInclusionReport.java | {
"start": 432,
"end": 781
} | class ____ {
private final Set<String> includedClasses;
private final Path usedClassesReport;
private ClassInclusionReport(Set<String> includedClasses, Path usedClassesReport) {
this.includedClasses = includedClasses;
this.usedClassesReport = usedClassesReport;
}
/**
* This will load the | ClassInclusionReport |
java | apache__camel | dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/CoAPEndpointBuilderFactory.java | {
"start": 50973,
"end": 53196
} | class ____ {
/**
* The internal instance of the builder used to access to all the
* methods representing the name of headers.
*/
private static final CoAPHeaderNameBuilder INSTANCE = new CoAPHeaderNameBuilder();
/**
* The CoAP ETag for the response.
*
* The option is a: {@code byte[]} type.
*
* Group: common
*
* @return the name of the header {@code CoapETag}.
*/
public String coapETag() {
return "CamelCoapETag";
}
/**
* The CoAP Max-Age for the response body.
*
* The option is a: {@code java.lang.Long} type.
*
* Group: common
*
* @return the name of the header {@code CoapMaxAge}.
*/
public String coapMaxAge() {
return "CamelCoapMaxAge";
}
/**
* The request method that the CoAP producer should use when calling the
* target CoAP server URI. Valid options are DELETE, GET, PING, POST
* & PUT.
*
* The option is a: {@code String} type.
*
* Group: common
*
* @return the name of the header {@code CoapMethod}.
*/
public String coapMethod() {
return "CamelCoapMethod";
}
/**
* The CoAP response code sent by the external server. See RFC 7252 for
* details of what each code means.
*
* The option is a: {@code String} type.
*
* Group: common
*
* @return the name of the header {@code CoapResponseCode}.
*/
public String coapResponseCode() {
return "CamelCoapResponseCode";
}
/**
* The content type.
*
* The option is a: {@code String} type.
*
* Group: common
*
* @return the name of the header {@code Content-Type}.
*/
public String contentType() {
return "Content-Type";
}
}
static CoAPEndpointBuilder endpointBuilder(String componentName, String path) {
| CoAPHeaderNameBuilder |
java | spring-projects__spring-security | webauthn/src/test/java/org/springframework/security/web/webauthn/api/TestPublicKeyCredentialCreationOptions.java | {
"start": 723,
"end": 2300
} | class ____ {
public static PublicKeyCredentialCreationOptions.PublicKeyCredentialCreationOptionsBuilder createPublicKeyCredentialCreationOptions() {
AuthenticatorSelectionCriteria authenticatorSelection = AuthenticatorSelectionCriteria.builder()
.userVerification(UserVerificationRequirement.PREFERRED)
.residentKey(ResidentKeyRequirement.REQUIRED)
.build();
Bytes challenge = Bytes.fromBase64("q7lCdd3SVQxdC-v8pnRAGEn1B2M-t7ZECWPwCAmhWvc");
PublicKeyCredentialRpEntity rp = PublicKeyCredentialRpEntity.builder()
.id("example.localhost")
.name("SimpleWebAuthn Example")
.build();
Bytes userId = Bytes.fromBase64("oWJtkJ6vJ_m5b84LB4_K7QKTCTEwLIjCh4tFMCGHO4w");
PublicKeyCredentialUserEntity userEntity = ImmutablePublicKeyCredentialUserEntity.builder()
.displayName("user@example.localhost")
.id(userId)
.name("user@example.localhost")
.build();
ImmutableAuthenticationExtensionsClientInputs clientInputs = new ImmutableAuthenticationExtensionsClientInputs(
ImmutableAuthenticationExtensionsClientInput.credProps);
return PublicKeyCredentialCreationOptions.builder()
.attestation(AttestationConveyancePreference.NONE)
.user(userEntity)
.pubKeyCredParams(PublicKeyCredentialParameters.EdDSA, PublicKeyCredentialParameters.ES256,
PublicKeyCredentialParameters.RS256)
.authenticatorSelection(authenticatorSelection)
.challenge(challenge)
.rp(rp)
.extensions(clientInputs)
.timeout(Duration.ofMinutes(5));
}
private TestPublicKeyCredentialCreationOptions() {
}
}
| TestPublicKeyCredentialCreationOptions |
java | google__dagger | javatests/dagger/internal/codegen/DuplicateBindingsValidationTest.java | {
"start": 2040,
"end": 2088
} | interface ____ {}",
"",
" | A |
java | eclipse-vertx__vert.x | vertx-core/src/test/java/io/vertx/tests/json/JsonArrayTest.java | {
"start": 24013,
"end": 38725
} | class ____ {
static final Function<Object, ?> CLONER = o -> {
assertTrue(o instanceof SomeClass);
return new SomeClass();
};
}
@Test
public void testToString() {
jsonArray.add("foo").add(123);
assertEquals(jsonArray.encode(), jsonArray.toString());
}
@Test
public void testGetList() {
JsonObject obj = new JsonObject().put("quux", "wibble");
jsonArray.add("foo").add(123).add(obj);
List<Object> list = jsonArray.getList();
list.remove("foo");
assertFalse(jsonArray.contains("foo"));
list.add("floob");
assertTrue(jsonArray.contains("floob"));
assertSame(obj, list.get(1));
obj.remove("quux");
}
@Test
public void testCreateFromList() {
List<Object> list = new ArrayList<>();
list.add("foo");
list.add(123);
JsonArray arr = new JsonArray(list);
assertEquals("foo", arr.getString(0));
assertEquals(Integer.valueOf(123), arr.getInteger(1));
assertSame(list, arr.getList());
}
@Test
public void testCreateFromListCharSequence() {
List<Object> list = new ArrayList<>();
list.add("foo");
list.add(123);
list.add(new StringBuilder("eek"));
JsonArray arr = new JsonArray(list);
assertEquals("foo", arr.getString(0));
assertEquals(Integer.valueOf(123), arr.getInteger(1));
assertEquals("eek", arr.getString(2));
assertSame(list, arr.getList());
}
@Test
public void testCreateFromListNestedJsonObject() {
List<Object> list = new ArrayList<>();
list.add("foo");
list.add(123);
JsonObject obj = new JsonObject().put("blah", "wibble");
list.add(obj);
JsonArray arr = new JsonArray(list);
assertEquals("foo", arr.getString(0));
assertEquals(Integer.valueOf(123), arr.getInteger(1));
assertSame(list, arr.getList());
assertSame(obj, arr.getJsonObject(2));
}
@Test
public void testCreateFromListNestedMap() {
List<Object> list = new ArrayList<>();
list.add("foo");
list.add(123);
Map<String, Object> map = new HashMap<>();
map.put("blah", "wibble");
list.add(map);
JsonArray arr = new JsonArray(list);
assertEquals("foo", arr.getString(0));
assertEquals(Integer.valueOf(123), arr.getInteger(1));
assertSame(list, arr.getList());
JsonObject obj = arr.getJsonObject(2);
assertSame(map, obj.getMap());
}
@Test
public void testCreateFromListNestedJsonArray() {
List<Object> list = new ArrayList<>();
list.add("foo");
list.add(123);
JsonArray arr2 = new JsonArray().add("blah").add("wibble");
list.add(arr2);
JsonArray arr = new JsonArray(list);
assertEquals("foo", arr.getString(0));
assertEquals(Integer.valueOf(123), arr.getInteger(1));
assertSame(list, arr.getList());
assertSame(arr2, arr.getJsonArray(2));
}
@Test
public void testCreateFromListNestedList() {
List<Object> list = new ArrayList<>();
list.add("foo");
list.add(123);
List<Object> list2 = new ArrayList<>();
list2.add("blah");
list2.add("wibble");
list.add(list2);
JsonArray arr = new JsonArray(list);
assertEquals("foo", arr.getString(0));
assertEquals(Integer.valueOf(123), arr.getInteger(1));
assertSame(list, arr.getList());
JsonArray arr2 = arr.getJsonArray(2);
assertSame(list2, arr2.getList());
}
@Test
public void testCreateFromBuffer() {
JsonArray excepted = new JsonArray();
excepted.add("foobar");
excepted.add(123);
Buffer buf = Buffer.buffer(excepted.encode());
assertEquals(excepted, new JsonArray(buf));
}
@Test
public void testClusterSerializable() {
jsonArray.add("foo").add(123);
Buffer buff = Buffer.buffer();
jsonArray.writeToBuffer(buff);
JsonArray deserialized = new JsonArray();
deserialized.readFromBuffer(0, buff);
assertEquals(jsonArray, deserialized);
}
@Test
public void testJsonArrayEquality() {
JsonObject obj = new JsonObject(Collections.singletonMap("abc", Collections.singletonList(3)));
assertEquals(obj, new JsonObject(Collections.singletonMap("abc", Collections.singletonList(3))));
assertEquals(obj, new JsonObject(Collections.singletonMap("abc", Collections.singletonList(3L))));
assertEquals(obj, new JsonObject(Collections.singletonMap("abc", new JsonArray().add(3))));
assertEquals(obj, new JsonObject(Collections.singletonMap("abc", new JsonArray().add(3L))));
assertNotEquals(obj, new JsonObject(Collections.singletonMap("abc", Collections.singletonList(4))));
assertNotEquals(obj, new JsonObject(Collections.singletonMap("abc", new JsonArray().add(4))));
JsonArray array = new JsonArray(Collections.singletonList(Collections.singletonList(3)));
assertEquals(array, new JsonArray(Collections.singletonList(Collections.singletonList(3))));
assertEquals(array, new JsonArray(Collections.singletonList(Collections.singletonList(3L))));
assertEquals(array, new JsonArray(Collections.singletonList(new JsonArray().add(3))));
assertEquals(array, new JsonArray(Collections.singletonList(new JsonArray().add(3L))));
assertNotEquals(array, new JsonArray(Collections.singletonList(Collections.singletonList(4))));
assertNotEquals(array, new JsonArray(Collections.singletonList(new JsonArray().add(4))));
}
@Test
public void testStreamCorrectTypes() throws Exception {
String json = "{\"object1\": [{\"object2\": 12}]}";
JsonObject object = new JsonObject(json);
testStreamCorrectTypes(object.copy());
testStreamCorrectTypes(object);
}
@Test
public void testRemoveMethodReturnedObject() {
JsonArray obj = new JsonArray();
obj.add("bar")
.add(new JsonObject().put("name", "vert.x").put("count", 2))
.add(new JsonArray().add(1.0).add(2.0));
Object removed = obj.remove(0);
assertTrue(removed instanceof String);
removed = obj.remove(0);
assertTrue(removed instanceof JsonObject);
assertEquals(((JsonObject) removed).getString("name"), "vert.x");
removed = obj.remove(0);
assertTrue(removed instanceof JsonArray);
assertEquals(((JsonArray) removed).getDouble(0), 1.0, 0.0);
}
private void testStreamCorrectTypes(JsonObject object) {
object.getJsonArray("object1").stream().forEach(innerMap -> {
assertTrue("Expecting JsonObject, found: " + innerMap.getClass().getCanonicalName(), innerMap instanceof JsonObject);
});
}
@Test
public void testInvalidConstruction() {
try {
new JsonArray((String) null);
fail();
} catch (NullPointerException ignore) {
}
try {
new JsonArray((Buffer) null);
fail();
} catch (NullPointerException ignore) {
}
try {
new JsonArray((List) null);
fail();
} catch (NullPointerException ignore) {
}
}
@Test
public void testSetEnum() {
try {
jsonArray.set(0, JsonObjectTest.SomeEnum.FOO);
fail();
} catch (IndexOutOfBoundsException e) {
// OK
}
jsonArray.add("bar");
assertSame(jsonArray, jsonArray.set(0, JsonObjectTest.SomeEnum.FOO));
assertEquals(JsonObjectTest.SomeEnum.FOO.toString(), jsonArray.getString(0));
assertEquals(1, jsonArray.size());
}
@Test
public void testSetString() {
try {
jsonArray.set(0, "foo");
fail();
} catch (IndexOutOfBoundsException e) {
// OK
}
jsonArray.add("bar");
assertSame(jsonArray, jsonArray.set(0, "foo"));
assertEquals("foo", jsonArray.getString(0));
assertEquals(1, jsonArray.size());
}
@Test
public void testSetCharSequence() {
try {
jsonArray.set(0, new StringBuilder("foo"));
fail();
} catch (IndexOutOfBoundsException e) {
// OK
}
jsonArray.add("bar");
assertSame(jsonArray, jsonArray.set(0, new StringBuilder("foo")));
assertEquals("foo", jsonArray.getString(0));
assertEquals(1, jsonArray.size());
}
@Test
public void testSetInteger() {
try {
jsonArray.set(0, 123);
fail();
} catch (IndexOutOfBoundsException e) {
// OK
}
jsonArray.add("bar");
assertSame(jsonArray, jsonArray.set(0, 123));
assertEquals(Integer.valueOf(123), jsonArray.getInteger(0));
assertEquals(1, jsonArray.size());
}
@Test
public void testSetLong() {
try {
jsonArray.set(0, 123l);
fail();
} catch (IndexOutOfBoundsException e) {
// OK
}
jsonArray.add("bar");
assertSame(jsonArray, jsonArray.set(0, 123l));
assertEquals(Long.valueOf(123), jsonArray.getLong(0));
assertEquals(1, jsonArray.size());
}
@Test
public void testSetFloat() {
try {
jsonArray.set(0, 123f);
fail();
} catch (IndexOutOfBoundsException e) {
// OK
}
jsonArray.add("bar");
assertSame(jsonArray, jsonArray.set(0, 123f));
assertEquals(Float.valueOf(123), jsonArray.getFloat(0));
assertEquals(1, jsonArray.size());
}
@Test
public void testSetDouble() {
try {
jsonArray.set(0, 123d);
fail();
} catch (IndexOutOfBoundsException e) {
// OK
}
jsonArray.add("bar");
assertSame(jsonArray, jsonArray.set(0, 123d));
assertEquals(Double.valueOf(123), jsonArray.getDouble(0));
assertEquals(1, jsonArray.size());
}
@Test
public void testSetBoolean() {
try {
jsonArray.set(0, true);
fail();
} catch (IndexOutOfBoundsException e) {
// OK
}
jsonArray.add("bar");
assertSame(jsonArray, jsonArray.set(0, true));
assertEquals(Boolean.TRUE, jsonArray.getBoolean(0));
assertEquals(1, jsonArray.size());
}
@Test
public void testSetJsonObject() {
JsonObject obj = new JsonObject().put("foo", "bar");
try {
jsonArray.set(0, obj);
fail();
} catch (IndexOutOfBoundsException e) {
// OK
}
jsonArray.add("bar");
assertSame(jsonArray, jsonArray.set(0, obj));
assertEquals(obj, jsonArray.getJsonObject(0));
assertEquals(1, jsonArray.size());
}
@Test
public void testSetJsonArray() {
JsonArray arr = new JsonArray().add("foo");
try {
jsonArray.set(0, arr);
fail();
} catch (IndexOutOfBoundsException e) {
// OK
}
jsonArray.add("bar");
assertSame(jsonArray, jsonArray.set(0, arr));
assertEquals(arr, jsonArray.getJsonArray(0));
assertEquals(1, jsonArray.size());
}
@Test
public void testSetBinary() {
byte[] bytes = TestUtils.randomByteArray(10);
try {
jsonArray.set(0, bytes);
fail();
} catch (IndexOutOfBoundsException e) {
// OK
}
jsonArray.add("bar");
assertSame(jsonArray, jsonArray.set(0, bytes));
assertEquals(TestUtils.toBase64String(bytes), jsonArray.getValue(0));
assertEquals(1, jsonArray.size());
}
@Test
public void testSetInstant() {
Instant now = Instant.now();
try {
jsonArray.set(0, now);
fail();
} catch (IndexOutOfBoundsException e) {
// OK
}
jsonArray.add("bar");
assertSame(jsonArray, jsonArray.set(0, now));
assertEquals(now.toString(), jsonArray.getValue(0));
assertEquals(1, jsonArray.size());
}
@Test
public void testSetObject() {
jsonArray.add("bar");
try {
jsonArray.set(0, new SomeClass());
// OK (we can put anything, yet it should fail to encode if a codec is missing)
} catch (RuntimeException e) {
fail();
}
try {
jsonArray.set(0, new BigDecimal(123));
// OK (we can put anything, yet it should fail to encode if a codec is missing)
} catch (RuntimeException e) {
fail();
}
try {
jsonArray.set(0, new Date());
// OK (we can put anything, yet it should fail to encode if a codec is missing)
} catch (RuntimeException e) {
fail();
}
}
@Test
public void testSetNull() {
try {
jsonArray.setNull(0);
fail();
} catch (IndexOutOfBoundsException e) {
// OK
}
jsonArray.add("bar");
assertSame(jsonArray, jsonArray.setNull(0));
assertNull(jsonArray.getString(0));
assertEquals(1, jsonArray.size());
}
@Test
public void testAddWithPos() {
JsonArray arr = new JsonArray()
.add(1)
.add(2)
.add(3);
assertEquals(3, arr.size());
assertEquals(1, arr.getValue(0));
assertEquals(2, arr.getValue(1));
assertEquals(3, arr.getValue(2));
// add some values by index
arr.add(3, 4);
// assert that the new length changed
assertEquals(4, arr.size());
// assert the value got added
assertEquals(4, arr.getValue(3));
}
@Test
public void testNoEncode() {
Instant now = Instant.now();
JsonArray json = new JsonArray();
// bypass any custom validation
json.getList().add(now);
assertEquals(now, json.getInstant(0));
assertSame(now, json.getInstant(0));
// same for byte[]
byte[] bytes = "bytes".getBytes();
// bypass any custom validation
json.getList().add(bytes);
assertEquals(bytes, json.getBinary(1));
assertSame(bytes, json.getBinary(1));
}
@Test
public void testBigDecimal() {
BigDecimal bd1 =
new BigDecimal("124567890.0987654321");
// storing BigDecimal should not be an issue
JsonArray json = new JsonArray();
json.add(bd1);
assertEquals(bd1, json.getValue(0));
assertSame(bd1, json.getValue(0));
// copy() should allow it too.
JsonArray json2 = json.copy();
// encode
assertEquals("[124567890.0987654321]", json.encode());
}
@Test
public void testShareable() {
Shareable myShareable = new Shareable() {
@Override
public Shareable copy() {
return this;
}
};
// storing Shareable should not be an issue
JsonArray json = new JsonArray();
json.add(myShareable);
assertEquals(myShareable, json.getValue(0));
assertSame(myShareable, json.getValue(0));
// copy() should allow it too.
JsonArray json2 = json.copy();
}
@Test
public void testNumber() {
// storing any kind of number should be allowed
JsonArray numbers = new JsonArray()
.add(new BigDecimal("124567890.0987654321"))
.add(new BigInteger("1234567890123456789012345678901234567890"))
.add((byte) 0x0a)
.add(Math.PI)
.add((float) Math.PI)
.add(42)
.add(1234567890123456789L)
.add(Short.MAX_VALUE);
// copy should have no side effects
JsonArray json2 = numbers.copy();
// same for encode
assertEquals("[124567890.0987654321,1234567890123456789012345678901234567890,10,3.141592653589793,3.1415927,42,1234567890123456789,32767]", numbers.encode());
// fetching any property should always be a number
// the test asserts on not null because not being a number would cause a | SomeClass |
java | google__dagger | hilt-compiler/main/java/dagger/hilt/processor/internal/uninstallmodules/AggregatedUninstallModulesMetadata.java | {
"start": 1653,
"end": 3807
} | class ____ {
/** Returns the aggregating element */
public abstract XTypeElement aggregatingElement();
/** Returns the test annotated with {@link dagger.hilt.android.testing.UninstallModules}. */
public abstract XTypeElement testElement();
/**
* Returns the list of uninstall modules in {@link dagger.hilt.android.testing.UninstallModules}.
*/
public abstract ImmutableList<XTypeElement> uninstallModuleElements();
/** Returns metadata for all aggregated elements in the aggregating package. */
public static ImmutableSet<AggregatedUninstallModulesMetadata> from(XProcessingEnv env) {
return from(
AggregatedElements.from(
ClassNames.AGGREGATED_UNINSTALL_MODULES_PACKAGE,
ClassNames.AGGREGATED_UNINSTALL_MODULES,
env));
}
/** Returns metadata for each aggregated element. */
public static ImmutableSet<AggregatedUninstallModulesMetadata> from(
ImmutableSet<XTypeElement> aggregatedElements) {
return aggregatedElements.stream()
.map(aggregatedElement -> create(aggregatedElement, getProcessingEnv(aggregatedElement)))
.collect(toImmutableSet());
}
public static AggregatedUninstallModulesIr toIr(AggregatedUninstallModulesMetadata metadata) {
return new AggregatedUninstallModulesIr(
metadata.aggregatingElement().getClassName(),
metadata.testElement().getClassName().canonicalName(),
metadata.uninstallModuleElements().stream()
.map(XTypeElement::getClassName)
.map(ClassName::canonicalName)
.collect(Collectors.toList()));
}
private static AggregatedUninstallModulesMetadata create(
XTypeElement element, XProcessingEnv env) {
XAnnotation annotationMirror = element.getAnnotation(ClassNames.AGGREGATED_UNINSTALL_MODULES);
return new AutoValue_AggregatedUninstallModulesMetadata(
element,
env.requireTypeElement(annotationMirror.getAsString("test")),
annotationMirror.getAsStringList("uninstallModules").stream()
.map(env::requireTypeElement)
.collect(toImmutableList()));
}
}
| AggregatedUninstallModulesMetadata |
java | hibernate__hibernate-orm | tooling/metamodel-generator/src/test/java/org/hibernate/processor/test/generatedannotation/GeneratedAnnotationTest.java | {
"start": 646,
"end": 1215
} | class ____ {
@Test
@TestForIssue(jiraKey = "METAGEN-79")
@WithClasses(TestEntity.class)
void testGeneratedAnnotationNotGenerated() {
assertMetamodelClassGeneratedFor( TestEntity.class );
// need to check the source because @Generated is not a runtime annotation
String metaModelSource = getMetaModelSourceAsString( TestEntity.class );
String generatedString = "@Generated(\"org.hibernate.processor.HibernateProcessor\")";
assertTrue( metaModelSource.contains( generatedString ), "@Generated should be added to the metamodel." );
}
}
| GeneratedAnnotationTest |
java | elastic__elasticsearch | libs/ssl-config/src/main/java/org/elasticsearch/common/ssl/SslTrustConfig.java | {
"start": 802,
"end": 1913
} | interface ____ {
/**
* @return A collection of files that are read by this config object.
* The {@link #createTrustManager()} method will read these files dynamically, so the behaviour of this trust config may change if
* any of these files are modified.
*/
Collection<Path> getDependentFiles();
/**
* @return A new {@link X509ExtendedTrustManager}.
* @throws SslConfigException if there is a problem configuring the trust manager.
*/
X509ExtendedTrustManager createTrustManager();
/**
* @return A collection of {@link Certificate certificates} used by this config, excluding those shipped with the JDK
*/
Collection<? extends StoredCertificate> getConfiguredCertificates();
/**
* @return {@code true} if this trust config is based on any explicit trust settings
*/
default boolean hasExplicitConfig() {
return false;
}
/**
* @return {@code true} if this trust config is based on the system default truststore
*/
default boolean isSystemDefault() {
return false;
}
}
| SslTrustConfig |
java | apache__kafka | connect/runtime/src/test/java/org/apache/kafka/connect/runtime/distributed/DistributedHerderTest.java | {
"start": 8138,
"end": 8639
} | class ____ {
private static final Map<String, String> HERDER_CONFIG = new HashMap<>();
static {
HERDER_CONFIG.put(DistributedConfig.STATUS_STORAGE_TOPIC_CONFIG, "status-topic");
HERDER_CONFIG.put(DistributedConfig.CONFIG_TOPIC_CONFIG, "config-topic");
HERDER_CONFIG.put(CommonClientConfigs.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
HERDER_CONFIG.put(DistributedConfig.GROUP_ID_CONFIG, "connect-test-group");
// The WorkerConfig base | DistributedHerderTest |
java | apache__flink | flink-runtime/src/test/java/org/apache/flink/runtime/io/network/partition/InputGateFairnessTest.java | {
"start": 13973,
"end": 16922
} | class ____ extends SingleInputGate {
private static final int BUFFER_SIZE = 32 * 1024;
private static final SupplierWithException<BufferPool, IOException>
STUB_BUFFER_POOL_FACTORY = NoOpBufferPool::new;
private final PrioritizedDeque<InputChannel> channelsWithData;
private final HashSet<InputChannel> uniquenessChecker;
@SuppressWarnings("unchecked")
public FairnessVerifyingInputGate(
String owningTaskName,
IntermediateDataSetID consumedResultId,
int numberOfInputChannels) {
super(
owningTaskName,
0,
consumedResultId,
ResultPartitionType.PIPELINED,
numberOfInputChannels,
SingleInputGateBuilder.NO_OP_PRODUCER_CHECKER,
STUB_BUFFER_POOL_FACTORY,
null,
new UnpooledMemorySegmentProvider(BUFFER_SIZE),
BUFFER_SIZE,
new ThroughputCalculator(SystemClock.getInstance()),
null);
channelsWithData = getInputChannelsWithData();
this.uniquenessChecker = new HashSet<>();
}
@Override
public Optional<BufferOrEvent> getNext() throws IOException, InterruptedException {
synchronized (channelsWithData) {
assertThat(channelsWithData.size())
.withFailMessage("too many input channels")
.isLessThanOrEqualTo(getNumberOfInputChannels());
ensureUnique(channelsWithData.asUnmodifiableCollection());
}
return super.getNext();
}
private void ensureUnique(Collection<InputChannel> channels) {
HashSet<InputChannel> uniquenessChecker = this.uniquenessChecker;
for (InputChannel channel : channels) {
if (!uniquenessChecker.add(channel)) {
fail("Duplicate channel in input gate: " + channel);
}
}
assertThat(uniquenessChecker)
.withFailMessage("found duplicate input channels")
.hasSameSizeAs(channels);
uniquenessChecker.clear();
}
}
public static RemoteInputChannel createRemoteInputChannel(
SingleInputGate inputGate, int channelIndex, ConnectionManager connectionManager) {
return InputChannelBuilder.newBuilder()
.setChannelIndex(channelIndex)
.setConnectionManager(connectionManager)
.buildRemoteChannel(inputGate);
}
public static void setupInputGate(SingleInputGate gate, InputChannel... channels)
throws IOException {
gate.setInputChannels(channels);
gate.setup();
gate.requestPartitions();
}
}
| FairnessVerifyingInputGate |
java | spring-projects__spring-framework | spring-context/src/test/java/org/springframework/scheduling/annotation/AsyncExecutionTests.java | {
"start": 20477,
"end": 20594
} | interface ____ {
void doSomething(int i);
Future<String> returnSomething(int i);
}
public static | AsyncInterface |
java | processing__processing4 | app/src/processing/app/Base.java | {
"start": 1752,
"end": 1917
} | class ____ for platform identification and
* general interaction with the system (launching URLs, loading
* files and images, etc.) that comes from that.
*/
public | is |
java | apache__camel | dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/InfluxDb2EndpointBuilderFactory.java | {
"start": 7701,
"end": 10202
} | interface ____
extends
EndpointProducerBuilder {
default InfluxDb2EndpointBuilder basic() {
return (InfluxDb2EndpointBuilder) this;
}
/**
* Whether the producer should be started lazy (on the first message).
* By starting lazy you can use this to allow CamelContext and routes to
* startup in situations where a producer may otherwise fail during
* starting and cause the route to fail being started. By deferring this
* startup to be lazy then the startup failure can be handled during
* routing messages via Camel's routing error handlers. Beware that when
* the first message is processed then creating and starting the
* producer may take a little time and prolong the total processing time
* of the processing.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: producer (advanced)
*
* @param lazyStartProducer the value to set
* @return the dsl builder
*/
default AdvancedInfluxDb2EndpointBuilder lazyStartProducer(boolean lazyStartProducer) {
doSetProperty("lazyStartProducer", lazyStartProducer);
return this;
}
/**
* Whether the producer should be started lazy (on the first message).
* By starting lazy you can use this to allow CamelContext and routes to
* startup in situations where a producer may otherwise fail during
* starting and cause the route to fail being started. By deferring this
* startup to be lazy then the startup failure can be handled during
* routing messages via Camel's routing error handlers. Beware that when
* the first message is processed then creating and starting the
* producer may take a little time and prolong the total processing time
* of the processing.
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: false
* Group: producer (advanced)
*
* @param lazyStartProducer the value to set
* @return the dsl builder
*/
default AdvancedInfluxDb2EndpointBuilder lazyStartProducer(String lazyStartProducer) {
doSetProperty("lazyStartProducer", lazyStartProducer);
return this;
}
}
public | AdvancedInfluxDb2EndpointBuilder |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/server/api/protocolrecords/FederationQueueWeight.java | {
"start": 1409,
"end": 7802
} | class ____ {
/**
* The FederationQueueWeight object consists of three parts:
* routerWeight, amrmWeight, and headRoomAlpha.
*
* @param routerWeight Weight for routing applications to different subclusters.
* We will route the application to different subclusters based on the configured weights.
* Assuming we have two subclusters, SC-1 and SC-2,
* with a weight of 0.7 for SC-1 and 0.3 for SC-2,
* the application will be allocated in such a way
* that 70% of the applications will be assigned to SC-1 and 30% to SC-2.
*
* @param amrmWeight Weight for resource request from ApplicationMaster (AM) to
* different subclusters' Resource Manager (RM).
* Assuming we have two subclusters, SC-1 and SC-2,
* with a weight of 0.6 for SC-1 and 0.4 for SC-2,
* When AM requesting resources,
* 60% of the requests will be made to the Resource Manager (RM) of SC-1
* and 40% to the RM of SC-2.
*
* @param headRoomAlpha
* used by policies that balance weight-based and load-based considerations in their decisions.
* For policies that use this parameter,
* values close to 1 indicate that most of the decision
* should be based on currently observed headroom from various sub-clusters,
* values close to zero, indicate that the decision should be
* mostly based on weights and practically ignore current load.
*
* @return FederationQueueWeight
*/
@Private
@Unstable
public static FederationQueueWeight newInstance(String routerWeight,
String amrmWeight, String headRoomAlpha) {
FederationQueueWeight federationQueueWeight = Records.newRecord(FederationQueueWeight.class);
federationQueueWeight.setRouterWeight(routerWeight);
federationQueueWeight.setAmrmWeight(amrmWeight);
federationQueueWeight.setHeadRoomAlpha(headRoomAlpha);
return federationQueueWeight;
}
@Private
@Unstable
public static FederationQueueWeight newInstance(String routerWeight,
String amrmWeight, String headRoomAlpha, String queue, String policyManagerClassName) {
FederationQueueWeight federationQueueWeight = Records.newRecord(FederationQueueWeight.class);
federationQueueWeight.setRouterWeight(routerWeight);
federationQueueWeight.setAmrmWeight(amrmWeight);
federationQueueWeight.setHeadRoomAlpha(headRoomAlpha);
federationQueueWeight.setQueue(queue);
federationQueueWeight.setPolicyManagerClassName(policyManagerClassName);
return federationQueueWeight;
}
@Public
@Unstable
public abstract String getRouterWeight();
@Public
@Unstable
public abstract void setRouterWeight(String routerWeight);
@Public
@Unstable
public abstract String getAmrmWeight();
@Public
@Unstable
public abstract void setAmrmWeight(String amrmWeight);
@Public
@Unstable
public abstract String getHeadRoomAlpha();
@Public
@Unstable
public abstract void setHeadRoomAlpha(String headRoomAlpha);
private static final String COMMA = ",";
private static final String COLON = ":";
/**
* Check if the subCluster Queue Weight Ratio are valid.
*
* This method can be used to validate RouterPolicyWeight and AMRMPolicyWeight.
*
* @param subClusterWeight the weight ratios of subClusters.
* @throws YarnException exceptions from yarn servers.
*/
public static void checkSubClusterQueueWeightRatioValid(String subClusterWeight)
throws YarnException {
// The subClusterWeight cannot be empty.
if (StringUtils.isBlank(subClusterWeight)) {
throw new YarnException("subClusterWeight can't be empty!");
}
// SC-1:0.7,SC-2:0.3 -> [SC-1:0.7,SC-2:0.3]
String[] subClusterWeights = subClusterWeight.split(COMMA);
Map<String, Double> subClusterWeightMap = new LinkedHashMap<>();
for (String subClusterWeightItem : subClusterWeights) {
// SC-1:0.7 -> [SC-1,0.7]
// We require that the parsing result is not empty and must have a length of 2.
String[] subClusterWeightItems = subClusterWeightItem.split(COLON);
if (subClusterWeightItems == null || subClusterWeightItems.length != 2) {
throw new YarnException("The subClusterWeight cannot be empty," +
" and the subClusterWeight size must be 2. (eg.SC-1,0.2)");
}
subClusterWeightMap.put(subClusterWeightItems[0], Double.valueOf(subClusterWeightItems[1]));
}
// The sum of weight ratios for subClusters must be equal to 1.
double sum = subClusterWeightMap.values().stream().mapToDouble(Double::doubleValue).sum();
boolean isValid = Math.abs(sum - 1.0) < 1e-6; // Comparing with a tolerance of 1e-6
if (!isValid) {
throw new YarnException("The sum of ratios for all subClusters must be equal to 1.");
}
}
/**
* Check if HeadRoomAlpha is a number and is between 0 and 1.
*
* @param headRoomAlpha headroomalpha.
* @throws YarnException exceptions from yarn servers.
*/
public static void checkHeadRoomAlphaValid(String headRoomAlpha) throws YarnException {
if (!isNumeric(headRoomAlpha)) {
throw new YarnException("HeadRoomAlpha must be a number.");
}
double dHeadRoomAlpha = Double.parseDouble(headRoomAlpha);
if (!(dHeadRoomAlpha >= 0 && dHeadRoomAlpha <= 1)) {
throw new YarnException("HeadRoomAlpha must be between 0-1.");
}
}
/**
* Determines whether the given value is a number.
*
* @param value given value.
* @return true, is a number, false, not a number.
*/
protected static boolean isNumeric(String value) {
return NumberUtils.isCreatable(value);
}
@Public
@Unstable
public abstract String getQueue();
@Public
@Unstable
public abstract void setQueue(String queue);
@Public
@Unstable
public abstract String getPolicyManagerClassName();
@Public
@Unstable
public abstract void setPolicyManagerClassName(String policyManagerClassName);
@Override
public String toString() {
StringBuilder builder = new StringBuilder();
builder.append("FederationQueueWeight { ");
builder.append("Queue: ").append(getQueue()).append(", ");
builder.append("RouterWeight: ").append(getRouterWeight()).append(", ");
builder.append("AmrmWeight: ").append(getAmrmWeight()).append(", ");
builder.append("PolicyManagerClassName: ").append(getPolicyManagerClassName());
builder.append(" }");
return builder.toString();
}
}
| FederationQueueWeight |
java | spring-projects__spring-framework | spring-context/src/test/java/org/springframework/context/annotation/PrimitiveBeanLookupAndAutowiringTests.java | {
"start": 1680,
"end": 2906
} | class ____ {
@Test
void primitiveLookupByName() {
ConfigurableApplicationContext ctx = new AnnotationConfigApplicationContext(Config.class);
assertThat(ctx.getBean("b", boolean.class)).isTrue();
assertThat(ctx.getBean("i", int.class)).isEqualTo(42);
ctx.close();
}
@Test
void primitiveLookupByType() {
ConfigurableApplicationContext ctx = new AnnotationConfigApplicationContext(Config.class);
assertThat(ctx.getBean(boolean.class)).isTrue();
assertThat(ctx.getBean(int.class)).isEqualTo(42);
ctx.close();
}
@Test
void primitiveAutowiredInjection() {
ConfigurableApplicationContext ctx =
new AnnotationConfigApplicationContext(Config.class, AutowiredComponent.class);
assertThat(ctx.getBean(AutowiredComponent.class).b).isTrue();
assertThat(ctx.getBean(AutowiredComponent.class).i).isEqualTo(42);
ctx.close();
}
@Test
void primitiveResourceInjection() {
ConfigurableApplicationContext ctx =
new AnnotationConfigApplicationContext(Config.class, ResourceComponent.class);
assertThat(ctx.getBean(ResourceComponent.class).b).isTrue();
assertThat(ctx.getBean(ResourceComponent.class).i).isEqualTo(42);
ctx.close();
}
@Configuration
static | PrimitiveBeanLookupAndAutowiringTests |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/bytecode/enhancement/cache/ManyToOneTestReusedColumn.java | {
"start": 6636,
"end": 6914
} | class ____ extends Food {
@ManyToOne
@Fetch(FetchMode.SELECT)
private Cheese bestPairedWith;
public Cheese getBestPairedWith() {
return bestPairedWith;
}
public void setBestPairedWith(Cheese bestPairedWith) {
this.bestPairedWith = bestPairedWith;
}
}
}
| Cheese |
java | apache__camel | components/camel-controlbus/src/main/java/org/apache/camel/component/controlbus/ControlBusProducer.java | {
"start": 1749,
"end": 3470
} | class ____ extends DefaultAsyncProducer {
private static final Logger LOG = LoggerFactory.getLogger(ControlBusProducer.class);
private final CamelLogger logger;
public ControlBusProducer(Endpoint endpoint, CamelLogger logger) {
super(endpoint);
this.logger = logger;
}
@Override
public ControlBusEndpoint getEndpoint() {
return (ControlBusEndpoint) super.getEndpoint();
}
@Override
public boolean process(Exchange exchange, AsyncCallback callback) {
if (getEndpoint().getLanguage() != null) {
try {
processByLanguage(exchange, getEndpoint().getLanguage());
} catch (Exception e) {
exchange.setException(e);
}
} else if (getEndpoint().getAction() != null) {
try {
processByAction(exchange);
} catch (Exception e) {
exchange.setException(e);
}
}
callback.done(true);
return true;
}
protected void processByLanguage(Exchange exchange, Language language) {
LanguageTask task = new LanguageTask(exchange, language);
if (getEndpoint().isAsync()) {
getEndpoint().getComponent().getExecutorService().submit(task);
} else {
task.run();
}
}
protected void processByAction(Exchange exchange) {
ActionTask task = new ActionTask(exchange);
if (getEndpoint().isAsync()) {
getEndpoint().getComponent().getExecutorService().submit(task);
} else {
task.run();
}
}
/**
* Tasks to run when processing by language.
*/
private final | ControlBusProducer |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.