language
stringclasses 1
value | repo
stringclasses 60
values | path
stringlengths 22
294
| class_span
dict | source
stringlengths 13
1.16M
| target
stringlengths 1
113
|
|---|---|---|---|---|---|
java
|
assertj__assertj-core
|
assertj-core/src/test/java/org/assertj/core/api/objectarray/ObjectArrayAssert_doesNotHaveAnyElementsOfTypes_Test.java
|
{
"start": 893,
"end": 1310
}
|
class ____ extends ObjectArrayAssertBaseTest {
@Override
protected ObjectArrayAssert<Object> invoke_api_method() {
return assertions.doesNotHaveAnyElementsOfTypes(String.class);
}
@Override
protected void verify_internal_effects() {
verify(arrays).assertDoesNotHaveAnyElementsOfTypes(getInfo(assertions), getActual(assertions), String.class);
}
}
|
ObjectArrayAssert_doesNotHaveAnyElementsOfTypes_Test
|
java
|
quarkusio__quarkus
|
independent-projects/arc/tests/src/test/java/io/quarkus/arc/test/cdi/bcextensions/PriorityTest.java
|
{
"start": 836,
"end": 1188
}
|
class ____ {
@RegisterExtension
public ArcTestContainer container = ArcTestContainer.builder()
.buildCompatibleExtensions(new MyExtension())
.build();
@Test
public void test() {
assertIterableEquals(Arrays.asList("1", "2", "3", "4", "5", "6"), MyExtension.invocations);
}
public static
|
PriorityTest
|
java
|
elastic__elasticsearch
|
server/src/test/java/org/elasticsearch/index/shard/IndexLongFieldRangeXContentTests.java
|
{
"start": 885,
"end": 1733
}
|
class ____ extends AbstractXContentTestCase<IndexLongFieldRange> {
@Override
protected IndexLongFieldRange createTestInstance() {
return randomRange();
}
@Override
protected IndexLongFieldRange doParseInstance(XContentParser parser) throws IOException {
assertThat(parser.nextToken(), sameInstance(XContentParser.Token.START_OBJECT));
return IndexLongFieldRange.fromXContent(parser);
}
@Override
protected boolean supportsUnknownFields() {
return false;
}
@Override
protected void assertEqualInstances(IndexLongFieldRange expectedInstance, IndexLongFieldRange newInstance) {
if (checkForSameInstances(expectedInstance, newInstance) == false) {
super.assertEqualInstances(expectedInstance, newInstance);
}
}
}
|
IndexLongFieldRangeXContentTests
|
java
|
apache__camel
|
components/camel-jms/src/main/java/org/apache/camel/component/jms/ConsumerType.java
|
{
"start": 851,
"end": 909
}
|
enum ____ {
Simple,
Default,
Custom
}
|
ConsumerType
|
java
|
apache__flink
|
flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/functions/aggfunctions/FirstValueWithRetractAggFunctionWithOrderTest.java
|
{
"start": 11737,
"end": 12418
}
|
class ____<T>
extends FirstLastValueAggFunctionWithOrderTestBase<
T, FirstValueWithRetractAccumulator<T>> {
@Override
protected Class<?> getAccClass() {
return FirstValueWithRetractAccumulator.class;
}
@Override
protected Method getRetractFunc() throws NoSuchMethodException {
return getAggregator()
.getClass()
.getMethod("retract", getAccClass(), Object.class, Long.class);
}
}
/** Test base for {@link FirstValueWithRetractAggFunction} with number types. */
abstract static
|
FirstValueWithRetractAggFunctionWithOrderTestBase
|
java
|
google__auto
|
value/src/main/java/com/google/auto/value/processor/ErrorReporter.java
|
{
"start": 955,
"end": 3456
}
|
class ____ {
private final Messager messager;
private int errorCount;
ErrorReporter(ProcessingEnvironment processingEnv) {
this.messager = processingEnv.getMessager();
}
/**
* Issue a compilation note.
*
* @param e the element to which it pertains
* @param format the format string for the text of the note
* @param args arguments for the format string
*/
@FormatMethod
void reportNote(Element e, String format, Object... args) {
messager.printMessage(Diagnostic.Kind.NOTE, String.format(format, args), e);
}
/**
* Issue a compilation warning.
*
* @param e the element to which it pertains
* @param format the format string for the text of the warning
* @param args arguments for the format string
*/
@FormatMethod
void reportWarning(Element e, String format, Object... args) {
messager.printMessage(Diagnostic.Kind.WARNING, String.format(format, args), e);
}
/**
* Issue a compilation error. This method does not throw an exception, since we want to continue
* processing and perhaps report other errors. It is a good idea to introduce a test case in
* CompilationTest for any new call to reportError(...) to ensure that we continue correctly after
* an error.
*
* @param e the element to which it pertains
* @param format the format string for the text of the warning
* @param args arguments for the format string
*/
@FormatMethod
void reportError(Element e, String format, Object... args) {
messager.printMessage(Diagnostic.Kind.ERROR, String.format(format, args), e);
errorCount++;
}
/**
* Issue a compilation error and abandon the processing of this class. This does not prevent the
* processing of other classes.
*
* @param e the element to which it pertains
* @param format the format string for the text of the error
* @param args arguments for the format string
* @return This method does not return, but is declared with an exception return type so you can
* write {@code throw abortWithError(...)} to tell the compiler that.
* @throws AbortProcessingException always
*/
@FormatMethod
AbortProcessingException abortWithError(Element e, String format, Object... args) {
reportError(e, format, args);
throw new AbortProcessingException();
}
/** The number of errors that have been output by calls to {@link #reportError}. */
int errorCount() {
return errorCount;
}
/** Abandon the processing of this
|
ErrorReporter
|
java
|
apache__hadoop
|
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/client/api/TimelineV2Client.java
|
{
"start": 1580,
"end": 4841
}
|
class ____ extends CompositeService {
/**
* Creates an instance of the timeline v.2 client.
*
* @param appId the application id with which the timeline client is
* associated
* @return the created timeline client instance
*/
@Public
public static TimelineV2Client createTimelineClient(ApplicationId appId) {
TimelineV2Client client = new TimelineV2ClientImpl(appId);
return client;
}
protected TimelineV2Client(String name) {
super(name);
}
/**
* <p>
* Send the information of a number of conceptual entities within the scope
* of YARN application to the timeline service v.2 collector. It is a blocking
* API. The method will not return until all the put entities have been
* persisted.
* </p>
*
* @param entities the collection of {@link TimelineEntity}
* @throws IOException if there are I/O errors
* @throws YarnException if entities are incomplete/invalid
*/
@Public
public abstract void putEntities(TimelineEntity... entities)
throws IOException, YarnException;
/**
* <p>
* Send the information of a number of conceptual entities within the scope
* of YARN application to the timeline service v.2 collector. It is an
* asynchronous API. The method will return once all the entities are
* received.
* </p>
*
* @param entities the collection of {@link TimelineEntity}
* @throws IOException if there are I/O errors
* @throws YarnException if entities are incomplete/invalid
*/
@Public
public abstract void putEntitiesAsync(TimelineEntity... entities)
throws IOException, YarnException;
/**
* <p>
* Update collector info received in AllocateResponse which contains the
* timeline service address where the request will be sent to and the timeline
* delegation token which will be used to send the request.
* </p>
*
* @param collectorInfo Collector info which contains the timeline service
* address and timeline delegation token.
*/
public abstract void setTimelineCollectorInfo(CollectorInfo collectorInfo);
/**
* <p>
* Send the information of a number of conceptual entities within the scope of
* a sub-application to the timeline service v.2 collector. It is a blocking
* API. The method will not return until all the put entities have been
* persisted.
* </p>
*
* @param entities the collection of {@link TimelineEntity}
* @throws IOException if there are I/O errors
* @throws YarnException if entities are incomplete/invalid
*/
@Public
public abstract void putSubAppEntities(TimelineEntity... entities)
throws IOException, YarnException;
/**
* <p>
* Send the information of a number of conceptual entities within the scope of
* a sub-application to the timeline service v.2 collector. It is an
* asynchronous API. The method will return once all the entities are received
* .
* </p>
*
* @param entities the collection of {@link TimelineEntity}
* @throws IOException if there are I/O errors
* @throws YarnException if entities are incomplete/invalid
*/
@Public
public abstract void putSubAppEntitiesAsync(TimelineEntity... entities)
throws IOException, YarnException;
}
|
TimelineV2Client
|
java
|
apache__hadoop
|
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/TestGetConf.java
|
{
"start": 2963,
"end": 22396
}
|
enum ____ {
NAMENODE, BACKUP, SECONDARY, NNRPCADDRESSES, JOURNALNODE
}
FileSystem localFileSys;
/** Setup federation nameServiceIds in the configuration */
private void setupNameServices(HdfsConfiguration conf, int nameServiceIdCount) {
StringBuilder nsList = new StringBuilder();
for (int i = 0; i < nameServiceIdCount; i++) {
if (nsList.length() > 0) {
nsList.append(",");
}
nsList.append(getNameServiceId(i));
}
conf.set(DFS_NAMESERVICES, nsList.toString());
}
/** Set a given key with value as address, for all the nameServiceIds.
* @param conf configuration to set the addresses in
* @param key configuration key
* @param nameServiceIdCount Number of nameServices for which the key is set
* @param portOffset starting port offset
* @return list of addresses that are set in the configuration
*/
private String[] setupAddress(HdfsConfiguration conf, String key,
int nameServiceIdCount, int portOffset) {
String[] values = new String[nameServiceIdCount];
for (int i = 0; i < nameServiceIdCount; i++, portOffset++) {
String nsID = getNameServiceId(i);
String specificKey = DFSUtil.addKeySuffixes(key, nsID);
values[i] = "nn" + i + ":" + portOffset;
conf.set(specificKey, values[i]);
}
return values;
}
/**
* Add namenodes to the static resolution list to avoid going
* through DNS which can be really slow in some configurations.
*/
private void setupStaticHostResolution(int nameServiceIdCount,
String hostname) {
for (int i = 0; i < nameServiceIdCount; i++) {
NetUtils.addStaticResolution(hostname + i, "localhost");
}
}
/*
* Convert the map returned from DFSUtil functions to an array of
* addresses represented as "host:port"
*/
private String[] toStringArray(List<ConfiguredNNAddress> list) {
String[] ret = new String[list.size()];
for (int i = 0; i < list.size(); i++) {
ret[i] = NetUtils.getHostPortString(list.get(i).getAddress());
}
return ret;
}
/**
* Using DFSUtil methods get the list of given {@code type} of address
*/
private Map<String, Map<String, InetSocketAddress>> getAddressListFromConf(
TestType type, HdfsConfiguration conf) throws IOException {
switch (type) {
case NAMENODE:
return DFSUtil.getNNServiceRpcAddressesForCluster(conf);
case BACKUP:
return DFSUtil.getBackupNodeAddresses(conf);
case SECONDARY:
return DFSUtil.getSecondaryNameNodeAddresses(conf);
case NNRPCADDRESSES:
return DFSUtil.getNNServiceRpcAddressesForCluster(conf);
}
return null;
}
private String runTool(HdfsConfiguration conf, String[] args, boolean success)
throws Exception {
ByteArrayOutputStream o = new ByteArrayOutputStream();
PrintStream out = new PrintStream(o, true);
try {
int ret = ToolRunner.run(new GetConf(conf, out, out), args);
out.flush();
System.err.println("Output: " + o.toString());
assertEquals(success, ret == 0, "Expected " + (success ? "success" : "failure") +
" for args: " + Joiner.on(" ").join(args) + "\n" +
"Output: " + o.toString());
return o.toString();
} finally {
o.close();
out.close();
}
}
/**
* Get address list for a given type of address. Command expected to
* fail if {@code success} is false.
* @return returns the success or error output from the tool.
*/
private String getAddressListFromTool(TestType type, HdfsConfiguration conf,
boolean success)
throws Exception {
String[] args = new String[1];
switch (type) {
case NAMENODE:
args[0] = Command.NAMENODE.getName();
break;
case BACKUP:
args[0] = Command.BACKUP.getName();
break;
case SECONDARY:
args[0] = Command.SECONDARY.getName();
break;
case NNRPCADDRESSES:
args[0] = Command.NNRPCADDRESSES.getName();
break;
case JOURNALNODE:
args[0] = Command.JOURNALNODE.getName();
}
return runTool(conf, args, success);
}
/**
* Using {@link GetConf} methods get the list of given {@code type} of
* addresses
*
* @param type, TestType
* @param conf, configuration
* @param checkPort, If checkPort is true, verify NNPRCADDRESSES whose
* expected value is hostname:rpc-port. If checkPort is false, the
* expected is hostname only.
* @param expected, expected addresses
*/
private void getAddressListFromTool(TestType type, HdfsConfiguration conf,
boolean checkPort, List<ConfiguredNNAddress> expected) throws Exception {
String out = getAddressListFromTool(type, conf, expected.size() != 0);
List<String> values = new ArrayList<String>();
// Convert list of addresses returned to an array of string
StringTokenizer tokenizer = new StringTokenizer(out);
while (tokenizer.hasMoreTokens()) {
String s = tokenizer.nextToken().trim();
values.add(s);
}
String[] actual = values.toArray(new String[values.size()]);
// Convert expected list to String[] of hosts
int i = 0;
String[] expectedHosts = new String[expected.size()];
for (ConfiguredNNAddress cnn : expected) {
InetSocketAddress addr = cnn.getAddress();
if (!checkPort) {
expectedHosts[i++] = addr.getHostName();
}else {
expectedHosts[i++] = addr.getHostName()+":"+addr.getPort();
}
}
// Compare two arrays
assertTrue(Arrays.equals(expectedHosts, actual));
}
private void verifyAddresses(HdfsConfiguration conf, TestType type,
boolean checkPort, String... expected) throws Exception {
// Ensure DFSUtil returned the right set of addresses
Map<String, Map<String, InetSocketAddress>> map =
getAddressListFromConf(type, conf);
List<ConfiguredNNAddress> list = DFSUtil.flattenAddressMap(map);
String[] actual = toStringArray(list);
Arrays.sort(actual);
Arrays.sort(expected);
assertArrayEquals(expected, actual);
// Test GetConf returned addresses
getAddressListFromTool(type, conf, checkPort, list);
}
private static String getNameServiceId(int index) {
return "ns" + index;
}
/**
* Test empty configuration
*/
@Test
@Timeout(value = 10)
public void testEmptyConf() throws Exception {
HdfsConfiguration conf = new HdfsConfiguration(false);
// Verify getting addresses fails
getAddressListFromTool(TestType.NAMENODE, conf, false);
System.out.println(getAddressListFromTool(TestType.BACKUP, conf, false));
getAddressListFromTool(TestType.SECONDARY, conf, false);
getAddressListFromTool(TestType.NNRPCADDRESSES, conf, false);
for (Command cmd : Command.values()) {
String arg = cmd.getName();
CommandHandler handler = Command.getHandler(arg);
assertNotNull(handler, "missing handler: " + cmd);
if (handler.key != null) {
// First test with configuration missing the required key
String[] args = {handler.key};
runTool(conf, args, false);
}
}
}
/**
* Test invalid argument to the tool
*/
@Test
@Timeout(value = 10)
public void testInvalidArgument() throws Exception {
HdfsConfiguration conf = new HdfsConfiguration();
String[] args = {"-invalidArgument"};
String ret = runTool(conf, args, false);
assertTrue(ret.contains(GetConf.USAGE));
}
/**
* Tests to make sure the returned addresses are correct in case of default
* configuration with no federation
*/
@Test
@Timeout(value = 10)
public void testNonFederation() throws Exception {
HdfsConfiguration conf = new HdfsConfiguration(false);
// Returned namenode address should match default address
conf.set(FS_DEFAULT_NAME_KEY, "hdfs://localhost:1000");
verifyAddresses(conf, TestType.NAMENODE, false, "localhost:1000");
verifyAddresses(conf, TestType.NNRPCADDRESSES, true, "localhost:1000");
// Returned address should match backupnode RPC address
conf.set(DFS_NAMENODE_BACKUP_ADDRESS_KEY,"localhost:1001");
verifyAddresses(conf, TestType.BACKUP, false, "localhost:1001");
// Returned address should match secondary http address
conf.set(DFS_NAMENODE_SECONDARY_HTTP_ADDRESS_KEY, "localhost:1002");
verifyAddresses(conf, TestType.SECONDARY, false, "localhost:1002");
// Returned namenode address should match service RPC address
conf = new HdfsConfiguration();
conf.set(DFS_NAMENODE_SERVICE_RPC_ADDRESS_KEY, "localhost:1000");
conf.set(DFS_NAMENODE_RPC_ADDRESS_KEY, "localhost:1001");
verifyAddresses(conf, TestType.NAMENODE, false, "localhost:1000");
verifyAddresses(conf, TestType.NNRPCADDRESSES, true, "localhost:1000");
// Returned address should match RPC address
conf = new HdfsConfiguration();
conf.set(DFS_NAMENODE_RPC_ADDRESS_KEY, "localhost:1001");
verifyAddresses(conf, TestType.NAMENODE, false, "localhost:1001");
verifyAddresses(conf, TestType.NNRPCADDRESSES, true, "localhost:1001");
}
/**
* Tests to make sure the returned addresses are correct in case of federation
* of setup.
*/
@Test
@Timeout(value = 10)
public void testFederation() throws Exception {
final int nsCount = 10;
HdfsConfiguration conf = new HdfsConfiguration(false);
// Test to ensure namenode, backup and secondary namenode addresses are
// returned from federation configuration. Returned namenode addresses are
// based on service RPC address and not regular RPC address
setupNameServices(conf, nsCount);
String[] nnAddresses = setupAddress(conf,
DFS_NAMENODE_SERVICE_RPC_ADDRESS_KEY, nsCount, 1000);
setupAddress(conf, DFS_NAMENODE_RPC_ADDRESS_KEY, nsCount, 1500);
setupStaticHostResolution(nsCount, "nn");
String[] backupAddresses = setupAddress(conf,
DFS_NAMENODE_BACKUP_ADDRESS_KEY, nsCount, 2000);
String[] secondaryAddresses = setupAddress(conf,
DFS_NAMENODE_SECONDARY_HTTP_ADDRESS_KEY, nsCount, 3000);
verifyAddresses(conf, TestType.NAMENODE, false, nnAddresses);
verifyAddresses(conf, TestType.BACKUP, false, backupAddresses);
verifyAddresses(conf, TestType.SECONDARY, false, secondaryAddresses);
verifyAddresses(conf, TestType.NNRPCADDRESSES, true, nnAddresses);
// Test to ensure namenode, backup, secondary namenode addresses and
// namenode rpc addresses are returned from federation configuration.
// Returned namenode addresses are based on regular RPC address
// in the absence of service RPC address.
conf = new HdfsConfiguration(false);
setupNameServices(conf, nsCount);
nnAddresses = setupAddress(conf,
DFS_NAMENODE_RPC_ADDRESS_KEY, nsCount, 1000);
backupAddresses = setupAddress(conf,
DFS_NAMENODE_BACKUP_ADDRESS_KEY, nsCount, 2000);
secondaryAddresses = setupAddress(conf,
DFS_NAMENODE_SECONDARY_HTTP_ADDRESS_KEY, nsCount, 3000);
verifyAddresses(conf, TestType.NAMENODE, false, nnAddresses);
verifyAddresses(conf, TestType.BACKUP, false, backupAddresses);
verifyAddresses(conf, TestType.SECONDARY, false, secondaryAddresses);
verifyAddresses(conf, TestType.NNRPCADDRESSES, true, nnAddresses);
}
/**
* Tests for journal node addresses.
* @throws Exception
*/
@Test
@Timeout(value = 10)
public void testGetJournalNodes() throws Exception {
final int nsCount = 3;
final String journalsBaseUri = "qjournal://jn0:8020;jn1:8020;jn2:8020";
setupStaticHostResolution(nsCount, "jn");
// With out Name service Id
HdfsConfiguration conf = new HdfsConfiguration(false);
conf.set(DFS_NAMENODE_SHARED_EDITS_DIR_KEY,
journalsBaseUri+"/");
Set<String> expected = new HashSet<>();
expected.add("jn0");
expected.add("jn1");
expected.add("jn2");
String expected1 = "";
StringBuilder buffer = new StringBuilder();
for (String val : expected) {
if (buffer.length() > 0) {
buffer.append(" ");
}
buffer.append(val);
}
buffer.append(System.lineSeparator());
expected1 = buffer.toString();
Set<String> actual = DFSUtil.getJournalNodeAddresses(conf);
assertEquals(expected.toString(), actual.toString());
String actual1 = getAddressListFromTool(TestType.JOURNALNODE,
conf, true);
assertEquals(expected1, actual1);
conf.clear();
//With out Name service Id
conf.set(DFS_NAMENODE_SHARED_EDITS_DIR_KEY,
journalsBaseUri + "/");
actual = DFSUtil.getJournalNodeAddresses(conf);
assertEquals(expected.toString(), actual.toString());
actual1 = getAddressListFromTool(TestType.JOURNALNODE,
conf, true);
assertEquals(expected1, actual1);
conf.clear();
//Federation with HA, but suffixed only with Name service Id
setupNameServices(conf, nsCount);
conf.set(DFS_HA_NAMENODES_KEY_PREFIX +".ns0",
"nn0,nn1");
conf.set(DFS_HA_NAMENODES_KEY_PREFIX +".ns1",
"nn0, nn1");
conf.set(DFS_NAMENODE_SHARED_EDITS_DIR_KEY+".ns0",
journalsBaseUri + "/ns0");
conf.set(DFS_NAMENODE_SHARED_EDITS_DIR_KEY+".ns1",
journalsBaseUri + "/ns1");
actual = DFSUtil.getJournalNodeAddresses(conf);
assertEquals(expected.toString(), actual.toString());
expected1 = getAddressListFromTool(TestType.JOURNALNODE,
conf, true);
assertEquals(expected1, actual1);
conf.clear();
// Federation with HA
setupNameServices(conf, nsCount);
conf.set(DFS_HA_NAMENODES_KEY_PREFIX + ".ns0", "nn0,nn1");
conf.set(DFS_HA_NAMENODES_KEY_PREFIX + ".ns1", "nn0, nn1");
conf.set(DFS_NAMENODE_SHARED_EDITS_DIR_KEY + ".ns0.nn0",
journalsBaseUri + "/ns0");
conf.set(DFS_NAMENODE_SHARED_EDITS_DIR_KEY + ".ns0.nn1",
journalsBaseUri + "/ns0");
conf.set(DFS_NAMENODE_SHARED_EDITS_DIR_KEY + ".ns1.nn2",
journalsBaseUri + "/ns1");
conf.set(DFS_NAMENODE_SHARED_EDITS_DIR_KEY + ".ns1.nn3",
journalsBaseUri + "/ns1");
actual = DFSUtil.getJournalNodeAddresses(conf);
assertEquals(expected.toString(), actual.toString());
actual1 = getAddressListFromTool(TestType.JOURNALNODE,
conf, true);
assertEquals(expected1, actual1);
conf.clear();
// Name service setup, but no journal node
setupNameServices(conf, nsCount);
expected = new HashSet<>();
actual = DFSUtil.getJournalNodeAddresses(conf);
assertEquals(expected.toString(), actual.toString());
actual1 = System.lineSeparator();
expected1 = getAddressListFromTool(TestType.JOURNALNODE,
conf, true);
assertEquals(expected1, actual1);
conf.clear();
//name node edits dir is present, but set
//to location of storage shared directory
conf.set(DFS_NAMENODE_SHARED_EDITS_DIR_KEY,
"file:///mnt/filer1/dfs/ha-name-dir-shared");
expected = new HashSet<>();
actual = DFSUtil.getJournalNodeAddresses(conf);
assertEquals(expected.toString(), actual.toString());
expected1 = getAddressListFromTool(TestType.JOURNALNODE,
conf, true);
actual1 = System.lineSeparator();
assertEquals(expected1, actual1);
conf.clear();
}
/*
** Test for unknown journal node host exception.
*/
@Test
@Timeout(value = 10)
public void testUnknownJournalNodeHost()
throws URISyntaxException, IOException {
assertThrows(UnknownHostException.class, () -> {
String journalsBaseUri = "qjournal://jn1:8020;jn2:8020;jn3:8020";
HdfsConfiguration conf = new HdfsConfiguration(false);
conf.set(DFS_NAMENODE_SHARED_EDITS_DIR_KEY,
journalsBaseUri + "/jndata");
DFSUtil.getJournalNodeAddresses(conf);
});
}
/*
** Test for malformed journal node urisyntax exception.
*/
@Test
@Timeout(value = 10)
public void testJournalNodeUriError()
throws URISyntaxException, IOException {
assertThrows(URISyntaxException.class, () -> {
final int nsCount = 3;
String journalsBaseUri = "qjournal://jn0 :8020;jn1:8020;jn2:8020";
setupStaticHostResolution(nsCount, "jn");
HdfsConfiguration conf = new HdfsConfiguration(false);
conf.set(DFS_NAMENODE_SHARED_EDITS_DIR_KEY,
journalsBaseUri + "/jndata");
DFSUtil.getJournalNodeAddresses(conf);
});
}
@Test
@Timeout(value = 10)
public void testGetSpecificKey() throws Exception {
HdfsConfiguration conf = new HdfsConfiguration();
conf.set("mykey", " myval ");
String[] args = {"-confKey", "mykey"};
String toolResult = runTool(conf, args, true);
assertEquals(String.format("myval%n"), toolResult);
}
@Test
@Timeout(value = 10)
public void testExtraArgsThrowsError() throws Exception {
HdfsConfiguration conf = new HdfsConfiguration();
conf.set("mykey", "myval");
String[] args = {"-namenodes", "unexpected-arg"};
assertTrue(runTool(conf, args, false).contains(
"Did not expect argument: unexpected-arg"));
}
/**
* Tests commands other than {@link Command#NAMENODE}, {@link Command#BACKUP},
* {@link Command#SECONDARY} and {@link Command#NNRPCADDRESSES}
*/
@Test
@Timeout(value = 10)
public void testTool() throws Exception {
HdfsConfiguration conf = new HdfsConfiguration(false);
for (Command cmd : Command.values()) {
CommandHandler handler = Command.getHandler(cmd.getName());
if (handler.key != null && !"-confKey".equals(cmd.getName())) {
// Add the key to the conf and ensure tool returns the right value
String[] args = {cmd.getName()};
conf.set(handler.key, "value");
assertTrue(runTool(conf, args, true).contains("value"));
}
}
}
@Test
public void TestGetConfExcludeCommand() throws Exception{
HdfsConfiguration conf = new HdfsConfiguration();
// Set up the hosts/exclude files.
HostsFileWriter hostsFileWriter = new HostsFileWriter();
hostsFileWriter.initialize(conf, "GetConf");
Path excludeFile = hostsFileWriter.getExcludeFile();
String[] args = {"-excludeFile"};
String ret = runTool(conf, args, true);
assertEquals(excludeFile.toUri().getPath(),ret.trim());
hostsFileWriter.cleanup();
}
@Test
public void TestGetConfIncludeCommand() throws Exception{
HdfsConfiguration conf = new HdfsConfiguration();
// Set up the hosts/exclude files.
HostsFileWriter hostsFileWriter = new HostsFileWriter();
hostsFileWriter.initialize(conf, "GetConf");
Path hostsFile = hostsFileWriter.getIncludeFile();
// Setup conf
String[] args = {"-includeFile"};
String ret = runTool(conf, args, true);
assertEquals(hostsFile.toUri().getPath(),ret.trim());
hostsFileWriter.cleanup();
}
@Test
public void testIncludeInternalNameServices() throws Exception {
final int nsCount = 10;
final int remoteNsCount = 4;
HdfsConfiguration conf = new HdfsConfiguration();
setupNameServices(conf, nsCount);
setupAddress(conf, DFS_NAMENODE_SERVICE_RPC_ADDRESS_KEY, nsCount, 1000);
setupAddress(conf, DFS_NAMENODE_RPC_ADDRESS_KEY, nsCount, 1500);
conf.set(DFS_INTERNAL_NAMESERVICES_KEY, "ns1");
setupStaticHostResolution(nsCount, "nn");
String[] includedNN = new String[] {"nn1:1001"};
verifyAddresses(conf, TestType.NAMENODE, false, includedNN);
verifyAddresses(conf, TestType.NNRPCADDRESSES, true, includedNN);
}
}
|
TestType
|
java
|
apache__dubbo
|
dubbo-config/dubbo-config-api/src/test/java/org/apache/dubbo/config/integration/AbstractRegistryCenterExporterListener.java
|
{
"start": 1799,
"end": 5668
}
|
interface ____ exported service.
*/
protected abstract Class<?> getInterface();
/**
* {@inheritDoc}
*/
@Override
public void exported(Exporter<?> exporter) throws RpcException {
ListenerExporterWrapper listenerExporterWrapper = (ListenerExporterWrapper) exporter;
Invoker invoker = listenerExporterWrapper.getInvoker();
if (!(invoker instanceof FilterChainBuilder.CallbackRegistrationInvoker)) {
exportedExporters.add(exporter);
return;
}
FilterChainBuilder.CallbackRegistrationInvoker callbackRegistrationInvoker =
(FilterChainBuilder.CallbackRegistrationInvoker) invoker;
if (callbackRegistrationInvoker == null || callbackRegistrationInvoker.getInterface() != getInterface()) {
return;
}
exportedExporters.add(exporter);
FilterChainBuilder.CopyOfFilterChainNode filterChainNode = getFilterChainNode(callbackRegistrationInvoker);
do {
Filter filter = this.getFilter(filterChainNode);
if (filter != null) {
filters.add(filter);
}
filterChainNode = this.getNextNode(filterChainNode);
} while (filterChainNode != null);
}
/**
* {@inheritDoc}
*/
@Override
public void unexported(Exporter<?> exporter) {
exportedExporters.remove(exporter);
}
/**
* Returns the exported exporters.
*/
public List<Exporter<?>> getExportedExporters() {
return Collections.unmodifiableList(exportedExporters);
}
/**
* Returns all filters
*/
public Set<Filter> getFilters() {
return Collections.unmodifiableSet(filters);
}
/**
* Use reflection to obtain {@link Filter}
*/
private FilterChainBuilder.CopyOfFilterChainNode getFilterChainNode(
FilterChainBuilder.CallbackRegistrationInvoker callbackRegistrationInvoker) {
if (callbackRegistrationInvoker != null) {
Field field = null;
try {
field = callbackRegistrationInvoker.getClass().getDeclaredField("filterInvoker");
field.setAccessible(true);
return (FilterChainBuilder.CopyOfFilterChainNode) field.get(callbackRegistrationInvoker);
} catch (NoSuchFieldException | IllegalAccessException e) {
// ignore
}
}
return null;
}
/**
* Use reflection to obtain {@link Filter}
*/
private Filter getFilter(FilterChainBuilder.CopyOfFilterChainNode filterChainNode) {
if (filterChainNode != null) {
Field field = null;
try {
field = filterChainNode.getClass().getDeclaredField("filter");
field.setAccessible(true);
return (Filter) field.get(filterChainNode);
} catch (NoSuchFieldException | IllegalAccessException e) {
// ignore
}
}
return null;
}
/**
* Use reflection to obtain {@link FilterChainBuilder.CopyOfFilterChainNode}
*/
private FilterChainBuilder.CopyOfFilterChainNode getNextNode(
FilterChainBuilder.CopyOfFilterChainNode filterChainNode) {
if (filterChainNode != null) {
Field field = null;
try {
field = filterChainNode.getClass().getDeclaredField("nextNode");
field.setAccessible(true);
Object object = field.get(filterChainNode);
if (object instanceof FilterChainBuilder.CopyOfFilterChainNode) {
return (FilterChainBuilder.CopyOfFilterChainNode) object;
}
} catch (NoSuchFieldException | IllegalAccessException e) {
// ignore
}
}
return null;
}
}
|
of
|
java
|
apache__flink
|
flink-core/src/main/java/org/apache/flink/api/common/typeinfo/Types.java
|
{
"start": 3249,
"end": 11447
}
|
class ____ {
/** Returns type information for {@link java.lang.Void}. Does not support a null value. */
public static final TypeInformation<Void> VOID = BasicTypeInfo.VOID_TYPE_INFO;
/** Returns type information for {@link java.lang.String}. Supports a null value. */
public static final TypeInformation<String> STRING = BasicTypeInfo.STRING_TYPE_INFO;
/**
* Returns type information for both a primitive <code>byte</code> and {@link java.lang.Byte}.
* Does not support a null value.
*/
public static final TypeInformation<Byte> BYTE = BasicTypeInfo.BYTE_TYPE_INFO;
/**
* Returns type information for both a primitive <code>boolean</code> and {@link
* java.lang.Boolean}. Does not support a null value.
*/
public static final TypeInformation<Boolean> BOOLEAN = BasicTypeInfo.BOOLEAN_TYPE_INFO;
/**
* Returns type information for both a primitive <code>short</code> and {@link java.lang.Short}.
* Does not support a null value.
*/
public static final TypeInformation<Short> SHORT = BasicTypeInfo.SHORT_TYPE_INFO;
/**
* Returns type information for both a primitive <code>int</code> and {@link java.lang.Integer}.
* Does not support a null value.
*/
public static final TypeInformation<Integer> INT = BasicTypeInfo.INT_TYPE_INFO;
/**
* Returns type information for both a primitive <code>long</code> and {@link java.lang.Long}.
* Does not support a null value.
*/
public static final TypeInformation<Long> LONG = BasicTypeInfo.LONG_TYPE_INFO;
/**
* Returns type information for both a primitive <code>float</code> and {@link java.lang.Float}.
* Does not support a null value.
*/
public static final TypeInformation<Float> FLOAT = BasicTypeInfo.FLOAT_TYPE_INFO;
/**
* Returns type information for both a primitive <code>double</code> and {@link
* java.lang.Double}. Does not support a null value.
*/
public static final TypeInformation<Double> DOUBLE = BasicTypeInfo.DOUBLE_TYPE_INFO;
/**
* Returns type information for both a primitive <code>char</code> and {@link
* java.lang.Character}. Does not support a null value.
*/
public static final TypeInformation<Character> CHAR = BasicTypeInfo.CHAR_TYPE_INFO;
/** Returns type information for {@link java.math.BigDecimal}. Supports a null value. */
public static final TypeInformation<BigDecimal> BIG_DEC = BasicTypeInfo.BIG_DEC_TYPE_INFO;
/** Returns type information for {@link java.math.BigInteger}. Supports a null value. */
public static final TypeInformation<BigInteger> BIG_INT = BasicTypeInfo.BIG_INT_TYPE_INFO;
/** Returns type information for {@link java.sql.Date}. Supports a null value. */
public static final TypeInformation<Date> SQL_DATE = SqlTimeTypeInfo.DATE;
/** Returns type information for {@link java.sql.Time}. Supports a null value. */
public static final TypeInformation<Time> SQL_TIME = SqlTimeTypeInfo.TIME;
/** Returns type information for {@link java.sql.Timestamp}. Supports a null value. */
public static final TypeInformation<Timestamp> SQL_TIMESTAMP = SqlTimeTypeInfo.TIMESTAMP;
/** Returns type information for {@link java.time.LocalDate}. Supports a null value. */
public static final TypeInformation<LocalDate> LOCAL_DATE = LocalTimeTypeInfo.LOCAL_DATE;
/** Returns type information for {@link java.time.LocalTime}. Supports a null value. */
public static final TypeInformation<LocalTime> LOCAL_TIME = LocalTimeTypeInfo.LOCAL_TIME;
/** Returns type information for {@link java.time.LocalDateTime}. Supports a null value. */
public static final TypeInformation<LocalDateTime> LOCAL_DATE_TIME =
LocalTimeTypeInfo.LOCAL_DATE_TIME;
/** Returns type information for {@link java.time.Instant}. Supports a null value. */
public static final TypeInformation<Instant> INSTANT = BasicTypeInfo.INSTANT_TYPE_INFO;
public static final TypeInformation<Variant> VARIANT = VariantTypeInfo.INSTANCE;
// CHECKSTYLE.OFF: MethodName
/**
* Returns type information for {@link org.apache.flink.types.Row} with fields of the given
* types. A row itself must not be null.
*
* <p>A row is a fixed-length, null-aware composite type for storing multiple values in a
* deterministic field order. Every field can be null regardless of the field's type. The type
* of row fields cannot be automatically inferred; therefore, it is required to provide type
* information whenever a row is produced.
*
* <p>The schema of rows can have up to <code>Integer.MAX_VALUE</code> fields, however, all row
* instances must strictly adhere to the schema defined by the type info.
*
* <p>This method generates type information with fields of the given types; the fields have the
* default names (f0, f1, f2 ..).
*
* @param types The types of the row fields, e.g., Types.STRING, Types.INT
*/
public static TypeInformation<Row> ROW(TypeInformation<?>... types) {
    // Build a RowTypeInfo from the field types only; RowTypeInfo assigns
    // the default field names (f0, f1, f2, ...) itself.
    final RowTypeInfo rowTypeInfo = new RowTypeInfo(types);
    return rowTypeInfo;
}
/**
* Returns type information for {@link org.apache.flink.types.Row} with fields of the given
* types and with given names. A row must not be null.
*
* <p>A row is a fixed-length, null-aware composite type for storing multiple values in a
* deterministic field order. Every field can be null independent of the field's type. The type
* of row fields cannot be automatically inferred; therefore, it is required to provide type
* information whenever a row is used.
*
* <p>The schema of rows can have up to <code>Integer.MAX_VALUE</code> fields, however, all row
* instances must strictly adhere to the schema defined by the type info.
*
* <p>Example use: {@code ROW_NAMED(new String[]{"name", "number"}, Types.STRING, Types.INT)}.
*
* @param fieldNames array of field names
* @param types array of field types
*/
public static TypeInformation<Row> ROW_NAMED(String[] fieldNames, TypeInformation<?>... types) {
    // RowTypeInfo takes the types first and the explicit names second.
    final RowTypeInfo namedRowInfo = new RowTypeInfo(types, fieldNames);
    return namedRowInfo;
}
/**
* Returns type information for subclasses of Flink's {@link
* org.apache.flink.api.java.tuple.Tuple} (namely {@link org.apache.flink.api.java.tuple.Tuple0}
* till {@link org.apache.flink.api.java.tuple.Tuple25}) with fields of the given types. A tuple
* must not be null.
*
* <p>A tuple is a fixed-length composite type for storing multiple values in a deterministic
* field order. Fields of a tuple are typed. Tuples are the most efficient composite type; a
* tuple does not support null-valued fields unless the type of the field supports nullability.
*
* @param types The types of the tuple fields, e.g., Types.STRING, Types.INT
*/
public static <T extends Tuple> TypeInformation<T> TUPLE(TypeInformation<?>... types) {
    // Wrap the field types in a TupleTypeInfo; the concrete TupleN subclass
    // is derived from the number of supplied types.
    final TupleTypeInfo<T> tupleTypeInfo = new TupleTypeInfo<>(types);
    return tupleTypeInfo;
}
/**
* Returns type information for typed subclasses of Flink's {@link
* org.apache.flink.api.java.tuple.Tuple}. Typed subclassed are classes that extend {@link
* org.apache.flink.api.java.tuple.Tuple0} till {@link org.apache.flink.api.java.tuple.Tuple25}
* to provide types for all fields and might add additional getters and setters for better
* readability. Additional member fields must not be added. A tuple must not be null.
*
* <p>A tuple is a fixed-length composite type for storing multiple values in a deterministic
* field order. Fields of a tuple are typed. Tuples are the most efficient composite type; a
* tuple does not support null-valued fields unless the type of the field supports nullability.
*
* <p>The generic types for all fields of the tuple can be defined in a hierarchy of subclasses.
*
* <p>If Flink's type analyzer is unable to extract a tuple type information with type
* information for all fields, an {@link
* org.apache.flink.api.common.functions.InvalidTypesException} is thrown.
*
* <p>Example use:
*
* <pre>{@code
*
|
Types
|
java
|
apache__hadoop
|
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/ReencryptionUpdater.java
|
{
"start": 5463,
"end": 20013
}
|
// Per-file re-encryption record: captures the file's inode id and the EDEK
// currently stored in its FileEncryptionInfo, plus a slot for the new EDEK
// once the KMS has re-encrypted it.
class ____ {
  // Inode id only; the INode itself is re-resolved later because the file
  // may be deleted while the task is in flight.
  private final long inodeId;
  // EDEK as currently recorded in the file's xattr (the one to re-encrypt).
  private final EncryptedKeyVersion existingEdek;
  // Re-encrypted EDEK; null until setEdek() is called with the KMS result.
  private EncryptedKeyVersion edek = null;
  FileEdekInfo(FSDirectory dir, INodeFile inode) throws IOException {
    // Caller must hold at least the FSDirectory read lock while we read
    // the file's encryption info.
    assert dir.hasReadLock();
    Preconditions.checkNotNull(inode, "INodeFile is null");
    inodeId = inode.getId();
    final FileEncryptionInfo fei = FSDirEncryptionZoneOp
        .getFileEncryptionInfo(dir, INodesInPath.fromINode(inode));
    Preconditions.checkNotNull(fei,
        "FileEncryptionInfo is null for " + inodeId);
    existingEdek = EncryptedKeyVersion
        .createForDecryption(fei.getKeyName(), fei.getEzKeyVersionName(),
            fei.getIV(), fei.getEncryptedDataEncryptionKey());
  }
  long getInodeId() {
    return inodeId;
  }
  EncryptedKeyVersion getExistingEdek() {
    return existingEdek;
  }
  void setEdek(final EncryptedKeyVersion ekv) {
    assert ekv != null;
    edek = ekv;
  }
}
@VisibleForTesting
// Test hook: asks the updater loop to park (see checkPauseForTesting).
// notify() wakes a thread already waiting so it re-reads the flag.
synchronized void pauseForTesting() {
  shouldPauseForTesting = true;
  LOG.info("Pausing re-encrypt updater for testing.");
  notify();
}
@VisibleForTesting
// Test hook: clears the pause flag and wakes the updater thread blocked
// in checkPauseForTesting()'s wait() loop.
synchronized void resumeForTesting() {
  shouldPauseForTesting = false;
  LOG.info("Resuming re-encrypt updater for testing.");
  notify();
}
@VisibleForTesting
// Test hook: arranges for the updater to pause once `count` task futures
// for zone `zoneId` have completed. Only one such trigger may be armed at
// a time (asserted below); it is consumed in checkPauseForTesting().
void pauseForTestingAfterNthCheckpoint(final long zoneId, final int count) {
  assert pauseAfterNthCheckpoint == 0;
  pauseAfterNthCheckpoint = count;
  pauseZoneId = zoneId;
}
@VisibleForTesting
// Whether the updater thread's run() loop is still alive.
boolean isRunning() {
  return isRunning;
}
private final FSDirectory dir;
// Completion service shared with the handler; the updater consumes finished
// ReencryptionTask futures from it.
private final CompletionService<ReencryptionTask> batchService;
private final ReencryptionHandler handler;
ReencryptionUpdater(final FSDirectory fsd,
    final CompletionService<ReencryptionTask> service,
    final ReencryptionHandler rh, final Configuration conf) {
  dir = fsd;
  batchService = service;
  handler = rh;
  // Ratio of time this thread may spend holding the FSN/FSD write lock;
  // must be > 0, and values >= 1.0 disable throttling (see throttle()).
  this.throttleLimitRatio =
      conf.getDouble(DFS_NAMENODE_REENCRYPT_THROTTLE_LIMIT_UPDATER_RATIO_KEY,
          DFS_NAMENODE_REENCRYPT_THROTTLE_LIMIT_UPDATER_RATIO_DEFAULT);
  Preconditions.checkArgument(throttleLimitRatio > 0.0f,
      DFS_NAMENODE_REENCRYPT_THROTTLE_LIMIT_UPDATER_RATIO_KEY
          + " is not positive.");
}
/**
 * Called by the submission thread to indicate all tasks have been submitted.
 * If this is called but no tasks have been submitted, the re-encryption is
 * considered complete.
 *
 * @param zoneId Id of the zone inode.
 * @throws IOException
 * @throws InterruptedException
 */
void markZoneSubmissionDone(final long zoneId)
    throws IOException, InterruptedException {
  final ZoneSubmissionTracker tracker = handler.getTracker(zoneId);
  if (tracker != null && !tracker.getTasks().isEmpty()) {
    tracker.submissionDone = true;
  } else {
    // Caller thinks submission is done, but no tasks submitted - meaning
    // no files in the EZ need to be re-encrypted. Complete directly.
    handler.addDummyTracker(zoneId, tracker);
  }
}
// Main loop of the single updater thread: repeatedly pull a completed task
// from the completion service and apply it. IOException/CancellationException
// are logged and the loop continues; interruption and any other Throwable
// terminate the thread (isRunning flips to false on exit).
@Override
public void run() {
  isRunning = true;
  throttleTimerAll.start();
  while (true) {
    try {
      // Assuming single-threaded updater.
      takeAndProcessTasks();
    } catch (InterruptedException ie) {
      LOG.warn("Re-encryption updater thread interrupted. Exiting.");
      // Re-assert the interrupt status for anyone above us.
      Thread.currentThread().interrupt();
      isRunning = false;
      return;
    } catch (IOException | CancellationException e) {
      // Recoverable: log and keep consuming tasks.
      LOG.warn("Re-encryption updater thread exception.", e);
    } catch (Throwable t) {
      LOG.error("Re-encryption updater thread exiting.", t);
      isRunning = false;
      return;
    }
  }
}
/**
 * Process a completed ReencryptionTask. Each inode id is resolved to an INode
 * object, skip if the inode is deleted.
 * <p>
 * Only file xattr is updated by this method. Re-encryption progress is not
 * updated.
 *
 * @param zoneNodePath full path of the EZ inode.
 * @param task the completed task.
 * @throws IOException
 * @throws InterruptedException
 */
private void processTaskEntries(final String zoneNodePath,
    final ReencryptionTask task) throws IOException, InterruptedException {
  assert dir.hasWriteLock();
  // Only touch xattrs for fully successful batches; failed batches are
  // still marked processed at the bottom so checkpoints can advance.
  if (!task.batch.isEmpty() && task.numFailures == 0) {
    LOG.debug(
        "Updating file xattrs for re-encrypting zone {}," + " starting at {}",
        zoneNodePath, task.batch.getFirstFilePath());
    final int batchSize = task.batch.size();
    for (Iterator<FileEdekInfo> it = task.batch.getBatch().iterator();
         it.hasNext();) {
      FileEdekInfo entry = it.next();
      // resolve the inode again, and skip if it's doesn't exist
      LOG.trace("Updating {} for re-encryption.", entry.getInodeId());
      final INode inode = dir.getInode(entry.getInodeId());
      if (inode == null) {
        LOG.debug("INode {} doesn't exist, skipping re-encrypt.",
            entry.getInodeId());
        // also remove from batch so later it's not saved.
        it.remove();
        continue;
      }
      // Cautiously check file encryption info, and only update if we're sure
      // it's still using the same edek.
      Preconditions.checkNotNull(entry.edek);
      final FileEncryptionInfo fei = FSDirEncryptionZoneOp
          .getFileEncryptionInfo(dir, INodesInPath.fromINode(inode));
      // Skip if the zone key changed under us...
      if (!fei.getKeyName().equals(entry.edek.getEncryptionKeyName())) {
        LOG.debug("Inode {} EZ key changed, skipping re-encryption.",
            entry.getInodeId());
        it.remove();
        continue;
      }
      // ...or if the file is already at the new key version...
      if (fei.getEzKeyVersionName()
          .equals(entry.edek.getEncryptionKeyVersionName())) {
        LOG.debug(
            "Inode {} EZ key version unchanged, skipping re-encryption.",
            entry.getInodeId());
        it.remove();
        continue;
      }
      // ...or if the stored EDEK bytes no longer match what we re-encrypted.
      if (!Arrays.equals(fei.getEncryptedDataEncryptionKey(),
          entry.existingEdek.getEncryptedKeyVersion().getMaterial())) {
        LOG.debug("Inode {} existing edek changed, skipping re-encryption",
            entry.getInodeId());
        it.remove();
        continue;
      }
      // Safe to replace: same cipher/version, new EDEK material and key
      // version name from the re-encrypted entry.
      FileEncryptionInfo newFei = new FileEncryptionInfo(fei.getCipherSuite(),
          fei.getCryptoProtocolVersion(),
          entry.edek.getEncryptedKeyVersion().getMaterial(),
          entry.edek.getEncryptedKeyIv(), fei.getKeyName(),
          entry.edek.getEncryptionKeyVersionName());
      final INodesInPath iip = INodesInPath.fromINode(inode);
      FSDirEncryptionZoneOp
          .setFileEncryptionInfo(dir, iip, newFei, XAttrSetFlag.REPLACE);
      task.lastFile = iip.getPath();
      ++task.numFilesUpdated;
    }
    LOG.info("Updated xattrs on {}({}) files in zone {} for re-encryption,"
        + " starting:{}.", task.numFilesUpdated, batchSize,
        zoneNodePath, task.batch.getFirstFilePath());
  }
  task.processed = true;
}
/**
 * Iterate tasks for the given zone, and update progress accordingly. The
 * checkpoint indicates all files before it are done re-encryption, so it will
 * be updated to the position where all tasks before are completed.
 *
 * @param zoneNode the EZ inode.
 * @param tracker the zone submission tracker.
 * @return the list containing the last checkpointed xattr. Empty if
 *         no checkpoint happened.
 * @throws ExecutionException
 * @throws IOException
 * @throws InterruptedException
 */
private List<XAttr> processCheckpoints(final INode zoneNode,
    final ZoneSubmissionTracker tracker)
    throws ExecutionException, IOException, InterruptedException {
  assert dir.hasWriteLock();
  final long zoneId = zoneNode.getId();
  final String zonePath = zoneNode.getFullPathName();
  final ZoneReencryptionStatus status =
      handler.getReencryptionStatus().getZoneStatus(zoneId);
  assert status != null;
  // always start from the beginning, because the checkpoint means all files
  // before it are re-encrypted.
  final LinkedList<Future> tasks = tracker.getTasks();
  final List<XAttr> xAttrs = Lists.newArrayListWithCapacity(1);
  ListIterator<Future> iter = tasks.listIterator();
  // Synchronize with the handler so the task list isn't mutated while we
  // walk the checkpoint frontier.
  synchronized (handler) {
    while (iter.hasNext()) {
      Future<ReencryptionTask> curr = iter.next();
      if (curr.isCancelled()) {
        break;
      }
      if (!curr.isDone() || !curr.get().processed) {
        // still has earlier tasks not completed, skip here.
        break;
      }
      ReencryptionTask task = curr.get();
      LOG.debug("Updating re-encryption checkpoint with completed task."
          + " last: {} size:{}.", task.lastFile, task.batch.size());
      assert zoneId == task.zoneId;
      try {
        final XAttr xattr = FSDirEncryptionZoneOp
            .updateReencryptionProgress(dir, zoneNode, status, task.lastFile,
                task.numFilesUpdated, task.numFailures);
        // Only the most recent checkpoint xattr is returned to the caller.
        xAttrs.clear();
        xAttrs.add(xattr);
      } catch (IOException ie) {
        LOG.warn("Failed to update re-encrypted progress to xattr" +
            " for zone {}", zonePath, ie);
        ++task.numFailures;
      }
      ++tracker.numCheckpointed;
      iter.remove();
    }
  }
  if (tracker.isCompleted()) {
    LOG.debug("Removed re-encryption tracker for zone {} because it completed"
        + " with {} tasks.", zonePath, tracker.numCheckpointed);
    return handler.completeReencryption(zoneNode);
  }
  return xAttrs;
}
// Blocks for the next completed task future, then applies it under the FSN
// write lock, retrying indefinitely on retriable/safe-mode conditions.
private void takeAndProcessTasks() throws Exception {
  final Future<ReencryptionTask> completed = batchService.take();
  // Throttle and test-pause happen before taking any locks.
  throttle();
  checkPauseForTesting();
  if (completed.isCancelled()) {
    // Ignore canceled zones. The cancellation is edit-logged by the handler.
    LOG.debug("Skipped a canceled re-encryption task");
    return;
  }
  final ReencryptionTask task = completed.get();
  boolean shouldRetry;
  do {
    dir.getFSNamesystem().writeLock(RwLockMode.FS);
    try {
      // throttleTimerLocked tracks how long we hold the write lock; used by
      // throttle() to bound lock contention.
      throttleTimerLocked.start();
      processTask(task);
      shouldRetry = false;
    } catch (RetriableException | SafeModeException re) {
      // Keep retrying until succeed.
      LOG.info("Exception when processing re-encryption task for zone {}, "
          + "retrying...", task.zoneId, re);
      shouldRetry = true;
      Thread.sleep(faultRetryInterval);
    } catch (IOException ioe) {
      // Non-retriable failure: count it and mark processed so checkpoints
      // can still advance past this task.
      LOG.warn("Failure processing re-encryption task for zone {}",
          task.zoneId, ioe);
      ++task.numFailures;
      task.processed = true;
      shouldRetry = false;
    } finally {
      dir.getFSNamesystem().writeUnlock(RwLockMode.FS, "reencryptUpdater");
      throttleTimerLocked.stop();
    }
    // logSync regardless, to prevent edit log buffer overflow triggering
    // logSync inside FSN writelock.
    dir.getEditLog().logSync();
  } while (shouldRetry);
}
// Applies one completed task under the FSDirectory write lock: updates the
// per-file xattrs, then advances the zone checkpoint. The batch xattr save
// and edit-log write happen after the lock is released.
private void processTask(ReencryptionTask task)
    throws InterruptedException, ExecutionException, IOException {
  final List<XAttr> xAttrs;
  final String zonePath;
  dir.writeLock();
  try {
    handler.getTraverser().checkINodeReady(task.zoneId);
    final INode zoneNode = dir.getInode(task.zoneId);
    if (zoneNode == null) {
      // ez removed.
      return;
    }
    zonePath = zoneNode.getFullPathName();
    LOG.info("Processing returned re-encryption task for zone {}({}), "
        + "batch size {}, start:{}", zonePath, task.zoneId,
        task.batch.size(), task.batch.getFirstFilePath());
    final ZoneSubmissionTracker tracker =
        handler.getTracker(zoneNode.getId());
    if (tracker == null) {
      // re-encryption canceled.
      LOG.info("Re-encryption was canceled.");
      return;
    }
    tracker.numFutureDone++;
    // Fault-injection hooks for tests, before each phase.
    EncryptionFaultInjector.getInstance().reencryptUpdaterProcessOneTask();
    processTaskEntries(zonePath, task);
    EncryptionFaultInjector.getInstance().reencryptUpdaterProcessCheckpoint();
    xAttrs = processCheckpoints(zoneNode, tracker);
  } finally {
    dir.writeUnlock();
  }
  // Persist the batch's file xattrs and log the checkpoint xattr (if any)
  // outside the directory write lock.
  FSDirEncryptionZoneOp.saveFileXAttrsForBatch(dir, task.batch.getBatch());
  if (!xAttrs.isEmpty()) {
    dir.getEditLog().logSetXAttrs(zonePath, xAttrs, false);
  }
}
// Honors the test-only pause hooks. Must be called without holding any FSD
// or FSN lock (asserted), since it may wait() indefinitely until
// resumeForTesting() notifies.
private synchronized void checkPauseForTesting() throws InterruptedException {
  assert !dir.hasWriteLock();
  assert !dir.getFSNamesystem().hasWriteLock(RwLockMode.FS);
  if (pauseAfterNthCheckpoint != 0) {
    ZoneSubmissionTracker tracker =
        handler.unprotectedGetTracker(pauseZoneId);
    if (tracker != null) {
      // Trigger fires when the Nth future for the watched zone completes;
      // it is one-shot (reset to 0 here).
      if (tracker.numFutureDone == pauseAfterNthCheckpoint) {
        shouldPauseForTesting = true;
        pauseAfterNthCheckpoint = 0;
      }
    }
  }
  while (shouldPauseForTesting) {
    LOG.info("Sleeping in the re-encryption updater for unit test.");
    wait();
    LOG.info("Continuing re-encryption updater after pausing.");
  }
}
/**
 * Throttles the ReencryptionUpdater to prevent from contending FSN/FSD write
 * locks. This is done by the configuration.
 * <p>
 * Sleeps long enough that the fraction of wall time spent holding the write
 * lock (throttleTimerLocked / throttleTimerAll) stays at or below
 * throttleLimitRatio. Both timers are reset at the end of each call so each
 * interval is measured independently.
 */
private void throttle() throws InterruptedException {
  // Ratio >= 1.0 means throttling is disabled.
  if (throttleLimitRatio >= 1.0) {
    return;
  }
  final long expect = (long) (throttleTimerAll.now(TimeUnit.MILLISECONDS)
      * throttleLimitRatio);
  final long actual = throttleTimerLocked.now(TimeUnit.MILLISECONDS);
  if (LOG.isDebugEnabled()) {
    LOG.debug("Re-encryption updater throttling expect: {}, actual: {},"
        + " throttleTimerAll:{}", expect, actual,
        throttleTimerAll.now(TimeUnit.MILLISECONDS));
  }
  if (expect - actual < 0) {
    // in case throttleLimitHandlerRatio is very small, expect will be 0.
    // so sleepMs should not be calculated from expect, to really meet the
    // ratio. e.g. if ratio is 0.001, expect = 0 and actual = 1, sleepMs
    // should be 1000 - throttleTimerAll.now()
    final long sleepMs =
        (long) (actual / throttleLimitRatio) - throttleTimerAll
            .now(TimeUnit.MILLISECONDS);
    LOG.debug("Throttling re-encryption, sleeping for {} ms", sleepMs);
    Thread.sleep(sleepMs);
  }
  throttleTimerAll.reset().start();
  throttleTimerLocked.reset();
}
}
|
FileEdekInfo
|
java
|
spring-projects__spring-framework
|
spring-context/src/main/java/org/springframework/jmx/export/assembler/AbstractMBeanInfoAssembler.java
|
{
"start": 9209,
"end": 9620
}
|
interface ____ the managed resource.
* @param managedBean the bean instance (might be an AOP proxy)
* @param beanKey the key associated with the MBean in the beans map
* of the {@code MBeanExporter}
* @return the operation metadata
* @throws JMException in case of errors
*/
protected abstract ModelMBeanOperationInfo[] getOperationInfo(Object managedBean, String beanKey)
throws JMException;
}
|
for
|
java
|
spring-projects__spring-boot
|
module/spring-boot-hibernate/src/test/java/org/springframework/boot/hibernate/autoconfigure/HibernateJpaAutoConfigurationTests.java
|
{
"start": 49088,
"end": 49855
}
|
class ____ extends JpaTransactionManager {
}
@Target(ElementType.METHOD)
@Retention(RetentionPolicy.RUNTIME)
@WithResource(name = "META-INF/persistence.xml",
content = """
<?xml version="1.0" encoding="UTF-8"?>
<persistence version="2.0" xmlns="http://java.sun.com/xml/ns/persistence" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://java.sun.com/xml/ns/persistence https://java.sun.com/xml/ns/persistence/persistence_2_0.xsd">
<persistence-unit name="manually-configured">
<class>org.springframework.boot.jpa.autoconfigure.test.city.City</class>
<exclude-unlisted-classes>true</exclude-unlisted-classes>
</persistence-unit>
</persistence>
""")
protected @
|
CustomJpaTransactionManager
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/common/util/ReleasableDoubleArray.java
|
{
"start": 763,
"end": 2558
}
|
/**
 * Read-only {@code DoubleArray} view over a {@link ReleasableBytesReference}
 * received from the wire. Values are little-endian doubles; all mutating
 * operations throw {@link UnsupportedOperationException}.
 */
class ____ implements DoubleArray {
    private static final long SHALLOW_SIZE = RamUsageEstimator.shallowSizeOfInstance(ReleasableDoubleArray.class);

    /** Backing bytes; ref-counted, released in {@link #close()}. */
    private final ReleasableBytesReference ref;

    ReleasableDoubleArray(StreamInput in) throws IOException {
        ref = in.readReleasableBytesReference();
    }

    @Override
    public void writeTo(StreamOutput out) throws IOException {
        out.writeBytesReference(ref);
    }

    @Override
    public long size() {
        // Was Long.BYTES; use Double.BYTES since this is a double array.
        // (Both are 8, so behavior is unchanged.)
        return ref.length() / Double.BYTES;
    }

    @Override
    public double get(long index) {
        if (index > Integer.MAX_VALUE / Double.BYTES) {
            // We can't serialize messages longer than 2gb anyway
            throw new ArrayIndexOutOfBoundsException();
        }
        // The guard above ensures the int multiplication cannot overflow.
        return ref.getDoubleLE((int) index * Double.BYTES);
    }

    @Override
    public void set(long index, double value) {
        throw new UnsupportedOperationException();
    }

    @Override
    public double increment(long index, double inc) {
        throw new UnsupportedOperationException();
    }

    @Override
    public void fill(long fromIndex, long toIndex, double value) {
        throw new UnsupportedOperationException();
    }

    @Override
    public void fillWith(StreamInput in) throws IOException {
        throw new UnsupportedOperationException();
    }

    @Override
    public void set(long index, byte[] buf, int offset, int len) {
        throw new UnsupportedOperationException();
    }

    @Override
    public long ramBytesUsed() {
        /*
         * If we return the size of the buffer that we've sliced
         * we're likely to double count things.
         */
        return SHALLOW_SIZE;
    }

    @Override
    public void close() {
        ref.decRef();
    }
}
|
ReleasableDoubleArray
|
java
|
grpc__grpc-java
|
api/src/test/java/io/grpc/StatusExceptionTest.java
|
{
"start": 842,
"end": 1718
}
|
class ____ {
  /** A plain (non-trailers) construction must still capture a stack trace. */
  @Test
  public void normalCtorKeepsStack() {
    StatusException ex = new StatusException(Status.CANCELLED, null) {};
    StackTraceElement[] frames = ex.getStackTrace();
    assertThat(frames).isNotEmpty();
  }

  /** Subclassing via the single-arg constructor keeps the stack trace too. */
  @Test
  public void extendPreservesStack() {
    StatusException ex = new StatusException(Status.CANCELLED) {};
    assertThat(ex.getStackTrace()).isNotEmpty();
  }

  /** A subclass overriding fillInStackTrace controls the recorded frames. */
  @Test
  public void extendAndOverridePreservesStack() {
    final StackTraceElement onlyFrame = new StackTraceElement("a", "b", "c", 4);
    StatusException exception = new StatusException(Status.CANCELLED, new Metadata()) {
      @Override
      public synchronized Throwable fillInStackTrace() {
        setStackTrace(new StackTraceElement[]{onlyFrame});
        return this;
      }
    };
    assertThat(exception.getStackTrace()).asList().containsExactly(onlyFrame);
  }
}
|
StatusExceptionTest
|
java
|
quarkusio__quarkus
|
integration-tests/gradle/src/main/resources/avro-simple-project/src/main/java/org/acme/quarkus/sample/HelloResource.java
|
{
"start": 298,
"end": 503
}
|
class ____ {
    /**
     * Returns the names of all generated {@code Provider} enum constants as a
     * single comma-separated plain-text string.
     */
    @GET
    @Produces(MediaType.TEXT_PLAIN)
    public String providerValues() {
        // Same output as a joining stream pipeline, built imperatively.
        StringBuilder joined = new StringBuilder();
        for (Provider provider : Provider.values()) {
            if (joined.length() > 0) {
                joined.append(',');
            }
            joined.append(provider);
        }
        return joined.toString();
    }
}
|
HelloResource
|
java
|
spring-projects__spring-boot
|
core/spring-boot/src/test/java/org/springframework/boot/context/properties/ConfigurationPropertiesTests.java
|
{
"start": 81187,
"end": 81546
}
|
class ____ {
final @Nullable DataSizeProperties dataSizeProperties;
OtherInjectedProperties(ObjectProvider<DataSizeProperties> dataSizeProperties) {
this.dataSizeProperties = dataSizeProperties.getIfUnique();
}
}
@Configuration(proxyBeanMethods = false)
@EnableConfigurationProperties(OtherInjectedProperties.class)
static
|
OtherInjectedProperties
|
java
|
apache__camel
|
dsl/camel-componentdsl/src/generated/java/org/apache/camel/builder/component/dsl/SchematronComponentBuilderFactory.java
|
{
"start": 3994,
"end": 4794
}
|
class ____
        extends AbstractComponentBuilder<SchematronComponent>
        implements SchematronComponentBuilder {
    @Override
    protected SchematronComponent buildConcreteComponent() {
        return new SchematronComponent();
    }
    @Override
    protected boolean setPropertyOnComponent(
            Component component,
            String name,
            Object value) {
        // Dispatch on the option name; return false for unknown options.
        SchematronComponent target = (SchematronComponent) component;
        if ("lazyStartProducer".equals(name)) {
            target.setLazyStartProducer((boolean) value);
            return true;
        }
        if ("autowiredEnabled".equals(name)) {
            target.setAutowiredEnabled((boolean) value);
            return true;
        }
        return false;
    }
}
}
|
SchematronComponentBuilderImpl
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/indices/SystemIndexDescriptor.java
|
{
"start": 3468,
"end": 7436
}
|
class ____, but because of the implementation,
* other regex operators probably work.
*
* <p>Sample index patterns that we want to handle:
* <ol>
* <li>{@code .system-*} - covers all index names beginning with ".system-".
* <li>{@code .system-[0-9]+} - covers all index names beginning with ".system-" and containing only one or more numerals after that
* <li>{@code .system-~(other-*)} - covers all system indices beginning with ".system-", except for those beginning with
* ".system-other-"
* </ol>
*
* <p>The descriptor defines which, if any, Elasticsearch products are expected to read or modify it with calls to the REST API.
* Requests that do not include the correct product header should, in most cases, generate deprecation warnings. The exception is for
* "net new" system index descriptors, described below.
*
* <p>The descriptor also provides names for the thread pools that Elasticsearch should use to read, search, or modify the descriptor’s
* indices.
*
* <p>A SystemIndexDescriptor may be one of several types (see {@link SystemIndexDescriptor.Type}). The four types come from two different
* distinctions. The first is between "internal" and "external" system indices. The second is between "managed and unmanaged" system
* indices. The "internal/external" distinction is simple. Access to internal system indices via standard index APIs is deprecated,
* and system features that use internal system indices should provide any necessary APIs for operating on their state. An "external"
* system index, on the other hand, does not deprecate the use of standard index APIs.
*
* <p>The distinction between managed and unmanaged is simple in theory but not observed very well in our code. A "managed" system index
* is one whose settings, mappings, and aliases are defined by the SystemIndexDescriptor and managed by Elasticsearch. Many of the
* fields in this class, when added, were meant to be used only by managed system indices, and use of them should always be
* conditional on whether the system index is managed or not. However, we have not consistently enforced this, so our code may have
* inconsistent expectations about what fields will be defined for an unmanaged index. (In the future, we should refactor so that it
* is clear which fields are ignored by unmanaged system indices.)
*
* <p>A managed system index defines a "primary index" which is intended to be the main write index for the descriptor. The current
* behavior when creating a non-primary index is a little strange. A request to create a non-primary index with the Create Index
* API will fail. (See <a href="https://github.com/elastic/elasticsearch/pull/86707">PR #86707</a>) However, auto-creating the index by
* writing a document to it will succeed. (See <a href="https://github.com/elastic/elasticsearch/pull/77045">PR #77045</a>)
*
* <p>The mappings for managed system indices are automatically upgraded when all nodes in the cluster are compatible with the
* descriptor's mappings. See {@link SystemIndexMappingUpdateService} for details.
* When the mappings change add the previous index descriptors with
* {@link SystemIndexDescriptor.Builder#setPriorSystemIndexDescriptors(List)}. In a mixed cluster setting this enables auto creation
* of the index with compatible mappings.
*
* <p>We hope to remove the currently deprecated forms of access to system indices in a future release. A newly added system index with
* no backwards-compatibility requirements may opt into our desired behavior by setting isNetNew to true. A "net new system index"
* strictly enforces its allowed product origins, and cannot be accessed by any REST API request that lacks a correct product header.
* A system index that is fully internal to Elasticsearch will not allow any product origins; such an index is fully "locked down,"
* and in general can only be changed by restoring feature states from snapshots.
*/
public
|
operators
|
java
|
spring-projects__spring-framework
|
spring-web/src/main/java/org/springframework/web/util/ContentCachingResponseWrapper.java
|
{
"start": 8355,
"end": 8960
}
|
class ____ extends ServletOutputStream {

	/**
	 * The underlying response stream. Writes go to the enclosing wrapper's
	 * {@code content} buffer instead; the delegate is consulted only for
	 * readiness and write-listener registration.
	 */
	private final ServletOutputStream delegate;

	public ResponseServletOutputStream(ServletOutputStream os) {
		this.delegate = os;
	}

	@Override
	public void write(int b) throws IOException {
		// Capture into the wrapper's in-memory buffer.
		content.write(b);
	}

	@Override
	public void write(byte[] b, int off, int len) throws IOException {
		content.write(b, off, len);
	}

	@Override
	public boolean isReady() {
		return this.delegate.isReady();
	}

	@Override
	public void setWriteListener(WriteListener writeListener) {
		this.delegate.setWriteListener(writeListener);
	}
}
private
|
ResponseServletOutputStream
|
java
|
assertj__assertj-core
|
assertj-core/src/test/java/org/assertj/core/internal/ComparatorBasedComparisonStrategy_toString_Test.java
|
{
"start": 967,
"end": 2247
}
|
class ____ extends AbstractTest_ComparatorBasedComparisonStrategy {
  // NOTE: the anonymous Comparator classes below are deliberate - the
  // strategy's toString() distinguishes anonymous comparator classes from
  // named ones, so rewriting them as lambdas would change the tested output.
  @Test
  void toString_with_anonymous_comparator() {
    ComparatorBasedComparisonStrategy lengthComparisonStrategy = new ComparatorBasedComparisonStrategy(new Comparator<String>() {
      @Override
      public int compare(String s1, String s2) {
        return s1.length() - s2.length();
      }
    });
    assertThat(lengthComparisonStrategy).hasToString("'anonymous comparator class'");
  }
  @Test
  void toString_with_anonymous_comparator_with_description() {
    // GIVEN an anonymous comparator that was given an explicit description
    ComparatorBasedComparisonStrategy namedAnonymousComparator = new ComparatorBasedComparisonStrategy(new Comparator<String>() {
      @Override
      public int compare(String s1, String s2) {
        return s1.compareTo(s2);
      }
    }, "Mr. Comparator");
    // THEN the description wins over the anonymous-class fallback
    assertThat(namedAnonymousComparator).hasToString("'Mr. Comparator'");
  }
  @Test
  void toString_with_non_anonymous_comparator() {
    // A named comparator class renders as its simple class name.
    assertThat(caseInsensitiveComparisonStrategy).hasToString("CaseInsensitiveStringComparator");
  }
  @Test
  void toString_with_provided_comparator_name() {
    assertThat(describedComparisonStrategy).hasToString("'Case-insensitive comparator for String class'");
  }
}
|
ComparatorBasedComparisonStrategy_toString_Test
|
java
|
FasterXML__jackson-databind
|
src/test/java/tools/jackson/databind/deser/filter/UnknownPropertyDeserTest.java
|
{
"start": 3574,
"end": 10686
}
|
// Minimal bean with a single settable property; used as a deserialization
// target in the tests below.
class ____ {
  public String aProperty;
}
/*
/**********************************************************
/* Test methods
/**********************************************************
*/
private final ObjectMapper MAPPER = newJsonMapper();
final static String JSON_UNKNOWN_FIELD = "{ \"a\" : 1, \"foo\" : [ 1, 2, 3], \"b\" : -1 }";
/**
 * By default we should just get an exception if an unknown property
 * is encountered.
 */
@Test
public void testUnknownHandlingDefault() throws Exception
{
    try {
        MAPPER.readerFor(TestBean.class)
            .with(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES)
            .readValue(JSON_UNKNOWN_FIELD);
        fail("Should not pass");
    } catch (UnrecognizedPropertyException jex) {
        // "foo" is the property TestBean does not define
        verifyException(jex, "Unrecognized property \"foo\"");
    }
}
/**
 * Test that verifies that it is possible to ignore unknown properties using
 * {@link DeserializationProblemHandler}.
 */
@Test
public void testUnknownHandlingIgnoreWithHandler() throws Exception
{
    ObjectMapper mapper = jsonMapperBuilder()
            .addHandler(new MyHandler())
            .build();
    TestBean result = mapper.readValue(JSON_UNKNOWN_FIELD, TestBean.class);
    assertNotNull(result);
    assertEquals(1, result._a);
    assertEquals(-1, result._b);
    // MyHandler records the skipped property name and token type
    assertEquals("foo:START_ARRAY", result._unknown);
}
/**
 * Test that verifies that it is possible to ignore unknown properties using
 * {@link DeserializationProblemHandler} and an ObjectReader.
 */
@Test
public void testUnknownHandlingIgnoreWithHandlerAndObjectReader() throws Exception
{
    ObjectMapper mapper = newJsonMapper();
    // Handler attached per-reader instead of on the mapper
    TestBean result = mapper.readerFor(TestBean.class).withHandler(new MyHandler())
            .readValue(JSON_UNKNOWN_FIELD);
    assertNotNull(result);
    assertEquals(1, result._a);
    assertEquals(-1, result._b);
    assertEquals("foo:START_ARRAY", result._unknown);
}
/**
 * Test for checking that it is also possible to simply suppress
 * error reporting for unknown properties.
 */
@Test
public void testUnknownHandlingIgnoreWithFeature() throws Exception
{
    ObjectMapper mapper = jsonMapperBuilder()
            .disable(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES)
            .build();
    TestBean result = mapper.readValue(JSON_UNKNOWN_FIELD, TestBean.class);
    assertNotNull(result);
    assertEquals(1, result._a);
    // no handler involved, so nothing was recorded for "foo"
    assertNull(result._unknown);
    assertEquals(-1, result._b);
}
// @JsonIgnoreProperties on IgnoreSome should drop "b" and "c" while still
// binding "a" and "d".
@Test
public void testWithClassIgnore() throws Exception
{
    IgnoreSome result = MAPPER.readValue("{ \"a\":1,\"b\":2,\"c\":\"x\",\"d\":\"y\"}",
            IgnoreSome.class);
    // first: should deserialize 2 of properties normally
    assertEquals(1, result.a);
    assertEquals("y", result.d());
    // and not take other 2
    assertEquals(0, result.b);
    assertNull(result.c());
}
@Test
public void testClassIgnoreWithMap() throws Exception
{
// Let's actually use incompatible types for "a" and "d"; should not matter when ignored
IgnoreMap result = MAPPER.readValue(
"{ \"a\":[ 1],\n"
+"\"b\":2,\n"
+"\"c\": \"x\",\n"
+"\"d\":false }", IgnoreMap.class);
assertEquals(2, result.size());
Object ob = result.get("b");
assertEquals(Integer.class, ob.getClass());
assertEquals(Integer.valueOf(2), ob);
assertEquals("x", result.get("c"));
assertFalse(result.containsKey("a"));
assertFalse(result.containsKey("d"));
}
@Test
public void testClassWithIgnoreUnknown() throws Exception
{
IgnoreUnknown result = MAPPER.readValue
("{\"b\":3,\"c\":[1,2],\"x\":{ },\"a\":-3}", IgnoreUnknown.class);
assertEquals(-3, result.a);
}
@Test
public void testAnySetterWithFailOnUnknownDisabled() throws Exception
{
IgnoreUnknownAnySetter value = MAPPER.readValue("{\"x\":\"y\", \"a\":\"b\"}", IgnoreUnknownAnySetter.class);
assertNotNull(value);
assertEquals(2, value.props.size());
}
@Test
public void testUnwrappedWithFailOnUnknownDisabled() throws Exception
{
IgnoreUnknownUnwrapped value = MAPPER.readValue("{\"a\":1, \"b\":2}", IgnoreUnknownUnwrapped.class);
assertNotNull(value);
assertEquals(1, value.child.a);
assertEquals(2, value.child.b);
}
/**
* Test that verifies that use of {@link JsonIgnore} will add implicit
* skipping of matching properties.
*/
@Test
public void testClassWithUnknownAndIgnore() throws Exception
{
// should be ok: "a" and "b" ignored, "c" mapped:
ImplicitIgnores result = MAPPER.readValue
("{\"a\":1,\"b\":2,\"c\":3 }", ImplicitIgnores.class);
assertEquals(3, result.c);
// but "d" is not defined, so should still error
try {
MAPPER.readValue("{\"a\":1,\"b\":2,\"c\":3,\"d\":4 }", ImplicitIgnores.class);
} catch (UnrecognizedPropertyException e) {
verifyException(e, "Unrecognized property \"d\"");
}
}
@Test
public void testPropertyIgnoral() throws Exception
{
XYZWrapper1 result = MAPPER.readValue("{\"value\":{\"y\":2,\"x\":1,\"z\":3}}", XYZWrapper1.class);
assertEquals(2, result.value.y);
assertEquals(3, result.value.z);
}
@Test
public void testPropertyIgnoralWithClass() throws Exception
{
XYZWrapper2 result = MAPPER.readValue("{\"value\":{\"y\":2,\"x\":1,\"z\":3}}",
XYZWrapper2.class);
assertEquals(1, result.value.x);
}
@Test
public void testPropertyIgnoralForMap() throws Exception
{
MapWithoutX result = MAPPER.readValue("{\"values\":{\"x\":1,\"y\":2}}", MapWithoutX.class);
assertNotNull(result.values);
assertEquals(1, result.values.size());
assertEquals(Integer.valueOf(2), result.values.get("y"));
}
@Test
public void testIssue987() throws Exception
{
ObjectMapper jsonMapper = jsonMapperBuilder()
.addHandler(new DeserializationProblemHandler() {
@Override
public boolean handleUnknownProperty(DeserializationContext ctxt, JsonParser p,
ValueDeserializer<?> deserializer, Object beanOrClass, String propertyName) {
p.skipChildren();
return true;
}
})
.build();
String input = "[{\"aProperty\":\"x\",\"unknown\":{\"unknown\":{}}}]";
List<Bean987> deserializedList = jsonMapper.readValue(input,
new TypeReference<List<Bean987>>() { });
assertEquals(1, deserializedList.size());
}
}
|
Bean987
|
java
|
mybatis__mybatis-3
|
src/main/java/org/apache/ibatis/datasource/pooled/PooledDataSourceFactory.java
|
{
"start": 809,
"end": 966
}
|
class ____ extends UnpooledDataSourceFactory {
public PooledDataSourceFactory() {
this.dataSource = new PooledDataSource();
}
}
|
PooledDataSourceFactory
|
java
|
mockito__mockito
|
mockito-integration-tests/inline-mocks-tests/src/test/java/org/mockitoinline/InitializationTest.java
|
{
"start": 578,
"end": 618
}
|
enum ____ {
VALUE
}
}
|
SampleEnum
|
java
|
quarkusio__quarkus
|
extensions/resteasy-reactive/rest-client/runtime/src/main/java/io/quarkus/rest/client/reactive/runtime/Constants.java
|
{
"start": 57,
"end": 136
}
|
class ____ {
public final static int DEFAULT_MAX_CHUNK_SIZE = 8096;
}
|
Constants
|
java
|
netty__netty
|
common/src/main/java/io/netty/util/LeakPresenceDetector.java
|
{
"start": 14859,
"end": 15024
}
|
class ____ extends IllegalStateException {
public AllocationProhibitedException(String s) {
super(s);
}
}
}
|
AllocationProhibitedException
|
java
|
spring-projects__spring-framework
|
spring-test/src/main/java/org/springframework/test/json/AbstractJsonContentAssert.java
|
{
"start": 2588,
"end": 18397
}
|
class ____<SELF extends AbstractJsonContentAssert<SELF>>
extends AbstractObjectAssert<SELF, JsonContent> {
private static final Failures failures = Failures.instance();
private final @Nullable JsonConverterDelegate converterDelegate;
private @Nullable Class<?> resourceLoadClass;
private @Nullable Charset charset;
private JsonLoader jsonLoader;
/**
* Create an assert for the given JSON document.
* @param actual the JSON document to assert
* @param selfType the implementation type of this assert
*/
protected AbstractJsonContentAssert(@Nullable JsonContent actual, Class<?> selfType) {
super(actual, selfType);
this.converterDelegate = (actual != null ? actual.getJsonConverterDelegate() : null);
this.jsonLoader = new JsonLoader(null, null);
as("JSON content");
}
/**
* Verify that the actual value can be converted to an instance of the
* given {@code target}, and produce a new {@linkplain AbstractObjectAssert
* assertion} object narrowed to that type.
* @param target the {@linkplain Class type} to convert the actual value to
*/
public <T> AbstractObjectAssert<?, T> convertTo(Class<T> target) {
isNotNull();
T value = convertToTargetType(target);
return Assertions.assertThat(value);
}
/**
* Verify that the actual value can be converted to an instance of the type
* defined by the given {@link AssertFactory} and return a new Assert narrowed
* to that type.
* <p>{@link InstanceOfAssertFactories} provides static factories for all the
* types supported by {@link Assertions#assertThat}. Additional factories can
* be created by implementing {@link AssertFactory}.
* <p>Example: <pre><code class="java">
* // Check that the JSON document is an array of 3 users
* assertThat(json).convertTo(InstanceOfAssertFactories.list(User.class))
* hasSize(3); // ListAssert of User
* </code></pre>
* @param assertFactory the {@link AssertFactory} to use to produce a narrowed
* Assert for the type that it defines.
*/
public <ASSERT extends AbstractAssert<?, ?>> ASSERT convertTo(AssertFactory<?, ASSERT> assertFactory) {
isNotNull();
return assertFactory.createAssert(this::convertToTargetType);
}
private <T> T convertToTargetType(Type targetType) {
String json = this.actual.getJson();
if (this.converterDelegate == null) {
throw new IllegalStateException(
"No JSON message converter available to convert %s".formatted(json));
}
try {
return this.converterDelegate.read(json, ResolvableType.forType(targetType));
}
catch (Exception ex) {
throw failure(new ValueProcessingFailed(json,
"To convert successfully to:%n %s%nBut it failed:%n %s%n".formatted(
targetType.getTypeName(), ex.getMessage())));
}
}
// JsonPath support
/**
* Verify that the given JSON {@code path} is present, and extract the JSON
* value for further {@linkplain JsonPathValueAssert assertions}.
* @param path the {@link JsonPath} expression
* @see #hasPathSatisfying(String, Consumer)
*/
public JsonPathValueAssert extractingPath(String path) {
Object value = new JsonPathValue(path).getValue();
return new JsonPathValueAssert(value, path, this.converterDelegate);
}
/**
* Verify that the given JSON {@code path} is present with a JSON value
* satisfying the given {@code valueRequirements}.
* @param path the {@link JsonPath} expression
* @param valueRequirements a {@link Consumer} of the assertion object
*/
public SELF hasPathSatisfying(String path, Consumer<AssertProvider<JsonPathValueAssert>> valueRequirements) {
Object value = new JsonPathValue(path).assertHasPath();
JsonPathValueAssert valueAssert = new JsonPathValueAssert(value, path, this.converterDelegate);
valueRequirements.accept(() -> valueAssert);
return this.myself;
}
/**
* Verify that the given JSON {@code path} matches. For paths with an
* operator, this validates that the path expression is valid, but does not
* validate that it yield any results.
* @param path the {@link JsonPath} expression
*/
public SELF hasPath(String path) {
new JsonPathValue(path).assertHasPath();
return this.myself;
}
/**
* Verify that the given JSON {@code path} does not match.
* @param path the {@link JsonPath} expression
*/
public SELF doesNotHavePath(String path) {
new JsonPathValue(path).assertDoesNotHavePath();
return this.myself;
}
// JsonAssert support
/**
* Verify that the actual value is {@linkplain JsonCompareMode#STRICT strictly}
* equal to the given JSON. The {@code expected} value can contain the JSON
* itself or, if it ends with {@code .json}, the name of a resource to be
* loaded from the classpath.
* @param expected the expected JSON or the name of a resource containing
* the expected JSON
* @see #isEqualTo(CharSequence, JsonCompareMode)
*/
public SELF isEqualTo(@Nullable CharSequence expected) {
return isEqualTo(expected, JsonCompareMode.STRICT);
}
/**
* Verify that the actual value is equal to the given JSON. The
* {@code expected} value can contain the JSON itself or, if it ends with
* {@code .json}, the name of a resource to be loaded from the classpath.
* @param expected the expected JSON or the name of a resource containing
* the expected JSON
* @param compareMode the compare mode used when checking
*/
public SELF isEqualTo(@Nullable CharSequence expected, JsonCompareMode compareMode) {
String expectedJson = this.jsonLoader.getJson(expected);
return assertIsMatch(compare(expectedJson, compareMode));
}
/**
* Verify that the actual value is equal to the given JSON {@link Resource}.
* <p>The resource abstraction allows to provide several input types:
* <ul>
* <li>a {@code byte} array, using {@link ByteArrayResource}</li>
* <li>a {@code classpath} resource, using {@link ClassPathResource}</li>
* <li>a {@link File} or {@link Path}, using {@link FileSystemResource}</li>
* <li>an {@link InputStream}, using {@link InputStreamResource}</li>
* </ul>
* @param expected a resource containing the expected JSON
* @param compareMode the compare mode used when checking
*/
public SELF isEqualTo(Resource expected, JsonCompareMode compareMode) {
String expectedJson = this.jsonLoader.getJson(expected);
return assertIsMatch(compare(expectedJson, compareMode));
}
/**
* Verify that the actual value is equal to the given JSON. The
* {@code expected} value can contain the JSON itself or, if it ends with
* {@code .json}, the name of a resource to be loaded from the classpath.
* @param expected the expected JSON or the name of a resource containing
* the expected JSON
* @param comparator the comparator used when checking
*/
public SELF isEqualTo(@Nullable CharSequence expected, JsonComparator comparator) {
String expectedJson = this.jsonLoader.getJson(expected);
return assertIsMatch(compare(expectedJson, comparator));
}
/**
* Verify that the actual value is equal to the given JSON {@link Resource}.
* <p>The resource abstraction allows to provide several input types:
* <ul>
* <li>a {@code byte} array, using {@link ByteArrayResource}</li>
* <li>a {@code classpath} resource, using {@link ClassPathResource}</li>
* <li>a {@link File} or {@link Path}, using {@link FileSystemResource}</li>
* <li>an {@link InputStream}, using {@link InputStreamResource}</li>
* </ul>
* @param expected a resource containing the expected JSON
* @param comparator the comparator used when checking
*/
public SELF isEqualTo(Resource expected, JsonComparator comparator) {
String expectedJson = this.jsonLoader.getJson(expected);
return assertIsMatch(compare(expectedJson, comparator));
}
/**
* Verify that the actual value is {@link JsonCompareMode#LENIENT leniently}
* equal to the given JSON. The {@code expected} value can contain the JSON
* itself or, if it ends with {@code .json}, the name of a resource to be
* loaded from the classpath.
* @param expected the expected JSON or the name of a resource containing
* the expected JSON
*/
public SELF isLenientlyEqualTo(@Nullable CharSequence expected) {
return isEqualTo(expected, JsonCompareMode.LENIENT);
}
/**
* Verify that the actual value is {@link JsonCompareMode#LENIENT leniently}
* equal to the given JSON {@link Resource}.
* <p>The resource abstraction allows to provide several input types:
* <ul>
* <li>a {@code byte} array, using {@link ByteArrayResource}</li>
* <li>a {@code classpath} resource, using {@link ClassPathResource}</li>
* <li>a {@link File} or {@link Path}, using {@link FileSystemResource}</li>
* <li>an {@link InputStream}, using {@link InputStreamResource}</li>
* </ul>
* @param expected a resource containing the expected JSON
*/
public SELF isLenientlyEqualTo(Resource expected) {
return isEqualTo(expected, JsonCompareMode.LENIENT);
}
/**
* Verify that the actual value is {@link JsonCompareMode#STRICT strictly}
* equal to the given JSON. The {@code expected} value can contain the JSON
* itself or, if it ends with {@code .json}, the name of a resource to be
* loaded from the classpath.
* @param expected the expected JSON or the name of a resource containing
* the expected JSON
*/
public SELF isStrictlyEqualTo(@Nullable CharSequence expected) {
return isEqualTo(expected, JsonCompareMode.STRICT);
}
/**
* Verify that the actual value is {@link JsonCompareMode#STRICT strictly}
* equal to the given JSON {@link Resource}.
* <p>The resource abstraction allows to provide several input types:
* <ul>
* <li>a {@code byte} array, using {@link ByteArrayResource}</li>
* <li>a {@code classpath} resource, using {@link ClassPathResource}</li>
* <li>a {@link File} or {@link Path}, using {@link FileSystemResource}</li>
* <li>an {@link InputStream}, using {@link InputStreamResource}</li>
* </ul>
* @param expected a resource containing the expected JSON
*/
public SELF isStrictlyEqualTo(Resource expected) {
return isEqualTo(expected, JsonCompareMode.STRICT);
}
/**
* Verify that the actual value is {@linkplain JsonCompareMode#STRICT strictly}
* not equal to the given JSON. The {@code expected} value can contain the
* JSON itself or, if it ends with {@code .json}, the name of a resource to
* be loaded from the classpath.
* @param expected the expected JSON or the name of a resource containing
* the expected JSON
* @see #isNotEqualTo(CharSequence, JsonCompareMode)
*/
public SELF isNotEqualTo(@Nullable CharSequence expected) {
return isNotEqualTo(expected, JsonCompareMode.STRICT);
}
/**
* Verify that the actual value is not equal to the given JSON. The
* {@code expected} value can contain the JSON itself or, if it ends with
* {@code .json}, the name of a resource to be loaded from the classpath.
* @param expected the expected JSON or the name of a resource containing
* the expected JSON
* @param compareMode the compare mode used when checking
*/
public SELF isNotEqualTo(@Nullable CharSequence expected, JsonCompareMode compareMode) {
String expectedJson = this.jsonLoader.getJson(expected);
return assertIsMismatch(compare(expectedJson, compareMode));
}
/**
* Verify that the actual value is not equal to the given JSON {@link Resource}.
* <p>The resource abstraction allows to provide several input types:
* <ul>
* <li>a {@code byte} array, using {@link ByteArrayResource}</li>
* <li>a {@code classpath} resource, using {@link ClassPathResource}</li>
* <li>a {@link File} or {@link Path}, using {@link FileSystemResource}</li>
* <li>an {@link InputStream}, using {@link InputStreamResource}</li>
* </ul>
* @param expected a resource containing the expected JSON
* @param compareMode the compare mode used when checking
*/
public SELF isNotEqualTo(Resource expected, JsonCompareMode compareMode) {
String expectedJson = this.jsonLoader.getJson(expected);
return assertIsMismatch(compare(expectedJson, compareMode));
}
/**
* Verify that the actual value is not equal to the given JSON. The
* {@code expected} value can contain the JSON itself or, if it ends with
* {@code .json}, the name of a resource to be loaded from the classpath.
* @param expected the expected JSON or the name of a resource containing
* the expected JSON
* @param comparator the comparator used when checking
*/
public SELF isNotEqualTo(@Nullable CharSequence expected, JsonComparator comparator) {
String expectedJson = this.jsonLoader.getJson(expected);
return assertIsMismatch(compare(expectedJson, comparator));
}
/**
* Verify that the actual value is not equal to the given JSON {@link Resource}.
* <p>The resource abstraction allows to provide several input types:
* <ul>
* <li>a {@code byte} array, using {@link ByteArrayResource}</li>
* <li>a {@code classpath} resource, using {@link ClassPathResource}</li>
* <li>a {@link File} or {@link Path}, using {@link FileSystemResource}</li>
* <li>an {@link InputStream}, using {@link InputStreamResource}</li>
* </ul>
* @param expected a resource containing the expected JSON
* @param comparator the comparator used when checking
*/
public SELF isNotEqualTo(Resource expected, JsonComparator comparator) {
String expectedJson = this.jsonLoader.getJson(expected);
return assertIsMismatch(compare(expectedJson, comparator));
}
/**
* Verify that the actual value is not {@link JsonCompareMode#LENIENT
* leniently} equal to the given JSON. The {@code expected} value can
* contain the JSON itself or, if it ends with {@code .json}, the name of a
* resource to be loaded from the classpath.
* @param expected the expected JSON or the name of a resource containing
* the expected JSON
*/
public SELF isNotLenientlyEqualTo(@Nullable CharSequence expected) {
return isNotEqualTo(expected, JsonCompareMode.LENIENT);
}
/**
* Verify that the actual value is not {@link JsonCompareMode#LENIENT
* leniently} equal to the given JSON {@link Resource}.
* <p>The resource abstraction allows to provide several input types:
* <ul>
* <li>a {@code byte} array, using {@link ByteArrayResource}</li>
* <li>a {@code classpath} resource, using {@link ClassPathResource}</li>
* <li>a {@link File} or {@link Path}, using {@link FileSystemResource}</li>
* <li>an {@link InputStream}, using {@link InputStreamResource}</li>
* </ul>
* @param expected a resource containing the expected JSON
*/
public SELF isNotLenientlyEqualTo(Resource expected) {
return isNotEqualTo(expected, JsonCompareMode.LENIENT);
}
/**
* Verify that the actual value is not {@link JsonCompareMode#STRICT
* strictly} equal to the given JSON. The {@code expected} value can
* contain the JSON itself or, if it ends with {@code .json}, the name of a
* resource to be loaded from the classpath.
* @param expected the expected JSON or the name of a resource containing
* the expected JSON
*/
public SELF isNotStrictlyEqualTo(@Nullable CharSequence expected) {
return isNotEqualTo(expected, JsonCompareMode.STRICT);
}
/**
* Verify that the actual value is not {@link JsonCompareMode#STRICT
* strictly} equal to the given JSON {@link Resource}.
* <p>The resource abstraction allows to provide several input types:
* <ul>
* <li>a {@code byte} array, using {@link ByteArrayResource}</li>
* <li>a {@code classpath} resource, using {@link ClassPathResource}</li>
* <li>a {@link File} or {@link Path}, using {@link FileSystemResource}</li>
* <li>an {@link InputStream}, using {@link InputStreamResource}</li>
* </ul>
* @param expected a resource containing the expected JSON
*/
public SELF isNotStrictlyEqualTo(Resource expected) {
return isNotEqualTo(expected, JsonCompareMode.STRICT);
}
/**
* Override the
|
AbstractJsonContentAssert
|
java
|
elastic__elasticsearch
|
x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/DocumentSubsetReader.java
|
{
"start": 5611,
"end": 10241
}
|
class ____ extends FilterDirectoryReader {
private final Query roleQuery;
private final DocumentSubsetBitsetCache bitsetCache;
DocumentSubsetDirectoryReader(final DirectoryReader in, final DocumentSubsetBitsetCache bitsetCache, final Query roleQuery)
throws IOException {
super(in, new SubReaderWrapper() {
@Override
public LeafReader wrap(LeafReader reader) {
try {
return new DocumentSubsetReader(reader, bitsetCache, roleQuery);
} catch (Exception e) {
throw ExceptionsHelper.convertToElastic(e);
}
}
});
this.bitsetCache = bitsetCache;
this.roleQuery = roleQuery;
verifyNoOtherDocumentSubsetDirectoryReaderIsWrapped(in);
}
@Override
protected DirectoryReader doWrapDirectoryReader(DirectoryReader in) throws IOException {
return new DocumentSubsetDirectoryReader(in, bitsetCache, roleQuery);
}
private static void verifyNoOtherDocumentSubsetDirectoryReaderIsWrapped(DirectoryReader reader) {
if (reader instanceof FilterDirectoryReader filterDirectoryReader) {
if (filterDirectoryReader instanceof DocumentSubsetDirectoryReader) {
throw new IllegalArgumentException(
LoggerMessageFormat.format("Can't wrap [{}] twice", DocumentSubsetDirectoryReader.class)
);
} else {
verifyNoOtherDocumentSubsetDirectoryReaderIsWrapped(filterDirectoryReader.getDelegate());
}
}
}
@Override
public CacheHelper getReaderCacheHelper() {
return in.getReaderCacheHelper();
}
}
private final DocumentSubsetBitsetCache bitsetCache;
private final Query roleQuery;
// we don't use a volatile here because the bitset is resolved before numDocs in the synchronized block
// so any thread that see numDocs != -1 should also see the true value of the roleQueryBits (happens-before).
private BitSet roleQueryBits;
private volatile int numDocs = -1;
private DocumentSubsetReader(final LeafReader in, DocumentSubsetBitsetCache bitsetCache, final Query roleQuery) {
super(in);
this.bitsetCache = bitsetCache;
this.roleQuery = roleQuery;
}
/**
* Resolve the role query and the number of docs lazily
*/
private void computeNumDocsIfNeeded() {
if (numDocs == -1) {
synchronized (this) {
if (numDocs == -1) {
assert Transports.assertNotTransportThread("resolving role query");
try {
roleQueryBits = bitsetCache.getBitSet(roleQuery, in.getContext());
numDocs = getNumDocs(in, roleQuery, roleQueryBits);
} catch (Exception e) {
throw new ElasticsearchException("Failed to load role query", e);
}
}
}
}
}
@Override
public Bits getLiveDocs() {
computeNumDocsIfNeeded();
final Bits actualLiveDocs = in.getLiveDocs();
if (roleQueryBits == null) {
// If we were to return a <code>null</code> liveDocs then that would mean that no docs are marked as deleted,
// but that isn't the case. No docs match with the role query and therefore all docs are marked as deleted.
return new Bits.MatchNoBits(in.maxDoc());
} else if (roleQueryBits instanceof MatchAllBitSet) {
return actualLiveDocs;
} else if (actualLiveDocs == null) {
return roleQueryBits;
} else {
// apply deletes when needed:
return new CombinedBits(roleQueryBits, actualLiveDocs);
}
}
@Override
public int numDocs() {
computeNumDocsIfNeeded();
return numDocs;
}
@Override
public boolean hasDeletions() {
// we always return liveDocs and hide docs:
return true;
}
@Override
public CacheHelper getCoreCacheHelper() {
return in.getCoreCacheHelper();
}
@Override
public CacheHelper getReaderCacheHelper() {
// Not delegated since we change the live docs
return null;
}
@Override
protected StoredFieldsReader doGetSequentialStoredFieldsReader(StoredFieldsReader reader) {
return reader;
}
}
|
DocumentSubsetDirectoryReader
|
java
|
apache__camel
|
components/camel-kubernetes/src/test/java/org/apache/camel/component/kubernetes/producer/KubernetesDeploymentsProducerTest.java
|
{
"start": 1965,
"end": 12964
}
|
class ____ extends KubernetesTestSupport {
KubernetesMockServer server;
NamespacedKubernetesClient client;
@BindToRegistry("kubernetesClient")
public KubernetesClient getClient() {
return client;
}
@Test
void listTest() {
server.expect().withPath("/apis/apps/v1/deployments")
.andReturn(200, new DeploymentListBuilder().addNewItem().and().addNewItem().and().build()).once();
server.expect().withPath("/apis/apps/v1/namespaces/test/deployments")
.andReturn(200, new DeploymentListBuilder().addNewItem().and().build()).once();
List<?> result = template.requestBody("direct:list", "", List.class);
assertEquals(2, result.size());
Exchange ex = template.request("direct:list",
exchange -> exchange.getIn().setHeader(KubernetesConstants.KUBERNETES_NAMESPACE_NAME, "test"));
assertEquals(1, ex.getMessage().getBody(List.class).size());
}
@Test
void listByLabelsTest() throws Exception {
Map<String, String> labels = Map.of(
"key1", "value1",
"key2", "value2");
String urlEncodedLabels = toUrlEncoded(labels.entrySet().stream().map(e -> e.getKey() + "=" + e.getValue())
.collect(Collectors.joining(",")));
server.expect()
.withPath("/apis/apps/v1/deployments?labelSelector=" + urlEncodedLabels)
.andReturn(200, new DeploymentListBuilder().addNewItem().and().addNewItem().and().addNewItem().and().build())
.once();
server.expect()
.withPath("/apis/apps/v1/namespaces/test/deployments?labelSelector=" + urlEncodedLabels)
.andReturn(200, new DeploymentListBuilder().addNewItem().and().addNewItem().and().build())
.once();
Exchange ex = template.request("direct:listByLabels",
exchange -> exchange.getIn().setHeader(KubernetesConstants.KUBERNETES_DEPLOYMENTS_LABELS, labels));
assertEquals(3, ex.getMessage().getBody(List.class).size());
ex = template.request("direct:listByLabels", exchange -> {
exchange.getIn().setHeader(KubernetesConstants.KUBERNETES_DEPLOYMENTS_LABELS, labels);
exchange.getIn().setHeader(KubernetesConstants.KUBERNETES_NAMESPACE_NAME, "test");
});
assertEquals(2, ex.getMessage().getBody(List.class).size());
}
@Test
void createDeployment() {
Map<String, String> labels = Map.of("my.label.key", "my.label.value");
DeploymentSpec spec = new DeploymentSpecBuilder().withReplicas(13).build();
Deployment de1
= new DeploymentBuilder().withNewMetadata().withName("de1").withNamespace("test").withLabels(labels).and()
.withSpec(spec).build();
server.expect().post().withPath("/apis/apps/v1/namespaces/test/deployments").andReturn(200, de1).once();
Exchange ex = template.request("direct:createDeployment", exchange -> {
exchange.getIn().setHeader(KubernetesConstants.KUBERNETES_NAMESPACE_NAME, "test");
exchange.getIn().setHeader(KubernetesConstants.KUBERNETES_DEPLOYMENTS_LABELS, labels);
exchange.getIn().setHeader(KubernetesConstants.KUBERNETES_DEPLOYMENT_NAME, "de1");
exchange.getIn().setHeader(KubernetesConstants.KUBERNETES_DEPLOYMENT_SPEC, spec);
});
Deployment result = ex.getMessage().getBody(Deployment.class);
assertEquals("test", result.getMetadata().getNamespace());
assertEquals("de1", result.getMetadata().getName());
assertEquals(labels, result.getMetadata().getLabels());
assertEquals(13, result.getSpec().getReplicas());
}
@Test
void createDeploymentWithAnnotations() {
Map<String, String> labels = Map.of("my.label.key", "my.label.value");
Map<String, String> annotations = Map.of("my.annotation.key", "my.annotation.value");
DeploymentSpec spec = new DeploymentSpecBuilder().withReplicas(13).build();
Deployment de1
= new DeploymentBuilder().withNewMetadata().withName("de1").withNamespace("test").withLabels(labels)
.withAnnotations(annotations).and()
.withSpec(spec).build();
server.expect().post().withPath("/apis/apps/v1/namespaces/test/deployments").andReturn(200, de1).once();
Exchange ex = template.request("direct:createDeploymentWithAnnotations", exchange -> {
exchange.getIn().setHeader(KubernetesConstants.KUBERNETES_NAMESPACE_NAME, "test");
exchange.getIn().setHeader(KubernetesConstants.KUBERNETES_DEPLOYMENTS_LABELS, labels);
exchange.getIn().setHeader(KubernetesConstants.KUBERNETES_DEPLOYMENT_NAME, "de1");
exchange.getIn().setHeader(KubernetesConstants.KUBERNETES_DEPLOYMENT_SPEC, spec);
exchange.getIn().setHeader(KubernetesConstants.KUBERNETES_DEPLOYMENTS_ANNOTATIONS, annotations);
});
Deployment result = ex.getMessage().getBody(Deployment.class);
assertEquals("test", result.getMetadata().getNamespace());
assertEquals("de1", result.getMetadata().getName());
assertEquals(labels, result.getMetadata().getLabels());
assertEquals(13, result.getSpec().getReplicas());
assertEquals(annotations, result.getMetadata().getAnnotations());
}
@Test
void updateDeployment() {
Map<String, String> labels = Map.of("my.label.key", "my.label.value");
DeploymentSpec spec = new DeploymentSpecBuilder().withReplicas(13).build();
Deployment de1
= new DeploymentBuilder().withNewMetadata().withName("de1").withNamespace("test").withLabels(labels).and()
.withSpec(spec).build();
server.expect().get().withPath("/apis/apps/v1/namespaces/test/deployments/de1")
.andReturn(200,
new DeploymentBuilder().withNewMetadata().withName("de1").withNamespace("test").endMetadata().build())
.once();
server.expect().put().withPath("/apis/apps/v1/namespaces/test/deployments/de1").andReturn(200, de1).once();
Exchange ex = template.request("direct:updateDeployment", exchange -> {
exchange.getIn().setHeader(KubernetesConstants.KUBERNETES_NAMESPACE_NAME, "test");
exchange.getIn().setHeader(KubernetesConstants.KUBERNETES_DEPLOYMENTS_LABELS, labels);
exchange.getIn().setHeader(KubernetesConstants.KUBERNETES_DEPLOYMENT_NAME, "de1");
exchange.getIn().setHeader(KubernetesConstants.KUBERNETES_DEPLOYMENT_SPEC, spec);
});
Deployment result = ex.getMessage().getBody(Deployment.class);
assertEquals("test", result.getMetadata().getNamespace());
assertEquals("de1", result.getMetadata().getName());
assertEquals(labels, result.getMetadata().getLabels());
assertEquals(13, result.getSpec().getReplicas());
}
@Test
void deleteDeployment() {
Deployment de1 = new DeploymentBuilder().withNewMetadata().withNamespace("test").withName("de1")
.withResourceVersion("1").withGeneration(2L).endMetadata().withNewSpec()
.withReplicas(0).endSpec().withNewStatus().withReplicas(1).withObservedGeneration(1L).endStatus().build();
server.expect().withPath("/apis/extensions/v1beta1/namespaces/test/deployments/de1").andReturn(200, de1).once();
server.expect().withPath("/apis/extensions/v1beta1/namespaces/test/deployments/de1")
.andReturn(200,
new DeploymentBuilder(de1).editStatus().withReplicas(0).withObservedGeneration(2L).endStatus().build())
.times(5);
server.expect().delete().withPath("/apis/apps/v1/namespaces/test/deployments/de1").andReturn(200, de1).once();
Exchange ex = template.request("direct:deleteDeployment", exchange -> {
exchange.getIn().setHeader(KubernetesConstants.KUBERNETES_DEPLOYMENT_NAME, "de1");
exchange.getIn().setHeader(KubernetesConstants.KUBERNETES_NAMESPACE_NAME, "test");
});
boolean deDeleted = ex.getMessage().getBody(Boolean.class);
assertTrue(deDeleted);
}
@Test
void scaleDeployment() {
server.expect().withPath("/apis/apps/v1/namespaces/test/deployments/de1")
.andReturn(200, new DeploymentBuilder().withNewMetadata().withName("de1")
.withResourceVersion("1").endMetadata().withNewSpec().withReplicas(5).endSpec().withNewStatus()
.withReplicas(5).endStatus().build())
.once();
server.expect().withPath("/apis/apps/v1/namespaces/test/deployments/de1/scale")
.andReturn(200, new DeploymentBuilder().withNewMetadata().withName("de1")
.withResourceVersion("1").endMetadata().withNewSpec().withReplicas(5).endSpec().withNewStatus()
.withReplicas(5).endStatus().build())
.always();
Exchange ex = template.request("direct:scaleDeployment", exchange -> {
exchange.getIn().setHeader(KubernetesConstants.KUBERNETES_NAMESPACE_NAME, "test");
exchange.getIn().setHeader(KubernetesConstants.KUBERNETES_DEPLOYMENT_NAME, "de1");
exchange.getIn().setHeader(KubernetesConstants.KUBERNETES_DEPLOYMENT_REPLICAS, 5);
});
int replicas = ex.getMessage().getBody(Integer.class);
assertEquals(5, replicas);
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
from("direct:list")
.toF("kubernetes-deployments:///?kubernetesClient=#kubernetesClient&operation=listDeployments");
from("direct:listByLabels")
.toF("kubernetes-deployments:///?kubernetesClient=#kubernetesClient&operation=listDeploymentsByLabels");
from("direct:deleteDeployment")
.toF("kubernetes-deployments:///?kubernetesClient=#kubernetesClient&operation=deleteDeployment");
from("direct:createDeployment")
.toF("kubernetes-deployments:///?kubernetesClient=#kubernetesClient&operation=createDeployment");
from("direct:createDeploymentWithAnnotations")
.toF("kubernetes-deployments:///?kubernetesClient=#kubernetesClient&operation=createDeployment");
from("direct:updateDeployment")
.toF("kubernetes-deployments:///?kubernetesClient=#kubernetesClient&operation=updateDeployment");
from("direct:scaleDeployment")
.toF("kubernetes-deployments:///?kubernetesClient=#kubernetesClient&operation=scaleDeployment");
}
};
}
}
|
KubernetesDeploymentsProducerTest
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/idgen/userdefined/BeforeExecutionAssignedValuesTest.java
|
{
"start": 7170,
"end": 7407
}
|
class ____ extends AssignedGenerator {
@Override
public EnumSet<EventType> getEventTypes() {
return EventTypeSets.INSERT_ONLY;
}
@Override
public boolean allowAssignedIdentifiers() {
return true;
}
}
}
|
AssignedIdGenerator
|
java
|
apache__hadoop
|
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/protocolrecords/GetJobReportResponse.java
|
{
"start": 938,
"end": 1078
}
|
interface ____ {
public abstract JobReport getJobReport();
public abstract void setJobReport(JobReport jobReport);
}
|
GetJobReportResponse
|
java
|
apache__camel
|
core/camel-core/src/test/java/org/apache/camel/component/log/LogListenerTest.java
|
{
"start": 1195,
"end": 2489
}
|
class ____ {
private static boolean listenerFired;
@Test
public void testLogMask() throws Exception {
listenerFired = false;
CamelContext context = createCamelContext();
MockEndpoint mock = context.getEndpoint("mock:foo", MockEndpoint.class);
mock.expectedMessageCount(1);
context.getCamelContextExtension().addLogListener((exchange, camelLogger, message) -> {
assertEquals("Exchange[ExchangePattern: InOnly, BodyType: String, Body: hello]", message);
listenerFired = true;
return message + " - modified by listener";
});
context.start();
context.createProducerTemplate().sendBody("direct:foo", "hello");
mock.assertIsSatisfied();
assertTrue(listenerFired);
context.stop();
}
protected CamelContext createCamelContext() throws Exception {
CamelContext context = new DefaultCamelContext();
context.addRoutes(createRouteBuilder());
return context;
}
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
from("direct:foo").routeId("foo").to("log:foo").to("mock:foo");
}
};
}
}
|
LogListenerTest
|
java
|
apache__rocketmq
|
store/src/test/java/org/apache/rocketmq/store/timer/TimerCheckPointTest.java
|
{
"start": 1067,
"end": 5629
}
|
class ____ {
private String baseDir;
@Before
public void init() throws IOException {
baseDir = StoreTestUtils.createBaseDir();
}
@Test
public void testCheckPoint() throws IOException {
String baseSrc = baseDir + File.separator + "timercheck";
TimerCheckpoint first = new TimerCheckpoint(baseSrc);
assertEquals(0, first.getLastReadTimeMs());
assertEquals(0, first.getLastTimerLogFlushPos());
assertEquals(0, first.getLastTimerQueueOffset());
assertEquals(0, first.getMasterTimerQueueOffset());
first.setLastReadTimeMs(1000);
first.setLastTimerLogFlushPos(1100);
first.setLastTimerQueueOffset(1200);
first.setMasterTimerQueueOffset(1300);
first.shutdown();
TimerCheckpoint second = new TimerCheckpoint(baseSrc);
assertEquals(1000, second.getLastReadTimeMs());
assertEquals(1100, second.getLastTimerLogFlushPos());
assertEquals(1200, second.getLastTimerQueueOffset());
assertEquals(1300, second.getMasterTimerQueueOffset());
}
@Test
public void testNewCheckPoint() throws IOException {
String baseSrc = baseDir + File.separator + "timercheck2";
TimerCheckpoint first = new TimerCheckpoint(baseSrc);
assertEquals(0, first.getLastReadTimeMs());
assertEquals(0, first.getLastTimerLogFlushPos());
assertEquals(0, first.getLastTimerQueueOffset());
assertEquals(0, first.getMasterTimerQueueOffset());
assertEquals(0, first.getDataVersion().getStateVersion());
assertEquals(0, first.getDataVersion().getCounter().get());
first.setLastReadTimeMs(1000);
first.setLastTimerLogFlushPos(1100);
first.setLastTimerQueueOffset(1200);
first.setMasterTimerQueueOffset(1300);
first.getDataVersion().setStateVersion(1400);
first.getDataVersion().setTimestamp(1500);
first.getDataVersion().setCounter(new AtomicLong(1600));
first.shutdown();
TimerCheckpoint second = new TimerCheckpoint(baseSrc);
assertEquals(1000, second.getLastReadTimeMs());
assertEquals(1100, second.getLastTimerLogFlushPos());
assertEquals(1200, second.getLastTimerQueueOffset());
assertEquals(1300, second.getMasterTimerQueueOffset());
assertEquals(1400, second.getDataVersion().getStateVersion());
assertEquals(1500, second.getDataVersion().getTimestamp());
assertEquals(1600, second.getDataVersion().getCounter().get());
}
@Test
public void testEncodeDecode() throws IOException {
TimerCheckpoint first = new TimerCheckpoint();
first.setLastReadTimeMs(1000);
first.setLastTimerLogFlushPos(1100);
first.setLastTimerQueueOffset(1200);
first.setMasterTimerQueueOffset(1300);
TimerCheckpoint second = TimerCheckpoint.decode(TimerCheckpoint.encode(first));
assertEquals(first.getLastReadTimeMs(), second.getLastReadTimeMs());
assertEquals(first.getLastTimerLogFlushPos(), second.getLastTimerLogFlushPos());
assertEquals(first.getLastTimerQueueOffset(), second.getLastTimerQueueOffset());
assertEquals(first.getMasterTimerQueueOffset(), second.getMasterTimerQueueOffset());
}
@Test
public void testNewEncodeDecode() throws IOException {
TimerCheckpoint first = new TimerCheckpoint();
first.setLastReadTimeMs(1000);
first.setLastTimerLogFlushPos(1100);
first.setLastTimerQueueOffset(1200);
first.setMasterTimerQueueOffset(1300);
first.getDataVersion().setStateVersion(1400);
first.getDataVersion().setTimestamp(1500);
first.getDataVersion().setCounter(new AtomicLong(1600));
TimerCheckpoint second = TimerCheckpoint.decode(TimerCheckpoint.encode(first));
assertEquals(first.getLastReadTimeMs(), second.getLastReadTimeMs());
assertEquals(first.getLastTimerLogFlushPos(), second.getLastTimerLogFlushPos());
assertEquals(first.getLastTimerQueueOffset(), second.getLastTimerQueueOffset());
assertEquals(first.getMasterTimerQueueOffset(), second.getMasterTimerQueueOffset());
assertEquals(first.getDataVersion().getStateVersion(), 1400);
assertEquals(first.getDataVersion().getTimestamp(), 1500);
assertEquals(first.getDataVersion().getCounter().get(), 1600);
}
@After
public void shutdown() {
if (null != baseDir) {
StoreTestUtils.deleteFile(baseDir);
}
}
}
|
TimerCheckPointTest
|
java
|
elastic__elasticsearch
|
x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/secrets/action/PostConnectorSecretRequest.java
|
{
"start": 990,
"end": 3189
}
|
class ____ extends LegacyActionRequest {
private static final ParseField VALUE_FIELD = new ParseField("value");
public static final ConstructingObjectParser<PostConnectorSecretRequest, Void> PARSER = new ConstructingObjectParser<>(
"post_secret_request",
args -> new PostConnectorSecretRequest((String) args[0])
);
static {
PARSER.declareField(
ConstructingObjectParser.optionalConstructorArg(),
(p, c) -> p.text(),
VALUE_FIELD,
ObjectParser.ValueType.STRING
);
}
public static PostConnectorSecretRequest fromXContent(XContentParser parser) throws IOException {
return PARSER.parse(parser, null);
}
private final String value;
public PostConnectorSecretRequest(String value) {
this.value = value;
}
public PostConnectorSecretRequest(StreamInput in) throws IOException {
super(in);
this.value = in.readString();
}
public String value() {
return value;
}
public XContentBuilder toXContent(XContentBuilder builder) throws IOException {
builder.startObject();
builder.field(VALUE_FIELD.getPreferredName(), this.value);
builder.endObject();
return builder;
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
out.writeString(value);
}
@Override
public ActionRequestValidationException validate() {
ActionRequestValidationException validationException = null;
if (Strings.isNullOrEmpty(this.value)) {
validationException = addValidationError("[value] of the connector secret cannot be [null] or [\"\"]", validationException);
}
return validationException;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
PostConnectorSecretRequest that = (PostConnectorSecretRequest) o;
return Objects.equals(value, that.value);
}
@Override
public int hashCode() {
return Objects.hash(value);
}
}
|
PostConnectorSecretRequest
|
java
|
apache__kafka
|
storage/src/main/java/org/apache/kafka/storage/internals/log/LogCleaningState.java
|
{
"start": 978,
"end": 1297
}
|
interface ____ {
LogCleaningInProgress LOG_CLEANING_IN_PROGRESS = new LogCleaningInProgress();
LogCleaningAborted LOG_CLEANING_ABORTED = new LogCleaningAborted();
static LogCleaningPaused logCleaningPaused(int pausedCount) {
return new LogCleaningPaused(pausedCount);
}
final
|
LogCleaningState
|
java
|
apache__camel
|
components/camel-smb/src/test/java/org/apache/camel/component/smb/SmbConsumerPartialReadNoPathIT.java
|
{
"start": 1328,
"end": 2900
}
|
class ____ extends SmbServerTestSupport {
private final String uuid = new DefaultUuidGenerator().generateUuid() + ".txt";;
@Override
public void doPostSetup() throws Exception {
template.sendBodyAndHeader(getSmbUrl(), "Hello Uuid", Exchange.FILE_NAME, uuid);
}
protected String getSmbUrl() {
return String.format(
"smb:%s/%s?username=%s&password=%s&move=done&moveFailed=failed&searchPattern=%s",
service.address(), service.shareName(), service.userName(), service.password(), uuid);
}
@Test
public void testSmbSimpleConsumeNoPath() throws Exception {
NotifyBuilder nb = new NotifyBuilder(context).whenDone(1).create();
MockEndpoint mock = getMockEndpoint("mock:result");
mock.expectedBodiesReceived("Hello Uuid");
mock.expectedHeaderReceived(Exchange.FILE_NAME, uuid);
MockEndpoint.assertIsSatisfied(context);
assertTrue(nb.matchesWaitTime());
await().atMost(5, TimeUnit.SECONDS)
.untilAsserted(() -> {
byte[] arr = copyFileContentFromContainer("/data/rw/failed/" + uuid);
assertEquals("Hello Uuid", new String(arr));
});
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
from(getSmbUrl()).to("mock:result").throwException(new IllegalArgumentException("Forced"));
}
};
}
}
|
SmbConsumerPartialReadNoPathIT
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/UnnecessaryMethodInvocationMatcherTest.java
|
{
"start": 1918,
"end": 2405
}
|
class ____ {
private static final Matcher<ExpressionTree> TO_STRING =
instanceMethod().anyClass().named("toString");
}
""")
.doTest();
}
@Test
public void descendIntoCombinators() {
refactoringTestHelper
.addInputLines(
"Test.java",
"""
import static com.google.errorprone.matchers.Matchers.*;
import com.google.errorprone.matchers.Matcher;
import com.sun.source.tree.ExpressionTree;
public
|
Test
|
java
|
apache__camel
|
components/camel-snakeyaml/src/test/java/org/apache/camel/component/snakeyaml/SnakeYAMLTypeFilterHelper.java
|
{
"start": 4011,
"end": 4966
}
|
class ____ can unmarshall, UnsafePojo should not be allowed");
// Wrapped by SnakeYAML
assertTrue(ex.getCause() instanceof ConstructorException);
// Thrown by SnakeYAMLDataFormat
assertTrue(ex.getCause().getCause() instanceof IllegalArgumentException);
}
static void testAllowAllConstructor(ProducerTemplate template) {
Object testPojo = assertDoesNotThrow(() -> template.requestBody(
"direct:all-constructor",
"!!org.apache.camel.component.snakeyaml.model.TestPojo {name: Camel}"));
assertNotNull(testPojo);
assertTrue(testPojo instanceof TestPojo);
Object unsafePojo = assertDoesNotThrow(() -> template.requestBody(
"direct:all-constructor",
"!!org.apache.camel.component.snakeyaml.model.UnsafePojo {name: Camel}"));
assertNotNull(unsafePojo);
assertTrue(unsafePojo instanceof UnsafePojo);
}
}
|
is
|
java
|
apache__hadoop
|
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/reduce/IntSumReducer.java
|
{
"start": 1151,
"end": 1612
}
|
class ____<Key> extends Reducer<Key,IntWritable,
Key,IntWritable> {
private IntWritable result = new IntWritable();
public void reduce(Key key, Iterable<IntWritable> values,
Context context) throws IOException, InterruptedException {
int sum = 0;
for (IntWritable val : values) {
sum += val.get();
}
result.set(sum);
context.write(key, result);
}
}
|
IntSumReducer
|
java
|
junit-team__junit5
|
jupiter-tests/src/test/java/org/junit/jupiter/engine/extension/ExtensionRegistrationViaParametersAndFieldsTests.java
|
{
"start": 21019,
"end": 21287
}
|
class ____ {
@ExtendWith(LongParameterResolver.class)
@RegisterExtension
DummyExtension dummy = new DummyExtension();
@Test
void test(Long number) {
assertThat(number).isEqualTo(42L);
}
}
@NullUnmarked
static
|
MultipleMixedRegistrationsViaFieldTestCase
|
java
|
ReactiveX__RxJava
|
src/main/java/io/reactivex/rxjava3/internal/operators/observable/ObservableMapNotification.java
|
{
"start": 945,
"end": 2066
}
|
class ____<T, R> extends AbstractObservableWithUpstream<T, ObservableSource<? extends R>> {
final Function<? super T, ? extends ObservableSource<? extends R>> onNextMapper;
final Function<? super Throwable, ? extends ObservableSource<? extends R>> onErrorMapper;
final Supplier<? extends ObservableSource<? extends R>> onCompleteSupplier;
public ObservableMapNotification(
ObservableSource<T> source,
Function<? super T, ? extends ObservableSource<? extends R>> onNextMapper,
Function<? super Throwable, ? extends ObservableSource<? extends R>> onErrorMapper,
Supplier<? extends ObservableSource<? extends R>> onCompleteSupplier) {
super(source);
this.onNextMapper = onNextMapper;
this.onErrorMapper = onErrorMapper;
this.onCompleteSupplier = onCompleteSupplier;
}
@Override
public void subscribeActual(Observer<? super ObservableSource<? extends R>> t) {
source.subscribe(new MapNotificationObserver<>(t, onNextMapper, onErrorMapper, onCompleteSupplier));
}
static final
|
ObservableMapNotification
|
java
|
elastic__elasticsearch
|
modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SIf.java
|
{
"start": 682,
"end": 1624
}
|
class ____ extends AStatement {
private final AExpression conditionNode;
private final SBlock ifBlockNode;
public SIf(int identifier, Location location, AExpression conditionNode, SBlock ifBlockNode) {
super(identifier, location);
this.conditionNode = Objects.requireNonNull(conditionNode);
this.ifBlockNode = ifBlockNode;
}
public AExpression getConditionNode() {
return conditionNode;
}
public SBlock getIfBlockNode() {
return ifBlockNode;
}
@Override
public <Scope> void visit(UserTreeVisitor<Scope> userTreeVisitor, Scope scope) {
userTreeVisitor.visitIf(this, scope);
}
@Override
public <Scope> void visitChildren(UserTreeVisitor<Scope> userTreeVisitor, Scope scope) {
conditionNode.visit(userTreeVisitor, scope);
if (ifBlockNode != null) {
ifBlockNode.visit(userTreeVisitor, scope);
}
}
}
|
SIf
|
java
|
google__dagger
|
hilt-compiler/main/java/dagger/hilt/android/processor/internal/androidentrypoint/ViewGenerator.java
|
{
"start": 2120,
"end": 6811
}
|
class ____$CLASS extends $BASE implements
// ComponentManagerHolder<ViewComponentManager<$CLASS_EntryPoint>> {
// ...
// }
public void generate() {
// Note: we do not use the Generators helper methods here because injection is called
// from the constructor where the double-check pattern doesn't work (due to the super
// constructor being called before fields are initialized) and because it isn't necessary
// since the object isn't done constructing yet.
TypeSpec.Builder builder =
TypeSpec.classBuilder(generatedClassName.simpleName())
.superclass(metadata.baseClassName())
.addModifiers(metadata.generatedClassModifiers());
JavaPoetExtKt.addOriginatingElement(builder, metadata.element());
Generators.addGeneratedBaseClassJavadoc(builder, AndroidClassNames.ANDROID_ENTRY_POINT);
Processors.addGeneratedAnnotation(builder, env, getClass());
Generators.copyLintAnnotations(metadata.element(), builder);
Generators.copySuppressAnnotations(metadata.element(), builder);
metadata.baseElement().getTypeParameters().stream()
.map(XTypeParameterElement::getTypeVariableName)
.forEachOrdered(builder::addTypeVariable);
Generators.addComponentOverride(metadata, builder);
Generators.addInjectionMethods(metadata, builder);
metadata.baseElement().getConstructors().stream()
.filter(constructor -> Generators.isConstructorVisibleToSubclass(
constructor, metadata.element()))
.map(this::constructorMethod)
.forEach(builder::addMethod);
env.getFiler()
.write(
JavaFile.builder(generatedClassName.packageName(), builder.build()).build(),
XFiler.Mode.Isolating);
}
/**
* Returns a pass-through constructor matching the base class's provided constructorElement. The
* generated constructor simply calls super(), then inject().
*
* <p>Eg
*
* <pre>
* Hilt_$CLASS(Context context, ...) {
* super(context, ...);
* if (!isInEditMode()) {
* inject();
* }
* }
* </pre>
*/
private MethodSpec constructorMethod(XConstructorElement constructor) {
MethodSpec.Builder builder = Generators.copyConstructor(constructor).toBuilder();
// TODO(b/210544481): Once this bug is fixed we should require that the user adds this
// annotation to their constructor and we'll propagate it from there rather than trying to
// guess whether this needs @TargetApi from the signature. This check is a bit flawed. For
// example, the user could write a 5 parameter constructor that calls the restricted 4 parameter
// constructor and we would miss adding @TargetApi to it.
if (isRestrictedApiConstructor(constructor)) {
// 4 parameter constructors are only available on @TargetApi(21).
builder.addAnnotation(
AnnotationSpec.builder(AndroidClassNames.TARGET_API).addMember("value", "21").build());
}
builder.beginControlFlow("if(!isInEditMode())")
.addStatement("inject()")
.endControlFlow();
return builder.build();
}
private boolean isRestrictedApiConstructor(XConstructorElement constructor) {
if (constructor.getParameters().size() != 4) {
return false;
}
List<XExecutableParameterElement> constructorParams = constructor.getParameters();
for (int i = 0; i < constructorParams.size(); i++) {
XType type = constructorParams.get(i).getType();
switch (i) {
case 0:
if (!isFirstRestrictedParameter(type)) {
return false;
}
break;
case 1:
if (!isSecondRestrictedParameter(type)) {
return false;
}
break;
case 2:
if (!isThirdRestrictedParameter(type)) {
return false;
}
break;
case 3:
if (!isFourthRestrictedParameter(type)) {
return false;
}
break;
default:
return false;
}
}
return true;
}
private static boolean isFourthRestrictedParameter(XType type) {
return isPrimitive(type) && isInt(type);
}
private static boolean isThirdRestrictedParameter(XType type) {
return isPrimitive(type) && isInt(type);
}
private static boolean isSecondRestrictedParameter(XType type) {
return isDeclared(type)
&& Processors.isAssignableFrom(type.getTypeElement(), AndroidClassNames.ATTRIBUTE_SET);
}
private static boolean isFirstRestrictedParameter(XType type) {
return isDeclared(type)
&& Processors.isAssignableFrom(type.getTypeElement(), AndroidClassNames.CONTEXT);
}
}
|
Hilt_
|
java
|
FasterXML__jackson-databind
|
src/main/java/tools/jackson/databind/deser/jdk/EnumDeserializer.java
|
{
"start": 13042,
"end": 16425
}
|
enum ____
if (Boolean.TRUE.equals(_caseInsensitive)) {
Object match = lookup.findCaseInsensitive(name);
if (match != null) {
return match;
}
}
if (!ctxt.isEnabled(EnumFeature.FAIL_ON_NUMBERS_FOR_ENUMS)
&& !_isFromIntValue) {
// [databind#149]: Allow use of 'String' indexes as well -- unless prohibited (as per above)
char c = name.charAt(0);
if (c >= '0' && c <= '9') {
// [databind#4403]: cannot prevent "Stringified" numbers as Enum
// index yet (might need combination of "Does format have Numbers"
// (XML does not f.ex) and new `EnumFeature`. But can disallow "001" etc.
if (c == '0' && name.length() > 1) {
; // skip
} else {
try {
int index = Integer.parseInt(name);
if (!ctxt.isEnabled(MapperFeature.ALLOW_COERCION_OF_SCALARS)) {
return ctxt.handleWeirdStringValue(_enumClass(), name,
"value looks like quoted Enum index, but `DeserializationFeature.ALLOW_COERCION_OF_SCALARS` prevents use"
);
}
if (index >= 0 && index < _enumsByIndex.length) {
return _enumsByIndex[index];
}
} catch (NumberFormatException e) {
// fine, ignore, was not an integer
}
}
}
}
}
if (useDefaultValueForUnknownEnum(ctxt)) {
return _enumDefaultValue;
}
if (useNullForUnknownEnum(ctxt)) {
return null;
}
return ctxt.handleWeirdStringValue(_enumClass(), name,
"not one of the values accepted for Enum class: %s", lookup.keys());
}
protected Object _deserializeOther(JsonParser p, DeserializationContext ctxt)
throws JacksonException
{
// [databind#381]
if (p.hasToken(JsonToken.START_ARRAY)) {
return _deserializeFromArray(p, ctxt);
}
return ctxt.handleUnexpectedToken(getValueType(ctxt), p);
}
protected Class<?> _enumClass() {
return handledType();
}
// @since 2.15
protected boolean useNullForUnknownEnum(DeserializationContext ctxt) {
if (_useNullForUnknownEnum != null) {
return _useNullForUnknownEnum;
}
return ctxt.isEnabled(EnumFeature.READ_UNKNOWN_ENUM_VALUES_AS_NULL);
}
// @since 2.15
protected boolean useDefaultValueForUnknownEnum(DeserializationContext ctxt) {
// If we have a default value...
if (_enumDefaultValue != null) {
// Check if FormatFeature overrides exist first
if (_useDefaultValueForUnknownEnum != null) {
return _useDefaultValueForUnknownEnum;
}
// Otherwise, check the global setting
return ctxt.isEnabled(EnumFeature.READ_UNKNOWN_ENUM_VALUES_USING_DEFAULT_VALUE);
}
// No default value? then false
return false;
}
}
|
deserialization
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/inlineme/InlinerTest.java
|
{
"start": 44182,
"end": 44485
}
|
class ____ {
public void doTest() {
Object value = 42L;
Client.before((Long) value);
}
}
""")
.addOutputLines(
"Caller.java",
"import com.google.foo.Client;",
"public final
|
Caller
|
java
|
quarkusio__quarkus
|
extensions/micrometer/deployment/src/test/java/io/quarkus/micrometer/deployment/MetricFiltersTest.java
|
{
"start": 1748,
"end": 1928
}
|
class ____ {
@Produces
@Singleton
public MeterFilter producedMeterFilter() {
return new NonAnnotatedFilter();
}
}
}
|
MeterFilterProducer
|
java
|
spring-cloud__spring-cloud-gateway
|
spring-cloud-gateway-server-webmvc/src/main/java/org/springframework/cloud/gateway/server/mvc/filter/GatewayRetryFilterFunctions.java
|
{
"start": 4600,
"end": 5103
}
|
class ____ extends NeverRetryPolicy {
private final RetryConfig config;
public HttpRetryPolicy(RetryConfig config) {
this.config = config;
}
@Override
public boolean canRetry(RetryContext context) {
// TODO: custom exception
if (context.getLastThrowable() instanceof RetryException e) {
return isRetryableStatusCode(e.getResponse().statusCode(), config)
&& isRetryableMethod(e.getRequest().method(), config);
}
return super.canRetry(context);
}
}
}
|
HttpRetryPolicy
|
java
|
spring-projects__spring-framework
|
spring-web/src/test/java/org/springframework/web/method/annotation/ModelFactoryOrderingTests.java
|
{
"start": 6462,
"end": 7579
}
|
class ____ extends AbstractController {
@ModelAttribute
public A getA(Model model) throws IOException {
return updateAndReturn(model, "getA", new A());
}
@ModelAttribute
public B1 getB1(@ModelAttribute A a, Model model) throws IOException {
return updateAndReturn(model, "getB1", new B1());
}
@ModelAttribute
public B2 getB2(@ModelAttribute B1 b1, Model model) throws IOException {
return updateAndReturn(model, "getB2", new B2());
}
@ModelAttribute
public C1 getC1(@ModelAttribute B2 b2, Model model) throws IOException {
return updateAndReturn(model, "getC1", new C1());
}
@ModelAttribute
public C2 getC2(@ModelAttribute C1 c1, Model model) throws IOException {
return updateAndReturn(model, "getC2", new C2());
}
@ModelAttribute
public C3 getC3(@ModelAttribute C2 c2, Model model) throws IOException {
return updateAndReturn(model, "getC3", new C3());
}
@ModelAttribute
public C4 getC4(@ModelAttribute C3 c3, Model model) throws IOException {
return updateAndReturn(model, "getC4", new C4());
}
}
private static
|
StraightLineDependencyController
|
java
|
FasterXML__jackson-core
|
src/test/java/tools/jackson/core/unittest/json/async/AsyncParserInvalidKeywordsTest.java
|
{
"start": 482,
"end": 2452
}
|
class ____ extends AsyncTestBase
{
private final JsonFactory JSON_F = newStreamFactory();
@Test
void invalidKeywordsAfterMatching1st() throws Exception
{
_doTestInvalidKeyword("nul");
_doTestInvalidKeyword("nulla");
_doTestInvalidKeyword("fal");
_doTestInvalidKeyword("fals0");
_doTestInvalidKeyword("falsett0");
_doTestInvalidKeyword("tr");
_doTestInvalidKeyword("truE");
_doTestInvalidKeyword("treu");
_doTestInvalidKeyword("trueenough");
}
@Test
void invalidKeywordsAfterNonMatching1st() throws Exception
{
_doTestInvalidKeyword("Null");
_doTestInvalidKeyword("False");
_doTestInvalidKeyword("C");
_doTestInvalidKeyword("xy");
}
private void _doTestInvalidKeyword(String value)
{
final String EXP_MAIN = "Unrecognized token '"+value+"'";
final String EXP_ALT = "Unexpected character ('"+value.charAt(0)+"' (code";
String doc = "{ \"key1\" : "+value+" }";
try (AsyncReaderWrapper p = _createParser(doc)) {
assertToken(JsonToken.START_OBJECT, p.nextToken());
assertToken(JsonToken.PROPERTY_NAME, p.nextToken());
p.nextToken();
fail("Expected an exception for malformed value keyword");
} catch (StreamReadException jex) {
verifyException(jex, EXP_MAIN, EXP_ALT);
}
// Try as root-level value as well:
doc = value + " "; // may need space after for DataInput
try (AsyncReaderWrapper p = _createParser(doc)) {
p.nextToken();
fail("Expected an exception for malformed value keyword");
} catch (StreamReadException jex) {
verifyException(jex, EXP_MAIN, EXP_ALT);
}
}
private AsyncReaderWrapper _createParser(String doc)
{
return asyncForBytes(JSON_F, 1, _jsonDoc(doc), 1);
}
}
|
AsyncParserInvalidKeywordsTest
|
java
|
elastic__elasticsearch
|
build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/JdkDownloadPlugin.java
|
{
"start": 1338,
"end": 9743
}
|
class ____ implements Plugin<Project> {
public static final String VENDOR_ADOPTIUM = "adoptium";
public static final String VENDOR_OPENJDK = "openjdk";
public static final String VENDOR_ZULU = "zulu";
private static final String REPO_NAME_PREFIX = "jdk_repo_";
private static final String EXTENSION_NAME = "jdks";
public static final String JDK_TRIMMED_PREFIX = "(jdk-?\\d.*)|(zulu-?\\d.*).jdk";
public static final String ZULU_LINUX_AARCH_PATTERN = "zulu.*_aarch64";
@Override
public void apply(Project project) {
Attribute<Boolean> jdkAttribute = Attribute.of("jdk", Boolean.class);
project.getDependencies().getAttributesSchema().attribute(jdkAttribute);
project.getDependencies().getArtifactTypes().maybeCreate(ArtifactTypeDefinition.ZIP_TYPE);
project.getDependencies().registerTransform(UnzipTransform.class, transformSpec -> {
transformSpec.getFrom()
.attribute(ArtifactTypeDefinition.ARTIFACT_TYPE_ATTRIBUTE, ArtifactTypeDefinition.ZIP_TYPE)
.attribute(jdkAttribute, true);
transformSpec.getTo()
.attribute(ArtifactTypeDefinition.ARTIFACT_TYPE_ATTRIBUTE, ArtifactTypeDefinition.DIRECTORY_TYPE)
.attribute(jdkAttribute, true);
transformSpec.parameters(parameters -> parameters.setTrimmedPrefixPattern(JDK_TRIMMED_PREFIX));
});
ArtifactTypeDefinition tarArtifactTypeDefinition = project.getDependencies().getArtifactTypes().maybeCreate("tar.gz");
project.getDependencies().registerTransform(SymbolicLinkPreservingUntarTransform.class, transformSpec -> {
transformSpec.getFrom()
.attribute(ArtifactTypeDefinition.ARTIFACT_TYPE_ATTRIBUTE, tarArtifactTypeDefinition.getName())
.attribute(jdkAttribute, true);
transformSpec.getTo()
.attribute(ArtifactTypeDefinition.ARTIFACT_TYPE_ATTRIBUTE, ArtifactTypeDefinition.DIRECTORY_TYPE)
.attribute(jdkAttribute, true);
transformSpec.parameters(parameters -> {
parameters.setTrimmedPrefixPattern(JDK_TRIMMED_PREFIX);
// parameters.setAsFiletreeOutput(true);
// parameters.setKeepStructureFor(Arrays.asList(ZULU_LINUX_AARCH_PATTERN));
});
});
NamedDomainObjectContainer<Jdk> jdksContainer = project.container(Jdk.class, name -> {
Configuration configuration = project.getConfigurations().create("jdk_" + name);
configuration.setCanBeConsumed(false);
configuration.getAttributes().attribute(ArtifactTypeDefinition.ARTIFACT_TYPE_ATTRIBUTE, ArtifactTypeDefinition.DIRECTORY_TYPE);
configuration.getAttributes().attribute(jdkAttribute, true);
Jdk jdk = new Jdk(name, configuration, project.getObjects());
configuration.defaultDependencies(dependencies -> {
jdk.finalizeValues();
setupRepository(project, jdk);
dependencies.add(project.getDependencies().create(dependencyNotation(jdk)));
});
return jdk;
});
project.getExtensions().add(EXTENSION_NAME, jdksContainer);
}
private void setupRepository(Project project, Jdk jdk) {
RepositoryHandler repositories = project.getRepositories();
/*
* Define the appropriate repository for the given JDK vendor and version
*
* For Oracle/OpenJDK/Adoptium we define a repository per-version.
*/
String repoName = REPO_NAME_PREFIX + jdk.getVendor() + "_" + jdk.getVersion();
String repoUrl;
String artifactPattern;
if (jdk.getVendor().equals(VENDOR_ADOPTIUM)) {
repoUrl = "https://api.adoptium.net/v3/binary/version/";
if (jdk.getMajor().equals("8")) {
// legacy pattern for JDK 8
artifactPattern = "jdk"
+ jdk.getBaseVersion()
+ "-"
+ jdk.getBuild()
+ "/[module]/[classifier]/jdk/hotspot/normal/adoptium";
} else {
// current pattern since JDK 9
artifactPattern = "jdk-"
+ jdk.getBaseVersion()
+ "+"
+ jdk.getBuild()
+ "/[module]/[classifier]/jdk/hotspot/normal/adoptium";
}
} else if (jdk.getVendor().equals(VENDOR_OPENJDK)) {
if ("ea".equals(jdk.getDistributionVersion())) {
repoUrl = "https://builds.es-jdk-archive.com/";
// current pattern since 12.0.1
artifactPattern = "jdks/openjdk/"
+ jdk.getMajor()
+ "/openjdk-[revision]/openjdk-[revision]_[module]-[classifier]_bin.[ext]";
} else if ("rc".equals(jdk.getDistributionVersion())) {
repoUrl = "https://builds.es-jdk-archive.com/";
// current pattern since 12.0.1
artifactPattern = "jdks/openjdk/"
+ jdk.getMajor()
+ "/openjdk-[revision]/openjdk-"
+ jdk.getMajor()
+ "_[module]-[classifier]_bin.[ext]";
} else {
repoUrl = "https://download.oracle.com";
if (jdk.getHash() != null) {
// current pattern since 12.0.1
artifactPattern = "java/GA/jdk"
+ jdk.getBaseVersion()
+ "/"
+ jdk.getHash()
+ "/"
+ jdk.getBuild()
+ "/GPL/openjdk-[revision]_[module]-[classifier]_bin.[ext]";
} else {
// simpler legacy pattern from JDK 9 to JDK 12 that we are advocating to Oracle to bring back
artifactPattern = "java/GA/jdk"
+ jdk.getMajor()
+ "/"
+ jdk.getBuild()
+ "/GPL/openjdk-[revision]_[module]-[classifier]_bin.[ext]";
}
}
} else if (jdk.getVendor().equals(VENDOR_ZULU)) {
repoUrl = "https://cdn.azul.com";
if (jdk.getMajor().equals("8") && isJdkOnMacOsPlatform(jdk) && jdk.getArchitecture().equals("aarch64")) {
artifactPattern = "zulu/bin/zulu"
+ jdk.getDistributionVersion()
+ "-ca-jdk"
+ jdk.getBaseVersion().replace("u", ".0.")
+ "-[module]x_[classifier].[ext]";
} else {
throw new GradleException("JDK vendor zulu is supported only for JDK8 on MacOS with Apple Silicon.");
}
} else {
throw new GradleException("Unknown JDK vendor [" + jdk.getVendor() + "]");
}
// Define the repository if we haven't already
if (repositories.findByName(repoName) == null) {
repositories.ivy(repo -> {
repo.setName(repoName);
repo.setUrl(repoUrl);
repo.metadataSources(IvyArtifactRepository.MetadataSources::artifact);
repo.patternLayout(layout -> layout.artifact(artifactPattern));
repo.content(repositoryContentDescriptor -> repositoryContentDescriptor.includeGroup(groupName(jdk)));
});
}
}
/**
 * Returns the {@code Jdk} container this plugin registered on the project.
 * <p>
 * The extension is stored untyped under {@code EXTENSION_NAME}, hence the
 * unchecked cast back to {@code NamedDomainObjectContainer<Jdk>}.
 */
@SuppressWarnings("unchecked")
public static NamedDomainObjectContainer<Jdk> getContainer(Project project) {
    return (NamedDomainObjectContainer<Jdk>) project.getExtensions().getByName(EXTENSION_NAME);
}
/**
 * Builds the Ivy dependency notation ({@code group:module:revision:classifier@ext})
 * used to resolve the given JDK from the repositories configured above.
 */
private static String dependencyNotation(Jdk jdk) {
    final String module;
    if (isJdkOnMacOsPlatform(jdk)) {
        // Adoptium publishes macOS artifacts under "mac"; the other vendors use "macos".
        module = jdk.getVendor().equals(VENDOR_ADOPTIUM) ? "mac" : "macos";
    } else {
        module = jdk.getPlatform();
    }
    final String extension = "windows".equals(jdk.getPlatform()) ? "zip" : "tar.gz";
    return String.join(":", groupName(jdk), module, jdk.getBaseVersion(), jdk.getArchitecture()) + "@" + extension;
}
/** Returns {@code true} when the JDK's platform is macOS (declared as "darwin" or "mac"). */
private static boolean isJdkOnMacOsPlatform(Jdk jdk) {
    final String platform = jdk.getPlatform();
    return "darwin".equals(platform) || "mac".equals(platform);
}
/** Ivy group name for a JDK dependency: {@code <vendor>_<major>}. */
private static String groupName(Jdk jdk) {
    return String.join("_", jdk.getVendor(), jdk.getMajor());
}
}
|
JdkDownloadPlugin
|
java
|
elastic__elasticsearch
|
x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.java
|
{
"start": 259127,
"end": 260633
}
|
// Parse-tree context for a bracketed list of boolean literals:
// OPENING_BRACKET booleanValue (COMMA booleanValue)* CLOSING_BRACKET.
// NOTE(review): this looks like ANTLR-generated parser code — prefer changing the
// grammar (.g4) and regenerating over editing this class directly.
class ____ extends ConstantContext {
    public TerminalNode OPENING_BRACKET() { return getToken(EsqlBaseParser.OPENING_BRACKET, 0); }
    // All booleanValue children of this node.
    public List<BooleanValueContext> booleanValue() {
        return getRuleContexts(BooleanValueContext.class);
    }
    // The i-th booleanValue child.
    public BooleanValueContext booleanValue(int i) {
        return getRuleContext(BooleanValueContext.class,i);
    }
    public TerminalNode CLOSING_BRACKET() { return getToken(EsqlBaseParser.CLOSING_BRACKET, 0); }
    public List<TerminalNode> COMMA() { return getTokens(EsqlBaseParser.COMMA); }
    public TerminalNode COMMA(int i) {
        return getToken(EsqlBaseParser.COMMA, i);
    }
    @SuppressWarnings("this-escape")
    public BooleanArrayLiteralContext(ConstantContext ctx) { copyFrom(ctx); }
    // Listener/visitor dispatch hooks generated by ANTLR.
    @Override
    public void enterRule(ParseTreeListener listener) {
        if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).enterBooleanArrayLiteral(this);
    }
    @Override
    public void exitRule(ParseTreeListener listener) {
        if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).exitBooleanArrayLiteral(this);
    }
    @Override
    public <T> T accept(ParseTreeVisitor<? extends T> visitor) {
        if ( visitor instanceof EsqlBaseParserVisitor ) return ((EsqlBaseParserVisitor<? extends T>)visitor).visitBooleanArrayLiteral(this);
        else return visitor.visitChildren(this);
    }
}
@SuppressWarnings("CheckReturnValue")
public static
|
BooleanArrayLiteralContext
|
java
|
netty__netty
|
common/src/test/java/io/netty/util/concurrent/ImmediateExecutorTest.java
|
{
"start": 1046,
"end": 1768
}
|
/**
 * Tests for {@code ImmediateExecutor}: it must reject a {@code null} task and
 * run a non-null task synchronously on the calling thread.
 */
class ____ {

    @Test
    public void testExecuteNullRunnable() {
        // execute(null) must fail eagerly with an NPE.
        assertThrows(NullPointerException.class, () -> ImmediateExecutor.INSTANCE.execute(null));
    }

    @Test
    public void testExecuteNonNullRunnable() throws Exception {
        FutureTask<Void> task = new FutureTask<Void>(() -> {
            // NOOP
        }, null);
        ImmediateExecutor.INSTANCE.execute(task);

        // Execution is synchronous, so the task is already complete on return.
        assertTrue(task.isDone());
        assertFalse(task.isCancelled());
        assertNull(task.get());
    }
}
|
ImmediateExecutorTest
|
java
|
quarkusio__quarkus
|
extensions/mailer/runtime/src/test/java/io/quarkus/mailer/runtime/FakeSmtpServer.java
|
{
"start": 461,
"end": 7297
}
|
/**
 * Minimal in-process SMTP server used by mailer tests.
 * <p>
 * The server replays a scripted "dialogue": {@code dialogue[0]} is the banner
 * written as soon as a client connects, and each following entry is a group of
 * expected client command prefixes (or {@code ^}-anchored regexps) followed by
 * the server replies to send. A client line that does not match the script
 * produces a {@code 500} reply and aborts the exchange.
 */
class ____ {

    private NetServer netServer;

    // dialogue[i] is step i of the scripted exchange; see class javadoc.
    private String[][] dialogue;

    // When true the socket is closed as soon as the dialogue is exhausted,
    // otherwise after closeWaitTime seconds (simulating a lingering server).
    private boolean closeImmediately = false;
    private int closeWaitTime = 10;

    private final boolean ssl;
    private String keystore;

    /*
     * set up server with a default reply that works for EHLO and no login with one recipient
     */
    public FakeSmtpServer(Vertx vertx, boolean ssl, String keystore) {
        setDialogue("220 example.com ESMTP",
                "EHLO",
                "250-example.com\n"
                        + "250-SIZE 1000000\n"
                        + "250 PIPELINING",
                "MAIL FROM:",
                "250 2.1.0 Ok",
                "RCPT TO:",
                "250 2.1.5 Ok",
                "DATA",
                "354 End data with <CR><LF>.<CR><LF>",
                "250 2.0.0 Ok: queued as ABCDDEF0123456789",
                "QUIT",
                "221 2.0.0 Bye");
        this.ssl = ssl;
        this.keystore = keystore;
        startServer(vertx);
    }

    /** Binds the TCP (or TLS) listener and installs the scripted dialogue handler. */
    private void startServer(Vertx vertx) {
        NetServerOptions nsOptions = new NetServerOptions();
        // 1465 for implicit TLS, 1587 for plain/STARTTLS submission.
        int port = ssl ? 1465 : 1587;
        nsOptions.setPort(port);
        JksOptions jksOptions = new JksOptions().setPath(keystore).setPassword("password");
        nsOptions.setKeyStoreOptions(jksOptions);
        if (ssl) {
            nsOptions.setSsl(true);
        }
        netServer = vertx.createNetServer(nsOptions);
        netServer.connectHandler(socket -> {
            // Greet the client with the first response group.
            writeResponses(socket, dialogue[0]);
            if (dialogue.length == 1) {
                if (closeImmediately) {
                    socket.closeAndForget();
                } else {
                    vertx.setTimer(closeWaitTime * 1000L, v -> socket.closeAndForget());
                }
            } else {
                // lines: index of the next dialogue step to play.
                final AtomicInteger lines = new AtomicInteger(1);
                // skipUntilDot: 1 while consuming a DATA body up to the lone "." terminator.
                final AtomicInteger skipUntilDot = new AtomicInteger(0);
                // holdFire: true while a multi-line expected command group is still incomplete.
                final AtomicBoolean holdFire = new AtomicBoolean(false);
                // inputLineIndex: position inside the current multi-line expected group.
                final AtomicInteger inputLineIndex = new AtomicInteger(0);
                socket.handler(b -> RecordParser.newDelimited("\r\n", buffer -> {
                    final String inputLine = buffer.toString();
                    if (skipUntilDot.get() == 1) {
                        if (inputLine.equals(".")) {
                            skipUntilDot.set(0);
                            if (!holdFire.get() && lines.get() < dialogue.length) {
                                writeResponses(socket, dialogue[lines.getAndIncrement()]);
                            }
                        }
                    } else {
                        int currentLine = lines.get();
                        if (currentLine < dialogue.length) {
                            boolean inputValid = false;
                            holdFire.compareAndSet(false, true);
                            if (inputLineIndex.get() < dialogue[currentLine].length) {
                                String thisLine = dialogue[currentLine][inputLineIndex.get()];
                                // Entries starting with '^' are matched as regexps, others as substrings.
                                boolean isRegexp = thisLine.startsWith("^");
                                if (!isRegexp && inputLine.contains(thisLine) || isRegexp && inputLine.matches(thisLine)) {
                                    inputValid = true;
                                    if (inputLineIndex.get() == dialogue[currentLine].length - 1) {
                                        // Whole group matched: release the reply and advance to the next step.
                                        holdFire.compareAndSet(true, false);
                                        lines.getAndIncrement();
                                        inputLineIndex.set(0);
                                    } else {
                                        inputLineIndex.getAndIncrement();
                                    }
                                }
                            }
                            if (!inputValid) {
                                socket.writeAndForget("500 didn't expect commands (\"" + String.join(",", dialogue[currentLine])
                                        + "\"/\"" + inputLine + "\")\r\n");
                                // stop here
                                lines.set(dialogue.length);
                            }
                        } else {
                            socket.writeAndForget("500 out of lines\r\n");
                        }
                        if (inputLine.toUpperCase(Locale.ENGLISH).equals("DATA")) {
                            skipUntilDot.set(1);
                        }
                        if (!holdFire.get() && inputLine.toUpperCase(Locale.ENGLISH).equals("STARTTLS")) {
                            writeResponses(socket, dialogue[lines.getAndIncrement()]);
                        } else if (!holdFire.get() && lines.get() < dialogue.length) {
                            writeResponses(socket, dialogue[lines.getAndIncrement()]);
                        }
                        if (inputLine.equals("QUIT")) {
                            socket.closeAndForget();
                        }
                    }
                    if (lines.get() == dialogue.length) {
                        if (closeImmediately) {
                            socket.closeAndForget();
                        } else {
                            vertx.setTimer(closeWaitTime * 1000L, v -> socket.closeAndForget());
                        }
                    }
                }).handle(b.getDelegate()));
            }
        });
        netServer.listenAndAwait();
    }

    /** Writes each response line to the socket, CRLF-terminated per SMTP. */
    private void writeResponses(NetSocket socket, String[] responses) {
        for (String line : responses) {
            socket.writeAndForget(line + "\r\n");
        }
    }

    /** Replaces the dialogue with single-entry groups, one per argument. */
    public FakeSmtpServer setDialogue(String... dialogue) {
        this.dialogue = new String[dialogue.length][1];
        for (int i = 0; i < dialogue.length; i++) {
            this.dialogue[i] = new String[] { dialogue[i] };
        }
        return this;
    }

    /**
     * Sets the dialogue array.
     *
     * This is useful in case of pipelining is supported to group commands and responses.
     *
     * @param dialogue the dialogues
     * @return a reference to this, so the API can be used fluently
     */
    public FakeSmtpServer setDialogueArray(String[][] dialogue) {
        this.dialogue = dialogue;
        return this;
    }

    public FakeSmtpServer setCloseImmediately(boolean close) {
        closeImmediately = close;
        return this;
    }

    public FakeSmtpServer setCloseWaitTime(int time) {
        closeWaitTime = time;
        return this;
    }

    /** Stops the server, blocking until the listener is closed. */
    public void stop() {
        if (netServer != null) {
            // Fix: removed an unused CountDownLatch that was created but never
            // counted down or awaited — closeAndAwait() already blocks.
            netServer.closeAndAwait();
            netServer = null;
        }
    }
}
|
FakeSmtpServer
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/InternalGeoGridBucket.java
|
{
"start": 1013,
"end": 3345
}
|
/**
 * A single bucket of a geo-grid aggregation, keyed by the long-encoded grid
 * cell hash and carrying the document count plus sub-aggregations.
 */
class ____ extends InternalMultiBucketAggregation.InternalBucketWritable
    implements
        GeoGrid.Bucket,
        Comparable<InternalGeoGridBucket> {

    protected long hashAsLong;
    protected long docCount;
    protected InternalAggregations aggregations;

    public InternalGeoGridBucket(long hashAsLong, long docCount, InternalAggregations aggregations) {
        this.docCount = docCount;
        this.aggregations = aggregations;
        this.hashAsLong = hashAsLong;
    }

    /**
     * Read from a stream.
     */
    public InternalGeoGridBucket(StreamInput in) throws IOException {
        hashAsLong = in.readLong();
        docCount = in.readVLong();
        aggregations = InternalAggregations.readFrom(in);
    }

    @Override
    public void writeTo(StreamOutput out) throws IOException {
        // Must mirror the StreamInput constructor field-for-field.
        out.writeLong(hashAsLong);
        out.writeVLong(docCount);
        aggregations.writeTo(out);
    }

    public long hashAsLong() {
        return hashAsLong;
    }

    @Override
    public long getDocCount() {
        return docCount;
    }

    @Override
    public InternalAggregations getAggregations() {
        return aggregations;
    }

    @Override
    public int compareTo(InternalGeoGridBucket other) {
        // Idiom fix: Long.compare replaces the manual three-way comparison —
        // same ordering, less code, no subtraction-overflow pitfall.
        return Long.compare(this.hashAsLong, other.hashAsLong);
    }

    /** Serializes this bucket (key, doc count, sub-aggregations) as one XContent object. */
    final void bucketToXContent(XContentBuilder builder, ToXContent.Params params) throws IOException {
        builder.startObject();
        builder.field(Aggregation.CommonFields.KEY.getPreferredName(), getKeyAsString());
        builder.field(Aggregation.CommonFields.DOC_COUNT.getPreferredName(), docCount);
        aggregations.toXContentInternal(builder, params);
        builder.endObject();
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;
        InternalGeoGridBucket bucket = (InternalGeoGridBucket) o;
        return hashAsLong == bucket.hashAsLong && docCount == bucket.docCount && Objects.equals(aggregations, bucket.aggregations);
    }

    @Override
    public int hashCode() {
        return Objects.hash(hashAsLong, docCount, aggregations);
    }
}
|
InternalGeoGridBucket
|
java
|
playframework__playframework
|
documentation/manual/working/javaGuide/main/i18n/code/javaguide/i18n/MyService.java
|
{
"start": 1335,
"end": 1524
}
|
// Documentation snippet: constructor-injection of the Play MessagesApi.
class ____ {
    private final play.i18n.MessagesApi messagesApi;

    @Inject
    public MyClass(MessagesApi messagesApi) {
        this.messagesApi = messagesApi;
    }
}
// #inject-messages-api
|
MyClass
|
java
|
square__retrofit
|
retrofit-adapters/rxjava2/src/test/java/retrofit2/adapter/rxjava2/RecordingCompletableObserver.java
|
{
"start": 1198,
"end": 3019
}
|
class ____ implements CompletableObserver {
private final Deque<Notification<?>> events = new ArrayDeque<>();
private RecordingCompletableObserver() {}
@Override
public void onSubscribe(Disposable disposable) {}
@Override
public void onComplete() {
events.add(Notification.createOnComplete());
}
@Override
public void onError(Throwable e) {
events.add(Notification.createOnError(e));
}
/**
 * Removes and returns the oldest recorded notification, failing the test
 * when no event has been recorded yet.
 */
private Notification<?> takeNotification() {
    Notification<?> notification = events.pollFirst();
    if (notification == null) {
        throw new AssertionError("No event found!");
    }
    return notification;
}
public Throwable takeError() {
Notification<?> notification = takeNotification();
assertWithMessage("Expected onError event but was " + notification)
.that(notification.isOnError())
.isTrue();
return notification.getError();
}
public void assertComplete() {
Notification<?> notification = takeNotification();
assertWithMessage("Expected onCompleted event but was " + notification)
.that(notification.isOnComplete())
.isTrue();
assertNoEvents();
}
public void assertError(Throwable throwable) {
assertThat(takeError()).isEqualTo(throwable);
}
public void assertError(Class<? extends Throwable> errorClass) {
assertError(errorClass, null);
}
public void assertError(Class<? extends Throwable> errorClass, String message) {
Throwable throwable = takeError();
assertThat(throwable).isInstanceOf(errorClass);
if (message != null) {
assertThat(throwable).hasMessageThat().isEqualTo(message);
}
assertNoEvents();
}
public void assertNoEvents() {
assertWithMessage("Unconsumed events found!").that(events).isEmpty();
}
public static final
|
RecordingCompletableObserver
|
java
|
quarkusio__quarkus
|
extensions/devui/deployment-spi/src/main/java/io/quarkus/devui/spi/DevUIContent.java
|
{
"start": 1349,
"end": 3632
}
|
/**
 * Fluent builder for {@code DevUIContent}. Only the file name is mandatory;
 * a placeholder template is substituted when none is supplied.
 */
class ____ {
    private String fileName;
    private byte[] template;
    private Map<String, Object> data;
    private Map<String, String> descriptions;
    private Map<String, String> contentTypes;
    private Map<String, String> mcpDefaultEnabled;

    private Builder() {
        this.data = new HashMap<>();
    }

    /** Sets the output file name, e.g. {@code index.html}. Must be non-empty. */
    public Builder fileName(String fileName) {
        if (fileName == null || fileName.isEmpty()) {
            throw new RuntimeException("Invalid fileName");
        }
        this.fileName = fileName;
        return this;
    }

    /** Sets the raw template bytes. Must be non-empty. */
    public Builder template(byte[] template) {
        if (template == null || template.length == 0) {
            throw new RuntimeException("Invalid template");
        }
        this.template = template;
        return this;
    }

    /** Merges all entries into the template data map. */
    public Builder addData(Map<String, Object> data) {
        this.data.putAll(data);
        return this;
    }

    /** Adds a single template data entry. */
    public Builder addData(String key, Object value) {
        this.data.put(key, value);
        return this;
    }

    public Builder descriptions(Map<String, String> descriptions) {
        this.descriptions = descriptions;
        return this;
    }

    public Builder mcpDefaultEnables(Map<String, String> mcpDefaultEnabled) {
        this.mcpDefaultEnabled = mcpDefaultEnabled;
        return this;
    }

    public Builder contentTypes(Map<String, String> contentTypes) {
        this.contentTypes = contentTypes;
        return this;
    }

    /**
     * Validates the mandatory file name, falls back to the default template
     * when none was set, and creates the immutable content object.
     */
    public DevUIContent build() {
        if (fileName == null) {
            throw new RuntimeException(
                    ERROR + " FileName is mandatory, for example 'index.html'");
        }
        if (template == null) {
            template = DEFAULT_TEMPLATE;
        }
        return new DevUIContent(this);
    }

    private static final String ERROR = "Not enough information to create Dev UI content.";
    // NOTE(review): getBytes() without a charset uses the platform default encoding —
    // confirm whether UTF-8 should be specified explicitly.
    private static final byte[] DEFAULT_TEMPLATE = "Here the template of your page. Set your own by providing the template() in the DevUIContent"
            .getBytes();
}
}
|
Builder
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/boot/models/annotations/internal/AnyDiscriminatorAnnotation.java
|
{
"start": 474,
"end": 1548
}
|
/**
 * Concrete, mutable implementation of the {@code @AnyDiscriminator} annotation,
 * usable both as a mirror of a JDK/Jandex-sourced annotation and as a
 * dynamically created instance (e.g. from XML mappings).
 */
class ____ implements AnyDiscriminator {
    private jakarta.persistence.DiscriminatorType value;

    /**
     * Used in creating dynamic annotation instances (e.g. from XML)
     */
    public AnyDiscriminatorAnnotation(ModelsContext modelContext) {
        // Default mirrors the annotation's declared default.
        this.value = jakarta.persistence.DiscriminatorType.STRING;
    }

    /**
     * Used in creating annotation instances from JDK variant
     */
    public AnyDiscriminatorAnnotation(AnyDiscriminator annotation, ModelsContext modelContext) {
        this.value = annotation.value();
    }

    /**
     * Used in creating annotation instances from Jandex variant
     */
    public AnyDiscriminatorAnnotation(Map<String, Object> attributeValues, ModelsContext modelContext) {
        this.value = (jakarta.persistence.DiscriminatorType) attributeValues.get( "value" );
    }

    @Override
    public Class<? extends Annotation> annotationType() {
        return AnyDiscriminator.class;
    }

    @Override
    public jakarta.persistence.DiscriminatorType value() {
        return value;
    }

    /** Mutator used when building the annotation model programmatically. */
    public void value(jakarta.persistence.DiscriminatorType value) {
        this.value = value;
    }
}
|
AnyDiscriminatorAnnotation
|
java
|
apache__flink
|
flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/functions/sql/SqlTableArgOperator.java
|
{
"start": 1549,
"end": 1770
}
|
/**
 * Special {@code TABLE} operator wrapping a table argument of a table function call.
 */
class ____ extends SqlSpecialOperator {

    /** Shared singleton instance. */
    public static final SqlTableArgOperator INSTANCE = new SqlTableArgOperator();

    public SqlTableArgOperator() {
        super("TABLE", SqlKind.OTHER);
    }
}
|
SqlTableArgOperator
|
java
|
lettuce-io__lettuce-core
|
src/main/java/io/lettuce/core/event/jfr/JfrEventRecorder.java
|
{
"start": 974,
"end": 2938
}
|
class ____ implements EventRecorder {
private final Map<Class<?>, Constructor<?>> constructorMap = new ConcurrentHashMap<>();
@Override
public void record(Event event) {
LettuceAssert.notNull(event, "Event must not be null");
jdk.jfr.Event jfrEvent = createEvent(event);
if (jfrEvent != null) {
jfrEvent.commit();
}
}
@Override
public RecordableEvent start(Event event) {
LettuceAssert.notNull(event, "Event must not be null");
jdk.jfr.Event jfrEvent = createEvent(event);
if (jfrEvent != null) {
jfrEvent.begin();
return new JfrRecordableEvent(jfrEvent);
}
return NoOpEventRecorder.INSTANCE;
}
/**
 * Looks up (and caches) the constructor of the JFR companion class for the given
 * event. The companion is resolved by naming convention:
 * {@code <event package>.Jfr<EventSimpleName>}. When no companion class exists,
 * {@code Object}'s no-arg constructor is cached as a negative-lookup marker so
 * the class lookup is not repeated.
 */
private Constructor<?> getEventConstructor(Event event) throws NoSuchMethodException {
    Constructor<?> constructor = constructorMap.get(event.getClass());
    if (constructor == null) {
        String jfrClassName = event.getClass().getPackage().getName() + ".Jfr" + event.getClass().getSimpleName();
        Class<?> eventClass = LettuceClassUtils.findClass(jfrClassName);
        if (eventClass == null) {
            // Negative lookup: cache Object's constructor as a sentinel.
            constructor = Object.class.getConstructor();
        } else {
            constructor = eventClass.getDeclaredConstructors()[0];
            constructor.setAccessible(true);
        }
        // NOTE(review): plain put() can race and resolve twice under contention;
        // harmless since the cached value is idempotent, and computeIfAbsent is
        // awkward here because getConstructor throws a checked exception.
        constructorMap.put(event.getClass(), constructor);
    }
    return constructor;
}
private jdk.jfr.Event createEvent(Event event) {
try {
Constructor<?> constructor = getEventConstructor(event);
if (constructor.getDeclaringClass() == Object.class) {
return null;
}
return (jdk.jfr.Event) constructor.newInstance(event);
} catch (ReflectiveOperationException e) {
throw new IllegalStateException(e);
}
}
static
|
JfrEventRecorder
|
java
|
spring-projects__spring-framework
|
spring-test/src/main/java/org/springframework/test/context/bean/override/convention/TestBeanOverrideProcessor.java
|
{
"start": 6276,
"end": 8186
}
|
class ____ fully-qualified method name: " + methodName, ex);
}
Method externalMethod = ReflectionUtils.findMethod(declaringClass, methodNameToUse);
Assert.state(externalMethod != null && Modifier.isStatic(externalMethod.getModifiers()) &&
methodReturnType.isAssignableFrom(externalMethod.getReturnType()), () ->
"No static method found named %s in %s with return type %s".formatted(
methodNameToUse, className, methodReturnType.getName()));
methods.add(externalMethod);
originalNames.remove(methodName);
}
}
Set<String> supportedNames = new LinkedHashSet<>(originalNames);
MethodFilter methodFilter = method -> (Modifier.isStatic(method.getModifiers()) &&
supportedNames.contains(method.getName()) &&
methodReturnType.isAssignableFrom(method.getReturnType()));
findMethods(methods, clazz, methodFilter);
String methodNamesDescription = supportedNames.stream()
.map(name -> name + "()").collect(Collectors.joining(" or "));
Assert.state(!methods.isEmpty(), () ->
"No static method found named %s in %s with return type %s".formatted(
methodNamesDescription, clazz.getName(), methodReturnType.getName()));
long uniqueMethodNameCount = methods.stream().map(Method::getName).distinct().count();
Assert.state(uniqueMethodNameCount == 1, () ->
"Found %d competing static methods named %s in %s with return type %s".formatted(
uniqueMethodNameCount, methodNamesDescription, clazz.getName(), methodReturnType.getName()));
return methods.iterator().next();
}
/**
 * Collects methods of {@code clazz} that pass {@code methodFilter}; when none
 * match and {@code TestContextAnnotationUtils.searchEnclosingClass} says the
 * enclosing class should be searched, recurses outward into it.
 */
private static Set<Method> findMethods(Set<Method> methods, Class<?> clazz, MethodFilter methodFilter) {
    methods.addAll(MethodIntrospector.selectMethods(clazz, methodFilter));
    if (methods.isEmpty() && TestContextAnnotationUtils.searchEnclosingClass(clazz)) {
        findMethods(methods, clazz.getEnclosingClass(), methodFilter);
    }
    return methods;
}
}
|
for
|
java
|
spring-projects__spring-framework
|
spring-context/src/test/java/org/springframework/context/support/Spr7283Tests.java
|
{
"start": 1238,
"end": 1269
}
|
// Minimal bean with an explicit package-private constructor, used as a test fixture.
class ____ {
    A() {}
}
static
|
A
|
java
|
quarkusio__quarkus
|
extensions/hibernate-search-orm-elasticsearch/deployment/src/test/java/io/quarkus/hibernate/search/orm/elasticsearch/test/boot/AmbiguousSearchExtensionTest.java
|
{
"start": 2345,
"end": 2717
}
|
// No-op FailureHandler; presumably its presence alongside another handler makes the
// Hibernate Search extension configuration ambiguous (per the enclosing test's name) —
// only the type registration matters, the callbacks do nothing.
class ____ implements FailureHandler {
    @Override
    public void handle(FailureContext failureContext) {
        // intentionally empty
    }

    @Override
    public void handle(EntityIndexingFailureContext entityIndexingFailureContext) {
        // intentionally empty
    }
}
@Test
public void test() {
    // Deployment is expected to fail before any test method runs; reaching this
    // point means startup unexpectedly succeeded.
    Assertions.fail("Startup should have failed");
}
}
|
AnotherSearchFailureHandler
|
java
|
apache__camel
|
components/camel-resourceresolver-github/src/main/java/org/apache/camel/github/GistResourceResolver.java
|
{
"start": 1061,
"end": 2565
}
|
/**
 * Resolves {@code gist:} resource URIs to raw GitHub Gist download URLs.
 * <p>
 * Expected location form: {@code gist:<user>:<gist-id>:<revision>:<file-name>},
 * mapped onto {@code https://gist.githubusercontent.com/<user>/<gist-id>/raw/<revision>/<file-name>}.
 */
class ____ extends ServiceSupport implements org.apache.camel.spi.ResourceResolver {

    // gist:davsclaus:477ddff5cdeb1ae03619aa544ce47e92:cd1be96034748e42e43879a4d27ed297752b6115:mybeer.xml
    // https://gist.githubusercontent.com/davsclaus/477ddff5cdeb1ae03619aa544ce47e92/raw/cd1be96034748e42e43879a4d27ed297752b6115/mybeer.xml
    private static final String GIST_URL = "https://gist.githubusercontent.com/%s/%s/raw/%s/%s";

    private CamelContext camelContext;

    @Override
    public CamelContext getCamelContext() {
        return camelContext;
    }

    @Override
    public void setCamelContext(CamelContext camelContext) {
        this.camelContext = camelContext;
    }

    @Override
    public String getSupportedScheme() {
        return "gist";
    }

    @Override
    public Resource resolve(String location) {
        final String[] parts = location.split(":");
        // Guard clause: anything other than the 5-segment gist form is rejected.
        // (String.split never yields null segments, so a length check suffices.)
        if (parts.length != 5) {
            throw new IllegalArgumentException(location);
        }
        final String user = parts[1];
        final String gistId = parts[2];
        final String revision = parts[3];
        final String fileName = parts[4];
        final String target = String.format(GIST_URL, user, gistId, revision, fileName);
        return new GistResource(camelContext, target);
    }
}
|
GistResourceResolver
|
java
|
apache__flink
|
flink-models/flink-model-openai/src/test/java/org/apache/flink/model/openai/OpenAIEmbeddingModelTest.java
|
{
"start": 2291,
"end": 12546
}
|
class ____ {
private static final String MODEL_NAME = "m";
private static final Schema INPUT_SCHEMA =
Schema.newBuilder().column("input", DataTypes.STRING()).build();
private static final Schema OUTPUT_SCHEMA =
Schema.newBuilder().column("embedding", DataTypes.ARRAY(DataTypes.FLOAT())).build();
private static MockWebServer server;
private Map<String, String> modelOptions;
private TableEnvironment tEnv;
@BeforeAll
public static void beforeAll() throws IOException {
    // Shared mock HTTP server standing in for the remote embeddings endpoint.
    server = new MockWebServer();
    server.setDispatcher(new TestDispatcher());
    server.start();
}
@AfterAll
public static void afterAll() throws IOException {
if (server != null) {
server.close();
}
}
@BeforeEach
public void setup() {
    tEnv = TableEnvironment.create(new Configuration());
    // Bounded datagen source: 10 rows with a STRING input column plus a
    // DOUBLE column used by the negative (invalid input) tests.
    tEnv.executeSql(
            "CREATE TABLE MyTable(input STRING, invalid_input DOUBLE) WITH ( 'connector' = 'datagen', 'number-of-rows' = '10')");
    modelOptions = new HashMap<>();
    modelOptions.put("provider", "openai");
    // Point the model at the local mock server instead of a real endpoint.
    modelOptions.put("endpoint", server.url("/embeddings").toString());
    modelOptions.put("model", "text-embedding-v3");
    modelOptions.put("api-key", "foobar");
}
@AfterEach
public void afterEach() {
assertThat(OpenAIUtils.getCache()).isEmpty();
}
@Test
public void testEmbedding() {
CatalogManager catalogManager = ((TableEnvironmentImpl) tEnv).getCatalogManager();
catalogManager.createModel(
CatalogModel.of(INPUT_SCHEMA, OUTPUT_SCHEMA, modelOptions, "This is a new model."),
ObjectIdentifier.of(
catalogManager.getCurrentCatalog(),
catalogManager.getCurrentDatabase(),
MODEL_NAME),
false);
TableResult tableResult =
tEnv.executeSql(
String.format(
"SELECT input, embedding FROM TABLE(ML_PREDICT(TABLE MyTable, MODEL %s, DESCRIPTOR(`input`)))",
MODEL_NAME));
List<Row> result = IteratorUtils.toList(tableResult.collect());
assertThat(result).hasSize(10);
for (Row row : result) {
assertThat(row.getField(0)).isInstanceOf(String.class);
assertThat((Float[]) row.getFieldAs(1)).hasSize(512);
}
}
@Test
public void testEmbeddingWithSqlStatement() throws Exception {
tEnv.executeSql(
String.format(
"CREATE MODEL %s\n"
+ "INPUT (`input` STRING)\n"
+ "OUTPUT (`embedding` ARRAY<FLOAT>) \n"
+ "WITH (%s)",
MODEL_NAME,
modelOptions.entrySet().stream()
.map(
x ->
String.format(
"'%s'='%s'",
x.getKey(), x.getValue()))
.collect(Collectors.joining(","))))
.await();
TableResult tableResult =
tEnv.executeSql(
String.format(
"SELECT input, embedding FROM TABLE(ML_PREDICT(TABLE MyTable, MODEL %s, DESCRIPTOR(`input`)))",
MODEL_NAME));
List<Row> result = IteratorUtils.toList(tableResult.collect());
assertThat(result).hasSize(10);
for (Row row : result) {
assertThat(row.getField(0)).isInstanceOf(String.class);
assertThat((Float[]) row.getFieldAs(1)).hasSize(512);
}
}
@Test
public void testEmbeddingWithDimension() {
CatalogManager catalogManager = ((TableEnvironmentImpl) tEnv).getCatalogManager();
Map<String, String> modelOptions = new HashMap<>(this.modelOptions);
modelOptions.put("dimension", "256");
catalogManager.createModel(
CatalogModel.of(INPUT_SCHEMA, OUTPUT_SCHEMA, modelOptions, "This is a new model."),
ObjectIdentifier.of(
catalogManager.getCurrentCatalog(),
catalogManager.getCurrentDatabase(),
MODEL_NAME),
false);
TableResult tableResult =
tEnv.executeSql(
String.format(
"SELECT input, embedding FROM TABLE(ML_PREDICT(TABLE MyTable, MODEL %s, DESCRIPTOR(`input`)))",
MODEL_NAME));
List<Row> result = IteratorUtils.toList(tableResult.collect());
assertThat(result).hasSize(10);
for (Row row : result) {
assertThat(row.getField(0)).isInstanceOf(String.class);
assertThat((Float[]) row.getFieldAs(1)).hasSize(256);
}
}
@Test
public void testMaxContextSize() {
CatalogManager catalogManager = ((TableEnvironmentImpl) tEnv).getCatalogManager();
Map<String, String> modelOptions = new HashMap<>(this.modelOptions);
modelOptions.put("model", "text-embedding-3-small");
modelOptions.put("max-context-size", "2");
modelOptions.put("context-overflow-action", "skipped");
catalogManager.createModel(
CatalogModel.of(INPUT_SCHEMA, OUTPUT_SCHEMA, modelOptions, "This is a new model."),
ObjectIdentifier.of(
catalogManager.getCurrentCatalog(),
catalogManager.getCurrentDatabase(),
MODEL_NAME),
false);
TableResult tableResult =
tEnv.executeSql(
String.format(
"SELECT input, embedding FROM TABLE(ML_PREDICT(TABLE MyTable, MODEL %s, DESCRIPTOR(`input`)))",
MODEL_NAME));
List<Row> result = IteratorUtils.toList(tableResult.collect());
assertThat(result).isEmpty();
}
@Test
public void testNullValue() {
tEnv.executeSql(
"CREATE TABLE MyTableWithNull(input STRING, invalid_input DOUBLE) "
+ "WITH ( 'connector' = 'datagen', 'number-of-rows' = '10', 'fields.input.null-rate' = '1')");
CatalogManager catalogManager = ((TableEnvironmentImpl) tEnv).getCatalogManager();
catalogManager.createModel(
CatalogModel.of(INPUT_SCHEMA, OUTPUT_SCHEMA, modelOptions, "This is a new model."),
ObjectIdentifier.of(
catalogManager.getCurrentCatalog(),
catalogManager.getCurrentDatabase(),
MODEL_NAME),
false);
TableResult tableResult =
tEnv.executeSql(
String.format(
"SELECT input, embedding FROM TABLE(ML_PREDICT(TABLE MyTableWithNull, MODEL %s, DESCRIPTOR(`input`)))",
MODEL_NAME));
List<Row> result = IteratorUtils.toList(tableResult.collect());
assertThat(result).isEmpty();
}
@Test
public void testInvalidInputSchema() {
CatalogManager catalogManager = ((TableEnvironmentImpl) tEnv).getCatalogManager();
ObjectIdentifier modelIdentifier =
ObjectIdentifier.of(
catalogManager.getCurrentCatalog(),
catalogManager.getCurrentDatabase(),
MODEL_NAME);
Schema inputSchemaWithInvalidColumnType =
Schema.newBuilder().column("input", DataTypes.DOUBLE()).build();
catalogManager.createModel(
CatalogModel.of(
inputSchemaWithInvalidColumnType,
OUTPUT_SCHEMA,
modelOptions,
"This is a new model."),
modelIdentifier,
false);
assertThatThrownBy(
() ->
tEnv.executeSql(
String.format(
"SELECT * FROM TABLE(ML_PREDICT(TABLE MyTable, MODEL %s, DESCRIPTOR(`invalid_input`)))",
MODEL_NAME)))
.rootCause()
.isInstanceOf(IllegalArgumentException.class)
.hasMessageContainingAll("input", "DOUBLE", "STRING");
}
@Test
public void testInvalidOutputSchema() {
CatalogManager catalogManager = ((TableEnvironmentImpl) tEnv).getCatalogManager();
ObjectIdentifier modelIdentifier =
ObjectIdentifier.of(
catalogManager.getCurrentCatalog(),
catalogManager.getCurrentDatabase(),
MODEL_NAME);
Schema outputSchemaWithInvalidColumnType =
Schema.newBuilder().column("output", DataTypes.DOUBLE()).build();
catalogManager.createModel(
CatalogModel.of(
INPUT_SCHEMA,
outputSchemaWithInvalidColumnType,
modelOptions,
"This is a new model."),
modelIdentifier,
false);
assertThatThrownBy(
() ->
tEnv.executeSql(
String.format(
"SELECT * FROM TABLE(ML_PREDICT(TABLE MyTable, MODEL %s, DESCRIPTOR(`input`)))",
MODEL_NAME)))
.rootCause()
.isInstanceOf(IllegalArgumentException.class)
.hasMessageContainingAll("output", "DOUBLE", "ARRAY<FLOAT>");
}
private static
|
OpenAIEmbeddingModelTest
|
java
|
junit-team__junit5
|
jupiter-tests/src/test/java/org/junit/jupiter/engine/discovery/DiscoveryTests.java
|
{
"start": 21682,
"end": 21812
}
|
class ____ extends NonTestRecursiveHierarchyTestCase {
}
}
@SuppressWarnings("JUnitMalformedDeclaration")
@Tag("")
static
|
Inner
|
java
|
alibaba__nacos
|
auth/src/main/java/com/alibaba/nacos/auth/config/AuthErrorCode.java
|
{
"start": 744,
"end": 1486
}
|
/**
 * Error codes raised while validating Nacos auth configuration, each pairing a
 * numeric code with a remediation message.
 */
enum ____ {
    /**
     * invalid auth type.
     */
    INVALID_TYPE(50001,
            "Invalid auth type, Please set `nacos.core.auth.system.type`, detail: https://nacos.io/docs/latest/manual/admin/auth/"),

    /**
     * empty server identity key/value configuration.
     */
    EMPTY_IDENTITY(50002,
            "Empty identity, Please set `nacos.core.auth.server.identity.key` and `nacos.core.auth.server.identity.value`, detail: https://nacos.io/docs/latest/manual/admin/auth/");

    private final Integer code;
    private final String msg;

    public Integer getCode() {
        return code;
    }

    public String getMsg() {
        return msg;
    }

    AuthErrorCode(Integer code, String msg) {
        this.code = code;
        this.msg = msg;
    }
}
|
AuthErrorCode
|
java
|
spring-projects__spring-framework
|
spring-core/src/main/java/org/springframework/core/annotation/MergedAnnotations.java
|
{
"start": 6582,
"end": 8769
}
|
class ____ of the annotation type
* to check
* @return {@code true} if the annotation is directly present
*/
boolean isDirectlyPresent(String annotationType);
/**
* Get the {@linkplain MergedAnnotationSelectors#nearest() nearest} matching
* annotation or meta-annotation of the specified type, or
* {@link MergedAnnotation#missing()} if none is present.
* @param annotationType the annotation type to get
* @return a {@link MergedAnnotation} instance
*/
<A extends Annotation> MergedAnnotation<A> get(Class<A> annotationType);
/**
* Get the {@linkplain MergedAnnotationSelectors#nearest() nearest} matching
* annotation or meta-annotation of the specified type, or
* {@link MergedAnnotation#missing()} if none is present.
* @param annotationType the annotation type to get
* @param predicate a predicate that must match, or {@code null} if only
* type matching is required
* @return a {@link MergedAnnotation} instance
* @see MergedAnnotationPredicates
*/
<A extends Annotation> MergedAnnotation<A> get(Class<A> annotationType,
@Nullable Predicate<? super MergedAnnotation<A>> predicate);
/**
* Get a matching annotation or meta-annotation of the specified type, or
* {@link MergedAnnotation#missing()} if none is present.
* @param annotationType the annotation type to get
* @param predicate a predicate that must match, or {@code null} if only
* type matching is required
* @param selector a selector used to choose the most appropriate annotation
* within an aggregate, or {@code null} to select the
* {@linkplain MergedAnnotationSelectors#nearest() nearest}
* @return a {@link MergedAnnotation} instance
* @see MergedAnnotationPredicates
* @see MergedAnnotationSelectors
*/
<A extends Annotation> MergedAnnotation<A> get(Class<A> annotationType,
@Nullable Predicate<? super MergedAnnotation<A>> predicate,
@Nullable MergedAnnotationSelector<A> selector);
/**
* Get the {@linkplain MergedAnnotationSelectors#nearest() nearest} matching
* annotation or meta-annotation of the specified type, or
* {@link MergedAnnotation#missing()} if none is present.
* @param annotationType the fully qualified
|
name
|
java
|
assertj__assertj-core
|
assertj-tests/assertj-integration-tests/assertj-core-tests/src/test/java/org/assertj/tests/core/groups/FieldsOrPropertiesExtractor_assertNotNull_Test.java
|
{
"start": 903,
"end": 1474
}
|
class ____ {
@Test
void should_throw_assertion_error_in_absence_of_iterable() {
// WHEN
var assertionError = expectAssertionError(() -> extract((Iterable<?>) null, null));
// THEN
then(assertionError).hasMessage("Expecting actual not to be null");
}
@Test
void should_throw_assertion_error_in_absence_of_array() {
// WHEN
var assertionError = expectAssertionError(() -> extract((Object[]) null, null));
// THEN
then(assertionError).hasMessage("Expecting actual not to be null");
}
}
|
FieldsOrPropertiesExtractor_assertNotNull_Test
|
java
|
apache__flink
|
flink-clients/src/main/java/org/apache/flink/client/deployment/DefaultClusterClientServiceLoader.java
|
{
"start": 1426,
"end": 4657
}
|
class ____ implements ClusterClientServiceLoader {
private static final Logger LOG =
LoggerFactory.getLogger(DefaultClusterClientServiceLoader.class);
@Override
public <ClusterID> ClusterClientFactory<ClusterID> getClusterClientFactory(
final Configuration configuration) {
checkNotNull(configuration);
final ServiceLoader<ClusterClientFactory> loader =
ServiceLoader.load(ClusterClientFactory.class);
final List<ClusterClientFactory> compatibleFactories = new ArrayList<>();
final Iterator<ClusterClientFactory> factories = loader.iterator();
while (factories.hasNext()) {
try {
final ClusterClientFactory factory = factories.next();
if (factory != null && factory.isCompatibleWith(configuration)) {
compatibleFactories.add(factory);
}
} catch (Throwable e) {
if (e.getCause() instanceof NoClassDefFoundError) {
LOG.info("Could not load factory due to missing dependencies.");
} else {
throw e;
}
}
}
if (compatibleFactories.size() > 1) {
final List<String> configStr =
configuration.toMap().entrySet().stream()
.map(e -> e.getKey() + "=" + e.getValue())
.collect(Collectors.toList());
throw new IllegalStateException(
"Multiple compatible client factories found for:\n"
+ String.join("\n", configStr)
+ ".");
}
if (compatibleFactories.isEmpty()) {
throw new IllegalStateException(
"No ClusterClientFactory found. If you were targeting a Yarn cluster, "
+ "please make sure to export the HADOOP_CLASSPATH environment variable or have hadoop in your "
+ "classpath. For more information refer to the \"Deployment\" section of the official "
+ "Apache Flink documentation.");
}
return (ClusterClientFactory<ClusterID>) compatibleFactories.get(0);
}
@Override
public Stream<String> getApplicationModeTargetNames() {
final ServiceLoader<ClusterClientFactory> loader =
ServiceLoader.load(ClusterClientFactory.class);
final List<String> result = new ArrayList<>();
final Iterator<ClusterClientFactory> it = loader.iterator();
while (it.hasNext()) {
try {
final ClusterClientFactory clientFactory = it.next();
final Optional<String> applicationName = clientFactory.getApplicationTargetName();
if (applicationName.isPresent()) {
result.add(applicationName.get());
}
} catch (ServiceConfigurationError e) {
// cannot be loaded, most likely because Hadoop is not
// in the classpath, we can ignore it for now.
}
}
return result.stream();
}
}
|
DefaultClusterClientServiceLoader
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/search/fetch/FetchSubPhase.java
|
{
"start": 1103,
"end": 3376
}
|
class ____ {
private final SearchHit hit;
private final LeafReaderContext readerContext;
private final int docId;
private final Source source;
private final Map<String, List<Object>> loadedFields;
private final RankDoc rankDoc;
public HitContext(
SearchHit hit,
LeafReaderContext context,
int docId,
Map<String, List<Object>> loadedFields,
Source source,
RankDoc rankDoc
) {
this.hit = hit;
this.readerContext = context;
this.docId = docId;
this.source = source;
this.loadedFields = loadedFields;
this.rankDoc = rankDoc;
}
public SearchHit hit() {
return hit;
}
public LeafReader reader() {
return readerContext.reader();
}
public LeafReaderContext readerContext() {
return readerContext;
}
/**
* @return the docId of this hit relative to the leaf reader context
*/
public int docId() {
return docId;
}
/**
* This lookup provides access to the source for the given hit document. Note
* that it should always be set to the correct doc ID and {@link LeafReaderContext}.
*
* In most cases, the hit document's source is loaded eagerly at the start of the
* {@link FetchPhase}. This lookup will contain the preloaded source.
*/
public Source source() {
return source;
}
public Map<String, List<Object>> loadedFields() {
return loadedFields;
}
@Nullable
public RankDoc rankDoc() {
return this.rankDoc;
}
public IndexReader topLevelReader() {
return ReaderUtil.getTopLevelContext(readerContext).reader();
}
}
/**
* Returns a {@link FetchSubPhaseProcessor} for this sub phase.
*
* If nothing should be executed for the provided {@code FetchContext}, then the
* implementation should return {@code null}
*/
FetchSubPhaseProcessor getProcessor(FetchContext fetchContext) throws IOException;
}
|
HitContext
|
java
|
quarkusio__quarkus
|
independent-projects/arc/processor/src/main/java/io/quarkus/arc/processor/AnnotationLiteralGenerator.java
|
{
"start": 12515,
"end": 18467
}
|
interface ____ have the same `annotationType()`
bc.ifNotInstanceOf(other, ClassDesc.of(Annotation.class.getName()), BlockCreator::returnFalse);
Expr thisAnnType = Const.of(classDescOf(literal.annotationClass));
Expr thatAnnType = bc.invokeInterface(MethodDesc.of(Annotation.class, "annotationType", Class.class),
other);
bc.return_(bc.objEquals(thisAnnType, thatAnnType));
return;
}
bc.ifNotInstanceOf(other, classDescOf(literal.annotationClass), BlockCreator::returnFalse);
LocalVar that = bc.localVar("that", bc.cast(other, classDescOf(literal.annotationClass)));
for (MethodInfo annotationMember : literal.annotationMembers()) {
ClassDesc type = classDescOf(annotationMember.returnType());
// for `this` object, can read directly from the field, that's what the method also does
FieldDesc field = FieldDesc.of(cc.type(), annotationMember.name(), type);
Supplier<Expr> thisValue = () -> cc.this_().field(field);
// for the other object, must invoke the method
Supplier<Expr> thatValue = () -> bc.invokeInterface(methodDescOf(annotationMember), that);
// type of the field (in this class) is the same as return type of the method (in both classes)
Expr cmp;
if (type.equals(ConstantDescs.CD_float)) {
Expr thisIntBits = bc.invokeStatic(FLOAT_TO_INT_BITS, thisValue.get());
Expr thatIntBits = bc.invokeStatic(FLOAT_TO_INT_BITS, thatValue.get());
cmp = bc.eq(thisIntBits, thatIntBits);
} else if (type.equals(ConstantDescs.CD_double)) {
Expr thisLongBits = bc.invokeStatic(DOUBLE_TO_LONG_BITS, thisValue.get());
Expr thatLongBits = bc.invokeStatic(DOUBLE_TO_LONG_BITS, thatValue.get());
cmp = bc.eq(thisLongBits, thatLongBits);
} else if (type.isArray()) {
cmp = bc.arrayEquals(thisValue.get(), thatValue.get());
} else if (type.isClassOrInterface()) {
// annotation members are never `null`
cmp = bc.withObject(thisValue.get()).equals_(thatValue.get());
} else {
cmp = bc.eq(thisValue.get(), thatValue.get());
}
bc.ifNot(cmp, BlockCreator::returnFalse);
}
bc.returnTrue();
});
});
}
private static void generateHashCode(ClassCreator cc, AnnotationLiteralClassInfo literal) {
cc.method("hashCode", mc -> {
mc.public_();
mc.returning(int.class);
mc.body(bc -> {
if (literal.annotationMembers().isEmpty()) {
bc.return_(0);
return;
}
LocalVar result = bc.localVar("result", Const.of(0));
for (MethodInfo annotationMember : literal.annotationMembers()) {
Expr memberNameHash = bc.mul(Const.of(127), bc.withObject(Const.of(annotationMember.name())).hashCode_());
ClassDesc type = classDescOf(annotationMember.returnType());
FieldDesc field = FieldDesc.of(cc.type(), annotationMember.name(), type);
FieldVar value = cc.this_().field(field);
Expr memberValueHash;
if (type.isArray()) {
memberValueHash = bc.arrayHashCode(value);
} else if (type.isClassOrInterface()) {
// annotation members are never `null`
memberValueHash = bc.withObject(value).hashCode_();
} else {
memberValueHash = bc.objHashCode(value);
}
Expr xor = bc.xor(memberNameHash, memberValueHash);
bc.addAssign(result, xor);
}
bc.return_(result);
});
});
}
// CDI's `AnnotationLiteral` has special cases for `String` and `Class` values
// and wraps arrays into "{...}" instead of "[...]", but that's not necessary
private static void generateToString(ClassCreator cc, AnnotationLiteralClassInfo literal) {
cc.method("toString", mc -> {
mc.public_();
mc.returning(String.class);
mc.body(bc -> {
if (literal.annotationMembers().isEmpty()) {
// short-circuit for memberless annotations
bc.return_("@" + literal.annotationClass.name() + "()");
return;
}
StringBuilderGen str = StringBuilderGen.ofNew(bc);
str.append("@" + literal.annotationClass.name() + '(');
boolean first = true;
for (MethodInfo annotationMember : literal.annotationMembers()) {
if (first) {
str.append(annotationMember.name() + "=");
} else {
str.append(", " + annotationMember.name() + "=");
}
ClassDesc type = classDescOf(annotationMember.returnType());
FieldDesc field = FieldDesc.of(cc.type(), annotationMember.name(), type);
FieldVar value = cc.this_().field(field);
str.append(type.isArray() ? bc.arrayToString(value) : value);
first = false;
}
str.append(')');
bc.return_(str.toString_());
});
});
}
}
|
and
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/index/codec/tsdb/es819/ES819TSDBDocValuesConsumer.java
|
{
"start": 22851,
"end": 23214
}
|
interface ____ extends Closeable {
void addDoc(BytesRef v) throws IOException;
default void flushData() throws IOException {}
default void writeAddressMetadata(int minLength, int maxLength, int numDocsWithField) throws IOException {}
@Override
default void close() throws IOException {}
}
private final
|
BinaryWriter
|
java
|
quarkusio__quarkus
|
independent-projects/arc/tests/src/test/java/io/quarkus/arc/test/buildextension/beans/SynthBeanWithWrongScopeTest.java
|
{
"start": 1689,
"end": 2231
}
|
class ____ implements BeanRegistrar {
@Override
public void register(RegistrationContext context) {
context.configure(BeanifyMe.class).unremovable()
.types(BeanifyMe.class)
// assign annotation that isn't a known scope - this should lead to sensible exception
.scope(NotAScope.class)
.creator(MyBeanCreator.class).done();
}
}
@Target({ TYPE, METHOD, FIELD, PARAMETER })
@Retention(RUNTIME)
public @
|
TestRegistrar
|
java
|
netty__netty
|
transport-sctp/src/main/java/io/netty/channel/sctp/SctpChannelOption.java
|
{
"start": 870,
"end": 2046
}
|
class ____<T> extends ChannelOption<T> {
public static final ChannelOption<Boolean> SCTP_DISABLE_FRAGMENTS =
valueOf(SctpChannelOption.class, "SCTP_DISABLE_FRAGMENTS");
public static final ChannelOption<Boolean> SCTP_EXPLICIT_COMPLETE =
valueOf(SctpChannelOption.class, "SCTP_EXPLICIT_COMPLETE");
public static final ChannelOption<Integer> SCTP_FRAGMENT_INTERLEAVE =
valueOf(SctpChannelOption.class, "SCTP_FRAGMENT_INTERLEAVE");
public static final ChannelOption<InitMaxStreams> SCTP_INIT_MAXSTREAMS =
valueOf(SctpChannelOption.class, "SCTP_INIT_MAXSTREAMS");
public static final ChannelOption<Boolean> SCTP_NODELAY =
valueOf(SctpChannelOption.class, "SCTP_NODELAY");
public static final ChannelOption<SocketAddress> SCTP_PRIMARY_ADDR =
valueOf(SctpChannelOption.class, "SCTP_PRIMARY_ADDR");
public static final ChannelOption<SocketAddress> SCTP_SET_PEER_PRIMARY_ADDR =
valueOf(SctpChannelOption.class, "SCTP_SET_PEER_PRIMARY_ADDR");
@SuppressWarnings({ "unused", "deprecation" })
private SctpChannelOption() {
super(null);
}
}
|
SctpChannelOption
|
java
|
elastic__elasticsearch
|
x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseParser.java
|
{
"start": 295096,
"end": 296162
}
|
class ____ extends QuoteIdentifierContext {
public TerminalNode BACKQUOTED_IDENTIFIER() {
return getToken(SqlBaseParser.BACKQUOTED_IDENTIFIER, 0);
}
public BackQuotedIdentifierContext(QuoteIdentifierContext ctx) {
copyFrom(ctx);
}
@Override
public void enterRule(ParseTreeListener listener) {
if (listener instanceof SqlBaseListener) ((SqlBaseListener) listener).enterBackQuotedIdentifier(this);
}
@Override
public void exitRule(ParseTreeListener listener) {
if (listener instanceof SqlBaseListener) ((SqlBaseListener) listener).exitBackQuotedIdentifier(this);
}
@Override
public <T> T accept(ParseTreeVisitor<? extends T> visitor) {
if (visitor instanceof SqlBaseVisitor) return ((SqlBaseVisitor<? extends T>) visitor).visitBackQuotedIdentifier(this);
else return visitor.visitChildren(this);
}
}
@SuppressWarnings("CheckReturnValue")
public static
|
BackQuotedIdentifierContext
|
java
|
spring-projects__spring-framework
|
spring-web/src/main/java/org/springframework/web/cors/CorsProcessor.java
|
{
"start": 1430,
"end": 1915
}
|
interface ____ {
/**
* Process a request given a {@code CorsConfiguration}.
* @param configuration the applicable CORS configuration (possibly {@code null})
* @param request the current request
* @param response the current response
* @return {@code false} if the request is rejected, {@code true} otherwise
*/
boolean processRequest(@Nullable CorsConfiguration configuration, HttpServletRequest request,
HttpServletResponse response) throws IOException;
}
|
CorsProcessor
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/inheritance/repeatedtable/RepeatedTableTest.java
|
{
"start": 4530,
"end": 5366
}
|
class ____ {
private Integer id;
private String name;
private ObjectType objectType;
protected Prop() {
}
public Prop(Integer id, String name, ObjectType objectType) {
this.id = id;
this.name = name;
this.objectType = objectType;
}
@Id
@Column(name = "ID")
public Integer getId() {
return id;
}
protected void setId(Integer id) {
this.id = id;
}
@Column(name = "name")
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
@JoinColumn(name = "OBJ_TYPE_ID")
@ManyToOne
public ObjectType getObjectType() {
return objectType;
}
public void setObjectType(ObjectType objectType) {
this.objectType = objectType;
}
}
@Entity(name = "SimpleType")
@DiscriminatorValue("2")
@Table(name = "DATA_TYPE")
public static
|
Prop
|
java
|
spring-projects__spring-boot
|
module/spring-boot-thymeleaf/src/main/java/org/springframework/boot/thymeleaf/autoconfigure/ThymeleafAutoConfiguration.java
|
{
"start": 6368,
"end": 7885
}
|
class ____ {
@Bean
@ConditionalOnMissingBean(name = "thymeleafViewResolver")
ThymeleafViewResolver thymeleafViewResolver(ThymeleafProperties properties,
SpringTemplateEngine templateEngine) {
ThymeleafViewResolver resolver = new ThymeleafViewResolver();
resolver.setTemplateEngine(templateEngine);
resolver.setCharacterEncoding(properties.getEncoding().name());
resolver.setContentType(
appendCharset(properties.getServlet().getContentType(), resolver.getCharacterEncoding()));
resolver.setProducePartialOutputWhileProcessing(
properties.getServlet().isProducePartialOutputWhileProcessing());
resolver.setExcludedViewNames(properties.getExcludedViewNames());
resolver.setViewNames(properties.getViewNames());
// This resolver acts as a fallback resolver (e.g. like a
// InternalResourceViewResolver) so it needs to have low precedence
resolver.setOrder(Ordered.LOWEST_PRECEDENCE - 5);
resolver.setCache(properties.isCache());
return resolver;
}
private String appendCharset(MimeType type, String charset) {
if (type.getCharset() != null) {
return type.toString();
}
LinkedHashMap<String, String> parameters = new LinkedHashMap<>();
parameters.put("charset", charset);
parameters.putAll(type.getParameters());
return new MimeType(type, parameters).toString();
}
}
}
@Configuration(proxyBeanMethods = false)
@ConditionalOnWebApplication(type = Type.REACTIVE)
static
|
ThymeleafViewResolverConfiguration
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/boot/model/internal/AbstractPropertyHolder.java
|
{
"start": 2203,
"end": 22847
}
|
class ____ implements PropertyHolder {
private final String path;
protected final AbstractPropertyHolder parent;
protected final MetadataBuildingContext context;
private Boolean isInIdClass;
private Map<String, Column[]> holderColumnOverride;
private Map<String, Column[]> currentPropertyColumnOverride;
private Map<String, ColumnTransformer> holderColumnTransformerOverride;
private Map<String, ColumnTransformer> currentPropertyColumnTransformerOverride;
private Map<String, JoinColumn[]> holderJoinColumnOverride;
private Map<String, JoinColumn[]> currentPropertyJoinColumnOverride;
private Map<String, JoinTable> holderJoinTableOverride;
private Map<String, JoinTable> currentPropertyJoinTableOverride;
private Map<String, ForeignKey> holderForeignKeyOverride;
private Map<String, ForeignKey> currentPropertyForeignKeyOverride;
AbstractPropertyHolder(
String path,
PropertyHolder parent,
ClassDetails clazzToProcess,
MetadataBuildingContext context) {
this.path = path;
this.parent = (AbstractPropertyHolder) parent;
this.context = context;
buildHierarchyColumnOverride( clazzToProcess );
}
protected abstract String normalizeCompositePathForLogging(String attributeName);
protected abstract String normalizeCompositePath(String attributeName);
protected abstract AttributeConversionInfo locateAttributeConversionInfo(MemberDetails attributeMember);
protected abstract AttributeConversionInfo locateAttributeConversionInfo(String path);
@Override
public ConverterDescriptor<?,?> resolveAttributeConverterDescriptor(MemberDetails attributeMember, boolean autoApply) {
final var info = locateAttributeConversionInfo( attributeMember );
if ( info != null ) {
if ( info.isConversionDisabled() ) {
return null;
}
else {
try {
return makeAttributeConverterDescriptor( info );
}
catch (Exception e) {
throw buildExceptionFromInstantiationError( info, e );
}
}
}
else {
return autoApply
? context.getMetadataCollector().getConverterRegistry()
.getAttributeConverterAutoApplyHandler()
.findAutoApplyConverterForAttribute( attributeMember, context )
: null;
}
}
protected IllegalStateException buildExceptionFromInstantiationError(AttributeConversionInfo info, Exception e) {
if ( void.class.equals( info.getConverterClass() ) ) {
// the user forgot to set @Convert.converter
// we already know it's not a @Convert.disableConversion
return new IllegalStateException(
"Unable to instantiate AttributeConverter: you left @Convert.converter to its default value void.",
e
);
}
else {
return new IllegalStateException(
String.format(
"Unable to instantiate AttributeConverter [%s]",
info.getConverterClass().getName()
),
e
);
}
}
protected ConverterDescriptor<?,?> makeAttributeConverterDescriptor(AttributeConversionInfo conversion) {
try {
return ConverterDescriptors.of( conversion.getConverterClass(),
null, false,
context.getBootstrapContext().getClassmateContext() );
}
catch (Exception e) {
throw new AnnotationException( "Unable to create AttributeConverter instance", e );
}
}
@Override
public boolean isInIdClass() {
if ( isInIdClass != null ) {
return isInIdClass;
}
else if ( parent != null ) {
return parent.isInIdClass();
}
else {
return false;
}
}
@Override
public void setInIdClass(Boolean isInIdClass) {
this.isInIdClass = isInIdClass;
}
@Override
public String getPath() {
return path;
}
/**
* Get the mappings
*
* @return The mappings
*/
protected MetadataBuildingContext getContext() {
return context;
}
protected ModelsContext getSourceModelContext() {
return getContext().getBootstrapContext().getModelsContext();
}
/**
* Set the property to be processed. property can be null
*
* @param attributeMember The property
*/
protected void setCurrentProperty(MemberDetails attributeMember) {
// todo (jpa32) : some of this (association override handling esp) does the same work multiple times - consolidate
if ( attributeMember == null ) {
currentPropertyColumnOverride = null;
currentPropertyColumnTransformerOverride = null;
currentPropertyJoinColumnOverride = null;
currentPropertyJoinTableOverride = null;
currentPropertyForeignKeyOverride = null;
}
else {
currentPropertyColumnOverride = buildColumnOverride( attributeMember, getPath(), context );
if ( currentPropertyColumnOverride.isEmpty() ) {
currentPropertyColumnOverride = null;
}
currentPropertyColumnTransformerOverride = buildColumnTransformerOverride( attributeMember, context );
if ( currentPropertyColumnTransformerOverride.isEmpty() ) {
currentPropertyColumnTransformerOverride = null;
}
currentPropertyJoinColumnOverride = buildJoinColumnOverride( attributeMember, getPath(), context );
if ( currentPropertyJoinColumnOverride.isEmpty() ) {
currentPropertyJoinColumnOverride = null;
}
currentPropertyJoinTableOverride = buildJoinTableOverride( attributeMember, getPath(), context );
if ( currentPropertyJoinTableOverride.isEmpty() ) {
currentPropertyJoinTableOverride = null;
}
currentPropertyForeignKeyOverride = buildForeignKeyOverride( attributeMember, getPath(), context );
if ( currentPropertyForeignKeyOverride.isEmpty() ) {
currentPropertyForeignKeyOverride = null;
}
}
}
/**
* Get column overriding, property first, then parent, then holder
* replace the placeholder 'collection&&element' with nothing
* <p>
* These rules are here to support both JPA 2 and legacy overriding rules.
*/
@Override
public Column[] getOverriddenColumn(String propertyName) {
final var overriddenColumn = getExactOverriddenColumn( propertyName );
// support for non-map collections where no prefix is needed
return overriddenColumn == null && propertyName.contains( ".{element}." )
? getExactOverriddenColumn( propertyName.replace( ".{element}.", "." ) )
: overriddenColumn;
}
@Override
public ColumnTransformer getOverriddenColumnTransformer(String logicalColumnName) {
ColumnTransformer result = null;
if ( parent != null ) {
result = parent.getOverriddenColumnTransformer( logicalColumnName );
}
if ( result == null && currentPropertyColumnTransformerOverride != null ) {
result = currentPropertyColumnTransformerOverride.get( logicalColumnName );
}
if ( result == null && holderColumnTransformerOverride != null ) {
result = holderColumnTransformerOverride.get( logicalColumnName );
}
return result;
}
/**
* Get column overriding, property first, then parent, then holder
* find the overridden rules from the exact property name.
*/
private Column[] getExactOverriddenColumn(String propertyName) {
Column[] result = null;
if ( parent != null ) {
result = parent.getExactOverriddenColumn( propertyName );
}
if ( result == null && currentPropertyColumnOverride != null ) {
result = currentPropertyColumnOverride.get( propertyName );
}
if ( result == null && holderColumnOverride != null ) {
result = holderColumnOverride.get( propertyName );
}
return result;
}
/**
* Get column overriding, property first, then parent, then holder
* replace the placeholder 'collection&&element' with nothing
* <p>
* These rules are here to support both JPA 2 and legacy overriding rules.
*/
@Override
public JoinColumn[] getOverriddenJoinColumn(String propertyName) {
final JoinColumn[] result = getExactOverriddenJoinColumn( propertyName );
if ( result == null && propertyName.contains( ".{element}." ) ) {
//support for non map collections where no prefix is needed
//TODO cache the underlying regexp
return getExactOverriddenJoinColumn( propertyName.replace( ".{element}.", "." ) );
}
else {
return result;
}
}
/**
* Get column overriding, property first, then parent, then holder
*/
private JoinColumn[] getExactOverriddenJoinColumn(String propertyName) {
JoinColumn[] result = null;
if ( parent != null ) {
result = parent.getExactOverriddenJoinColumn( propertyName );
}
if ( result == null && currentPropertyJoinColumnOverride != null ) {
result = currentPropertyJoinColumnOverride.get( propertyName );
}
if ( result == null && holderJoinColumnOverride != null ) {
result = holderJoinColumnOverride.get( propertyName );
}
return result;
}
@Override
public ForeignKey getOverriddenForeignKey(String propertyName) {
final ForeignKey result = getExactOverriddenForeignKey( propertyName );
if ( result == null && propertyName.contains( ".{element}." ) ) {
//support for non map collections where no prefix is needed
//TODO cache the underlying regexp
return getExactOverriddenForeignKey( propertyName.replace( ".{element}.", "." ) );
}
return result;
}
private ForeignKey getExactOverriddenForeignKey(String propertyName) {
ForeignKey result = null;
if ( parent != null ) {
result = parent.getExactOverriddenForeignKey( propertyName );
}
if ( result == null && currentPropertyForeignKeyOverride != null ) {
result = currentPropertyForeignKeyOverride.get( propertyName );
}
if ( result == null && holderForeignKeyOverride != null ) {
result = holderForeignKeyOverride.get( propertyName );
}
return result;
}
/**
* Get column overriding, property first, then parent, then holder
* replace the placeholder 'collection&&element' with nothing
* <p>
* These rules are here to support both JPA 2 and legacy overriding rules.
*/
@Override
public JoinTable getJoinTable(MemberDetails attributeMember) {
final String propertyName = qualify( getPath(), attributeMember.getName() );
final JoinTable result = getOverriddenJoinTable( propertyName );
return result == null
? attributeMember.getDirectAnnotationUsage( JoinTable.class )
: result;
}
/**
* Get column overriding, property first, then parent, then holder
* replace the placeholder 'collection&&element' with nothing
*
* These rules are here to support both JPA 2 and legacy overriding rules.
*/
public JoinTable getOverriddenJoinTable(String propertyName) {
final JoinTable result = getExactOverriddenJoinTable( propertyName );
if ( result == null && propertyName.contains( ".{element}." ) ) {
//support for non map collections where no prefix is needed
//TODO cache the underlying regexp
return getExactOverriddenJoinTable( propertyName.replace( ".{element}.", "." ) );
}
else {
return result;
}
}
/**
* Get column overriding, property first, then parent, then holder
*/
private JoinTable getExactOverriddenJoinTable(String propertyName) {
JoinTable override = null;
if ( parent != null ) {
override = parent.getExactOverriddenJoinTable( propertyName );
}
if ( override == null && currentPropertyJoinTableOverride != null ) {
override = currentPropertyJoinTableOverride.get( propertyName );
}
if ( override == null && holderJoinTableOverride != null ) {
override = holderJoinTableOverride.get( propertyName );
}
return override;
}
private void buildHierarchyColumnOverride(ClassDetails element) {
ClassDetails current = element;
Map<String, Column[]> columnOverride = new HashMap<>();
Map<String, ColumnTransformer> columnTransformerOverride = new HashMap<>();
Map<String, JoinColumn[]> joinColumnOverride = new HashMap<>();
Map<String, JoinTable> joinTableOverride = new HashMap<>();
Map<String, ForeignKey> foreignKeyOverride = new HashMap<>();
while ( current != null && !ClassDetails.OBJECT_CLASS_DETAILS.equals( current ) ) {
if ( current.hasDirectAnnotationUsage( Entity.class )
|| current.hasDirectAnnotationUsage( MappedSuperclass.class )
|| current.hasDirectAnnotationUsage( Embeddable.class ) ) {
//FIXME is embeddable override?
Map<String, Column[]> currentOverride = buildColumnOverride( current, getPath(), context );
Map<String, ColumnTransformer> currentTransformerOverride = buildColumnTransformerOverride( current, context );
Map<String, JoinColumn[]> currentJoinOverride = buildJoinColumnOverride( current, getPath(), context );
Map<String, JoinTable> currentJoinTableOverride = buildJoinTableOverride( current, getPath(), context );
Map<String, ForeignKey> currentForeignKeyOverride = buildForeignKeyOverride( current, getPath(), context );
currentOverride.putAll( columnOverride ); //subclasses have precedence over superclasses
currentTransformerOverride.putAll( columnTransformerOverride ); //subclasses have precedence over superclasses
currentJoinOverride.putAll( joinColumnOverride ); //subclasses have precedence over superclasses
currentJoinTableOverride.putAll( joinTableOverride ); //subclasses have precedence over superclasses
currentForeignKeyOverride.putAll( foreignKeyOverride ); //subclasses have precedence over superclasses
columnOverride = currentOverride;
columnTransformerOverride = currentTransformerOverride;
joinColumnOverride = currentJoinOverride;
joinTableOverride = currentJoinTableOverride;
foreignKeyOverride = currentForeignKeyOverride;
}
current = current.getSuperClass();
}
holderColumnOverride = !columnOverride.isEmpty() ? columnOverride : null;
holderColumnTransformerOverride = !columnTransformerOverride.isEmpty() ? columnTransformerOverride : null;
holderJoinColumnOverride = !joinColumnOverride.isEmpty() ? joinColumnOverride : null;
holderJoinTableOverride = !joinTableOverride.isEmpty() ? joinTableOverride : null;
holderForeignKeyOverride = !foreignKeyOverride.isEmpty() ? foreignKeyOverride : null;
}
private static Map<String, Column[]> buildColumnOverride(
AnnotationTarget element,
String path,
MetadataBuildingContext context) {
final Map<String,Column[]> result = new HashMap<>();
if ( element == null ) {
return result;
}
final var modelContext = context.getBootstrapContext().getModelsContext();
final Map<String, List<Column>> columnOverrideMap = new HashMap<>();
final var overrides = element.getRepeatedAnnotationUsages( AttributeOverride.class, modelContext );
if ( isNotEmpty( overrides ) ) {
for ( AttributeOverride depAttr : overrides ) {
final String qualifiedName = qualify( path, depAttr.name() );
final Column column = depAttr.column();
if ( columnOverrideMap.containsKey( qualifiedName ) ) {
// already an entry, just add to that List
columnOverrideMap.get( qualifiedName ).add( column );
}
else {
// not yet an entry, create the list and add
final List<Column> list = new ArrayList<>();
list.add( column );
columnOverrideMap.put( qualifiedName, list );
}
}
}
else if ( useColumnForTimeZoneStorage( element, context ) ) {
final var column = createTemporalColumn( element, path, context );
if ( isOffsetTimeClass( element ) ) {
columnOverrideMap.put(
path + "." + OffsetTimeCompositeUserType.LOCAL_TIME_NAME,
List.of( column )
);
}
else {
columnOverrideMap.put(
path + "." + AbstractTimeZoneStorageCompositeUserType.INSTANT_NAME,
List.of( column )
);
}
final var offsetColumn = createTimeZoneColumn( element, column, context );
columnOverrideMap.put(
path + "." + AbstractTimeZoneStorageCompositeUserType.ZONE_OFFSET_NAME,
List.of( offsetColumn )
);
}
columnOverrideMap.forEach( (name, columns) -> {
result.put( name, columns.toArray(new Column[0]) );
} );
return result;
}
private static Column createTimeZoneColumn(
AnnotationTarget element,
Column column,
MetadataBuildingContext context) {
final var timeZoneColumn = element.getDirectAnnotationUsage( TimeZoneColumn.class );
final ColumnJpaAnnotation created =
JpaAnnotations.COLUMN.createUsage( context.getBootstrapContext().getModelsContext() );
final String columnName =
timeZoneColumn != null
? timeZoneColumn.name()
: column.name() + "_tz";
created.name( columnName );
created.nullable( column.nullable() );
if ( timeZoneColumn != null ) {
created.options( timeZoneColumn.options() );
created.comment( timeZoneColumn.comment() );
created.table( timeZoneColumn.table() );
created.insertable( timeZoneColumn.insertable() );
created.updatable( timeZoneColumn.updatable() );
created.columnDefinition( timeZoneColumn.columnDefinition() );
}
else {
created.table( column.table() );
created.insertable( column.insertable() );
created.updatable( column.updatable() );
created.columnDefinition( column.columnDefinition() );
created.options( column.options() );
created.comment( column.comment() );
}
return created;
}
private static Column createTemporalColumn(
AnnotationTarget element,
String path,
MetadataBuildingContext context) {
int precision;
int secondPrecision;
final var annotatedColumn = element.getDirectAnnotationUsage( Column.class );
if ( annotatedColumn != null ) {
if ( isNotBlank( annotatedColumn.name() ) ) {
return annotatedColumn;
}
precision = annotatedColumn.precision();
secondPrecision = annotatedColumn.secondPrecision();
}
else {
precision = 0;
secondPrecision = -1;
}
// Base the name of the synthetic dateTime field on the name of the original attribute
final Identifier implicitName = context.getObjectNameNormalizer().normalizeIdentifierQuoting(
context.getBuildingOptions().getImplicitNamingStrategy().determineBasicColumnName(
new ImplicitBasicColumnNameSource() {
final AttributePath attributePath = AttributePath.parse(path);
@Override
public AttributePath getAttributePath() {
return attributePath;
}
@Override
public boolean isCollectionElement() {
return false;
}
@Override
public MetadataBuildingContext getBuildingContext() {
return context;
}
}
)
);
final ColumnJpaAnnotation created =
JpaAnnotations.COLUMN.createUsage( context.getBootstrapContext().getModelsContext() );
if ( StringHelper.isNotEmpty( implicitName.getText() ) ) {
created.name( implicitName.getText() );
}
created.precision( precision );
created.secondPrecision( secondPrecision );
return created;
}
private static Map<String, ColumnTransformer> buildColumnTransformerOverride(AnnotationTarget element, MetadataBuildingContext context) {
final var sourceModelContext = context.getBootstrapContext().getModelsContext();
final Map<String, ColumnTransformer> columnOverride = new HashMap<>();
if ( element != null ) {
element.forEachAnnotationUsage( ColumnTransformer.class, sourceModelContext, (usage) -> {
columnOverride.put( usage.forColumn(), usage );
} );
}
return columnOverride;
}
private static Map<String, JoinColumn[]> buildJoinColumnOverride(AnnotationTarget element, String path, MetadataBuildingContext context) {
final Map<String, JoinColumn[]> columnOverride = new HashMap<>();
if ( element != null ) {
for ( var override : buildAssociationOverrides( element, path, context ) ) {
columnOverride.put( qualify( path, override.name() ), override.joinColumns() );
}
}
return columnOverride;
}
private static Map<String, ForeignKey> buildForeignKeyOverride(AnnotationTarget element, String path, MetadataBuildingContext context) {
final Map<String, ForeignKey> foreignKeyOverride = new HashMap<>();
if ( element != null ) {
for ( var override : buildAssociationOverrides( element, path, context ) ) {
foreignKeyOverride.put( qualify( path, override.name() ), override.foreignKey() );
}
}
return foreignKeyOverride;
}
private static AssociationOverride[] buildAssociationOverrides(AnnotationTarget element, String path, MetadataBuildingContext context) {
return element.getRepeatedAnnotationUsages( AssociationOverride.class, context.getBootstrapContext().getModelsContext() );
}
private static Map<String, JoinTable> buildJoinTableOverride(AnnotationTarget element, String path, MetadataBuildingContext context) {
final Map<String, JoinTable> result = new HashMap<>();
if ( element != null ) {
for ( var override : buildAssociationOverrides( element, path, context ) ) {
if ( isEmpty( override.joinColumns() ) ) {
result.put( qualify( path, override.name() ), override.joinTable() );
}
}
}
return result;
}
@Override
public void setParentProperty(String parentProperty) {
throw new AssertionFailure( "Setting the parent property to a non component" );
}
}
|
AbstractPropertyHolder
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/fetchprofiles/join/Enrollment.java
|
{
"start": 187,
"end": 994
}
|
class ____ {
private Integer id;
private CourseOffering offering;
private Student student;
private int finalGrade;
public Enrollment() {
}
public Enrollment(Integer id, CourseOffering offering, Student student) {
this.id = id;
this.offering = offering;
this.student = student;
}
public Integer getId() {
return id;
}
public void setId(Integer id) {
this.id = id;
}
public CourseOffering getOffering() {
return offering;
}
public void setOffering(CourseOffering offering) {
this.offering = offering;
}
public Student getStudent() {
return student;
}
public void setStudent(Student student) {
this.student = student;
}
public int getFinalGrade() {
return finalGrade;
}
public void setFinalGrade(int finalGrade) {
this.finalGrade = finalGrade;
}
}
|
Enrollment
|
java
|
elastic__elasticsearch
|
x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10DoubleEvaluator.java
|
{
"start": 4163,
"end": 4741
}
|
class ____ implements EvalOperator.ExpressionEvaluator.Factory {
private final Source source;
private final EvalOperator.ExpressionEvaluator.Factory val;
public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory val) {
this.source = source;
this.val = val;
}
@Override
public Log10DoubleEvaluator get(DriverContext context) {
return new Log10DoubleEvaluator(source, val.get(context), context);
}
@Override
public String toString() {
return "Log10DoubleEvaluator[" + "val=" + val + "]";
}
}
}
|
Factory
|
java
|
apache__kafka
|
streams/src/main/java/org/apache/kafka/streams/processor/api/FixedKeyRecord.java
|
{
"start": 1256,
"end": 1450
}
|
class ____ all the data attributes of a record: the key and value, but
* also the timestamp of the record and any record headers.
* Though key is not allowed to be changes.
*
* This
|
encapsulates
|
java
|
elastic__elasticsearch
|
x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/DataPartitioning.java
|
{
"start": 706,
"end": 2770
}
|
enum ____ {
/**
* Automatically select the data partitioning based on the query and index.
* Usually that's {@link #SEGMENT}, but for small indices it's {@link #SHARD}.
* When the additional overhead from {@link #DOC} is fairly low then it'll
* pick {@link #DOC}.
*/
AUTO,
/**
* Make one partition per shard. This is generally the slowest option, but it
* has the lowest CPU overhead.
*/
SHARD,
/**
* Partition on segment boundaries, this doesn't allow forking to as many CPUs
* as {@link #DOC} but it has much lower overhead.
* <p>
* It packs segments smaller than {@link LuceneSliceQueue#MAX_DOCS_PER_SLICE}
* docs together into a partition. Larger segments get their own partition.
* Each slice contains no more than {@link LuceneSliceQueue#MAX_SEGMENTS_PER_SLICE}.
*/
SEGMENT,
/**
* Partitions into dynamic-sized slices to improve CPU utilization while keeping overhead low.
* This approach is more flexible than {@link #SEGMENT} and works as follows:
*
* <ol>
* <li>The slice size starts from a desired size based on {@code task_concurrency} but is capped
* at around {@link LuceneSliceQueue#MAX_DOCS_PER_SLICE}. This prevents poor CPU usage when
* matching documents are clustered together.</li>
* <li>For small and medium segments (less than five times the desired slice size), it uses a
* slightly different {@link #SEGMENT} strategy, which also splits segments that are larger
* than the desired size. See {@link org.apache.lucene.search.IndexSearcher#slices(List, int, int, boolean)}.</li>
* <li>For very large segments, multiple segments are not combined into a single slice. This allows
* one driver to process an entire large segment until other drivers steal the work after finishing
* their own tasks. See {@link LuceneSliceQueue#nextSlice(LuceneSlice)}.</li>
* </ol>
*/
DOC;
@FunctionalInterface
public
|
DataPartitioning
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/nullness/ReturnMissingNullableTest.java
|
{
"start": 30916,
"end": 31391
}
|
class ____ {
public String getMessage() {
return null;
}
}
""")
.doTest();
}
@Test
public void negativeCases_typeVariableUsage() {
createCompilationTestHelper()
.addSourceLines(
"com/google/errorprone/bugpatterns/nullness/LiteralNullReturnTest.java",
"""
package com.google.errorprone.bugpatterns.nullness;
public
|
LiteralNullReturnTest
|
java
|
apache__flink
|
flink-state-backends/flink-statebackend-rocksdb/src/test/java/org/apache/flink/state/rocksdb/RocksDBStateDownloaderTest.java
|
{
"start": 10610,
"end": 10766
}
|
class ____ extends IOException {
SpecifiedException(String message) {
super(message);
}
}
private static
|
SpecifiedException
|
java
|
mapstruct__mapstruct
|
core/src/main/java/org/mapstruct/factory/package-info.java
|
{
"start": 221,
"end": 422
}
|
class ____ the retrieval of mapper instances if no
* explicit component model such as CDI is configured via {@link org.mapstruct.Mapper#componentModel()}.
* </p>
*
*/
package org.mapstruct.factory;
|
for
|
java
|
dropwizard__dropwizard
|
dropwizard-logging/src/main/java/io/dropwizard/logging/common/ExternalLoggingFactory.java
|
{
"start": 269,
"end": 580
}
|
class ____ implements LoggingFactory {
@Override
public void configure(MetricRegistry metricRegistry, String name) {
// Do nothing
}
@Override
public void stop() {
// Do nothing
}
@Override
public void reset() {
// Do nothing
}
}
|
ExternalLoggingFactory
|
java
|
spring-projects__spring-framework
|
spring-context/src/test/java/org/springframework/context/annotation/configuration/Spr10668Tests.java
|
{
"start": 1528,
"end": 1669
}
|
class ____ extends ParentConfig {
@Bean
public MyComponentImpl myComponent() {
return new MyComponentImpl();
}
}
public
|
ChildConfig
|
java
|
spring-projects__spring-framework
|
spring-context/src/test/java/org/springframework/scheduling/annotation/ScheduledAnnotationBeanPostProcessorTests.java
|
{
"start": 36489,
"end": 36611
}
|
class ____ {
@Scheduled(fixedRate = 5, timeUnit = TimeUnit.SECONDS)
void fixedRate() {
}
}
static
|
FixedRateInSeconds
|
java
|
apache__camel
|
components/camel-spring-parent/camel-spring-ws/src/main/java/org/apache/camel/component/spring/ws/converter/StringSourceConverter.java
|
{
"start": 1703,
"end": 2494
}
|
class ____ {
private StringSourceConverter() {
}
/**
* Converts a Spring-WS {@link org.springframework.xml.transform.StringSource} to a Camel {@link StringSource}
*/
@Converter
public static StringSource toStringSourceFromSpring(org.springframework.xml.transform.StringSource springStringSource) {
return new StringSource(springStringSource.toString());
}
/**
* Converts a Camel {@link StringSource} to a Spring-WS {@link org.springframework.xml.transform.StringSource}
*/
@Converter
public static org.springframework.xml.transform.StringSource toStringSourceFromCamel(StringSource camelStringSource) {
return new org.springframework.xml.transform.StringSource(camelStringSource.getText());
}
}
|
StringSourceConverter
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/index/mapper/GeoPointFieldMapper.java
|
{
"start": 4040,
"end": 4411
}
|
class ____ extends AbstractPointGeometryFieldMapper<GeoPoint> {
public static final String CONTENT_TYPE = "geo_point";
private static Builder builder(FieldMapper in) {
return toType(in).builder;
}
private static GeoPointFieldMapper toType(FieldMapper in) {
return (GeoPointFieldMapper) in;
}
public static final
|
GeoPointFieldMapper
|
java
|
alibaba__fastjson
|
src/test/java/com/alibaba/json/bvt/parser/deser/generic/GenericArrayTest3.java
|
{
"start": 874,
"end": 949
}
|
class ____ extends A<Number, String> {
}
public static
|
VO
|
java
|
assertj__assertj-core
|
assertj-tests/assertj-integration-tests/assertj-core-tests/src/test/java/org/assertj/tests/core/api/recursive/comparison/legacy/RecursiveComparisonAssert_isEqualTo_comparingOnlyFields_Test.java
|
{
"start": 9567,
"end": 15990
}
|
class ____ {
final int a;
A2(int a) {
this.a = a;
}
}
@Test
public void should_fix_2610() {
// GIVEN
A1 actual = new A1(1, 2);
A2 expected = new A2(2);
recursiveComparisonConfiguration.compareOnlyFields("a");
// WHEN/THEN
ComparisonDifference difference = javaTypeDiff("a", actual.a, expected.a);
compareRecursivelyFailsWithDifferences(actual, expected, difference);
}
@ParameterizedTest(name = "actual={0} / expected={1}")
@MethodSource
void should_fail_when_actual_is_a_container_whose_elements_differs_from_expected_on_compared_fields(Object actual,
Object expected,
ComparisonDifference difference) {
// GIVEN
recursiveComparisonConfiguration.compareOnlyFields("name", "subject");
// WHEN/THEN
compareRecursivelyFailsWithDifferences(actual, expected, difference);
}
private static Stream<Arguments> should_fail_when_actual_is_a_container_whose_elements_differs_from_expected_on_compared_fields() {
Student john1 = new Student("John", "math", 1);
Student john2 = new Student("John", "math", 1);
Student rohit = new Student("Rohit", "english", 2);
Student rohyt = new Student("Rohyt", "english", 2);
ComparisonDifference difference = javaTypeDiff("[1].name", "Rohit", "Rohyt");
return Stream.of(arguments(list(john1, rohit), list(john2, rohyt), difference),
arguments(array(john1, rohit), array(john2, rohyt), difference),
arguments(set(john1, rohit), set(john2, rohyt), difference));
}
// #3129
@ParameterizedTest(name = "{2}: actual={0} / expected={1}")
@MethodSource
void should_fail_when_non_existent_fields_specified(Object actual, Object expected, String[] fieldNamesToCompare,
String unknownFields) {
// GIVEN
recursiveComparisonConfiguration.compareOnlyFields(fieldNamesToCompare);
// WHEN
IllegalArgumentException iae = catchIllegalArgumentException(() -> assertThat(actual).usingRecursiveComparison(recursiveComparisonConfiguration)
.comparingOnlyFields(fieldNamesToCompare)
.isEqualTo(expected));
// THEN
then(iae).hasMessage("The following fields don't exist: " + unknownFields);
}
private static Stream<Arguments> should_fail_when_non_existent_fields_specified() {
Person john = new Person("John");
Person alice = new Person("Alice");
Person jack = new Person("Jack");
Person joan = new Person("Joan");
john.neighbour = jack;
alice.neighbour = joan;
jack.neighbour = john;
joan.neighbour = alice;
FriendlyPerson sherlockHolmes = friend("Sherlock Holmes");
FriendlyPerson drWatson = friend("Dr. John Watson");
FriendlyPerson mollyHooper = friend("Molly Hooper");
sherlockHolmes.friends.add(drWatson);
sherlockHolmes.friends.add(mollyHooper);
drWatson.friends.add(mollyHooper);
drWatson.friends.add(sherlockHolmes);
return Stream.of(arguments(john, alice, array("naame"), "{naame}"),
arguments(john, alice, array("name", "neighbour", "number"), "{number}"),
arguments(john, alice, array("neighbor"), "{neighbor}"),
arguments(john, alice, array("neighbour.neighbor.name"), "{neighbor in <neighbour.neighbor.name>}"),
arguments(sherlockHolmes, drWatson, array("friends.other"), "{other in <friends.other>}"),
arguments(john, alice, array("neighbour.neighbour.name", "neighbour.neighbour.number"),
"{number in <neighbour.neighbour.number>}"));
}
@Test
void should_fail_when_actual_differs_from_expected_lists_on_compared_fields() {
// GIVEN
Person john = new Person("John");
Person alice = new Person("Alice");
Person jack = new Person("Jack");
Person joan = new Person("Joan");
john.neighbour = jack;
alice.neighbour = joan;
jack.neighbour = john;
joan.neighbour = alice;
List<Person> actual = list(john, alice);
List<Person> expected = list(jack, joan);
recursiveComparisonConfiguration.compareOnlyFields("neighbour.neighbour.name");
// WHEN/THEN
ComparisonDifference difference1 = javaTypeDiff("[0].neighbour.neighbour.name", "John", "Jack");
ComparisonDifference difference2 = javaTypeDiff("[1].neighbour.neighbour.name", "Alice", "Joan");
compareRecursivelyFailsWithDifferences(actual, expected, difference1, difference2);
}
// #3129
@Test
void should_pass_when_fields_are_nested() {
// GIVEN
Person john = new Person("John");
Person alice = new Person("Alice");
Person jack = new Person("Jack");
Person joan = new Person("Joan");
john.neighbour = jack;
alice.neighbour = joan;
jack.neighbour = jack;
joan.neighbour = jack;
// WHEN/THEN
then(john).usingRecursiveComparison(recursiveComparisonConfiguration)
.comparingOnlyFields("neighbour.neighbour.name")
.isEqualTo(alice);
}
@Test
void should_pass_with_cycles() {
// GIVEN
Person john = new Person("John");
Person alice = new Person("Alice");
Person jack = new Person("Jack");
Person joan = new Person("Joan");
john.neighbour = jack;
alice.neighbour = joan;
jack.neighbour = jack;
joan.neighbour = jack;
// WHEN/THEN
then(john).usingRecursiveComparison(recursiveComparisonConfiguration)
.comparingOnlyFields("neighbour.neighbour.neighbour.neighbour")
.isEqualTo(alice);
}
@Test
void cannot_report_unknown_compared_fields_if_parent_object_is_null() {
// GIVEN
Person john = new Person("John");
Person alice = new Person("Alice");
// WHEN/THEN
// badField is not detected as an unknown field since john.neighbour is null
// neighbour fields are compared and match since they are both null
then(john).usingRecursiveComparison(recursiveComparisonConfiguration)
.comparingOnlyFields("neighbour.badField")
.isEqualTo(alice);
}
static
|
A2
|
java
|
spring-projects__spring-framework
|
spring-aop/src/main/java/org/springframework/aop/interceptor/AbstractMonitoringInterceptor.java
|
{
"start": 1437,
"end": 2503
}
|
class ____ extends AbstractTraceInterceptor {
private String prefix = "";
private String suffix = "";
private boolean logTargetClassInvocation = false;
/**
* Set the text that will get appended to the trace data.
* <p>Default is none.
*/
public void setPrefix(@Nullable String prefix) {
this.prefix = (prefix != null ? prefix : "");
}
/**
* Return the text that will get appended to the trace data.
*/
protected String getPrefix() {
return this.prefix;
}
/**
* Set the text that will get prepended to the trace data.
* <p>Default is none.
*/
public void setSuffix(@Nullable String suffix) {
this.suffix = (suffix != null ? suffix : "");
}
/**
* Return the text that will get prepended to the trace data.
*/
protected String getSuffix() {
return this.suffix;
}
/**
* Set whether to log the invocation on the target class, if applicable
* (i.e. if the method is actually delegated to the target class).
* <p>Default is "false", logging the invocation based on the proxy
* interface/
|
AbstractMonitoringInterceptor
|
java
|
apache__maven
|
its/core-it-support/core-it-plugins/maven-it-plugin-uses-wagon/src/main/java/org/apache/maven/plugin/coreit/LookupWagonMojo.java
|
{
"start": 1823,
"end": 3832
}
|
class ____ extends AbstractMojo {
/**
* The Wagon manager used to retrieve wagon providers.
*/
@Component
private WagonManager wagonManager;
/**
* The path to the properties file used to track the results of the wagon lookups.
*/
@Parameter(property = "wagon.outputFile")
private File outputFile;
/**
* The URLs for which to look up wagons.
*/
@Parameter
private String[] urls;
/**
* Runs this mojo.
*
* @throws MojoFailureException If the attached file has not been set.
*/
public void execute() throws MojoExecutionException, MojoFailureException {
Properties loaderProperties = new Properties();
if (urls != null) {
for (int i = 0; i < urls.length; i++) {
String url = urls[i];
getLog().info("[MAVEN-CORE-IT-LOG] Looking up wagon for URL " + url);
try {
Repository repo = new Repository("repo-" + i, url);
Wagon wagon = wagonManager.getWagon(repo);
getLog().info("[MAVEN-CORE-IT-LOG] " + wagon);
loaderProperties.setProperty(url + ".hash", Integer.toString(System.identityHashCode(wagon)));
loaderProperties.setProperty(
url + ".class", wagon.getClass().getName());
} catch (Exception e) {
getLog().warn("[MAVEN-CORE-IT-LOG] Failed to look up wagon for URL " + url, e);
}
}
}
getLog().info("[MAVEN-CORE-IT-LOG] Creating output file " + outputFile);
outputFile.getParentFile().mkdirs();
try (OutputStream out = new FileOutputStream(outputFile)) {
loaderProperties.store(out, "MAVEN-CORE-IT-LOG");
} catch (IOException e) {
throw new MojoExecutionException(e);
}
getLog().info("[MAVEN-CORE-IT-LOG] Created output file " + outputFile);
}
}
|
LookupWagonMojo
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/event/internal/ReattachVisitor.java
|
{
"start": 686,
"end": 3114
}
|
class ____ extends ProxyVisitor {
private final Object ownerIdentifier;
private final Object owner;
public ReattachVisitor(EventSource session, Object ownerIdentifier, Object owner) {
super( session );
this.ownerIdentifier = ownerIdentifier;
this.owner = owner;
}
/**
* Retrieve the identifier of the entity being visited.
*
* @return The entity's identifier.
*/
final Object getOwnerIdentifier() {
return ownerIdentifier;
}
/**
* Retrieve the entity being visited.
*
* @return The entity.
*/
final Object getOwner() {
return owner;
}
/**
* {@inheritDoc}
*/
@Override
Object processComponent(Object component, CompositeType componentType) throws HibernateException {
final Type[] types = componentType.getSubtypes();
if ( component == null ) {
processValues( new Object[types.length], types );
}
else {
super.processComponent( component, componentType );
}
return null;
}
/**
* Schedules a collection for deletion.
*
* @param role The persister representing the collection to be removed.
* @param collectionKey The collection key (differs from owner-id in the case of property-refs).
* @param source The session from which the request originated.
*
*/
void removeCollection(CollectionPersister role, Object collectionKey, EventSource source)
throws HibernateException {
if ( EVENT_LISTENER_LOGGER.isTraceEnabled() ) {
EVENT_LISTENER_LOGGER.collectionDereferencedWhileTransient(
collectionInfoString( role, ownerIdentifier, source.getFactory() )
);
}
source.getActionQueue()
.addAction( new CollectionRemoveAction( owner, role, collectionKey, false, source ) );
}
/**
* This version is slightly different for say
* {@link org.hibernate.type.CollectionType#getKeyOfOwner} in that here we
* need to assume that the owner is not yet associated with the session,
* and thus we cannot rely on the owner's EntityEntry snapshot...
*
* @param role The persister for the collection role being processed.
*
* @return The value from the owner that identifies the grouping into the collection
*/
final Object extractCollectionKeyFromOwner(CollectionPersister role) {
final var collectionType = role.getCollectionType();
return collectionType.useLHSPrimaryKey()
? ownerIdentifier :
role.getOwnerEntityPersister()
.getPropertyValue( owner, collectionType.getLHSPropertyName() );
}
}
|
ReattachVisitor
|
java
|
apache__maven
|
impl/maven-impl/src/main/java/org/apache/maven/impl/di/SessionScope.java
|
{
"start": 1514,
"end": 4690
}
|
class ____ {
private final Map<Key<?>, CachingProvider<?>> provided = new ConcurrentHashMap<>();
public <T> void seed(Class<T> clazz, Supplier<T> value) {
provided.put(Key.of(clazz), new CachingProvider<>(value));
}
@SuppressWarnings("unchecked")
public <T> Supplier<T> scope(Key<T> key, Supplier<T> unscoped) {
Supplier<?> provider = provided.computeIfAbsent(key, k -> new CachingProvider<>(unscoped));
return (Supplier<T>) provider;
}
public Collection<CachingProvider<?>> providers() {
return provided.values();
}
}
protected final List<ScopeState> values = new CopyOnWriteArrayList<>();
public void enter() {
values.add(0, new ScopeState());
}
protected ScopeState getScopeState() {
if (values.isEmpty()) {
throw new OutOfScopeException("Cannot access session scope outside of a scoping block");
}
return values.get(0);
}
public void exit() {
if (values.isEmpty()) {
throw new IllegalStateException();
}
values.remove(0);
}
public <T> void seed(Class<T> clazz, Supplier<T> value) {
getScopeState().seed(clazz, value);
}
public <T> void seed(Class<T> clazz, T value) {
seed(clazz, (Supplier<T>) () -> value);
}
@Nonnull
@Override
public <T> Supplier<T> scope(@Nonnull Key<T> key, @Nonnull Supplier<T> unscoped) {
// Lazy evaluating provider
return () -> {
if (values.isEmpty()) {
return createProxy(key, unscoped);
} else {
return getScopeState().scope(key, unscoped).get();
}
};
}
@SuppressWarnings("unchecked")
protected <T> T createProxy(Key<T> key, Supplier<T> unscoped) {
InvocationHandler dispatcher = (proxy, method, args) -> dispatch(key, unscoped, method, args);
Class<T> superType = (Class<T>) Types.getRawType(key.getType());
Class<?>[] interfaces = getInterfaces(superType);
return (T) java.lang.reflect.Proxy.newProxyInstance(superType.getClassLoader(), interfaces, dispatcher);
}
protected <T> Object dispatch(Key<T> key, Supplier<T> unscoped, Method method, Object[] args) throws Throwable {
method.setAccessible(true);
try {
return method.invoke(getScopeState().scope(key, unscoped).get(), args);
} catch (InvocationTargetException e) {
throw e.getCause();
}
}
protected Class<?>[] getInterfaces(Class<?> superType) {
if (superType.isInterface()) {
return new Class<?>[] {superType};
}
for (Annotation a : superType.getAnnotations()) {
Class<? extends Annotation> annotationType = a.annotationType();
if (isTypeAnnotation(annotationType)) {
try {
Class<?>[] value =
(Class<?>[]) annotationType.getMethod("value").invoke(a);
if (value.length == 0) {
// Only direct interfaces implemented by the
|
ScopeState
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/resource/transaction/jta/BasicJtaTransactionManagerTests.java
|
{
"start": 193,
"end": 349
}
|
class ____ extends AbstractBasicJtaTestScenarios {
@Override
protected boolean preferUserTransactions() {
return false;
}
}
|
BasicJtaTransactionManagerTests
|
java
|
apache__flink
|
flink-core/src/main/java/org/apache/flink/api/java/typeutils/runtime/kryo/Serializers.java
|
{
"start": 2094,
"end": 2277
}
|
interface ____ provide custom serialization for their classes. Also, there is a Java Annotation for
* adding a default serializer (@DefaultSerializer) to classes.
*/
@Internal
public
|
to
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.