language stringclasses 1 value | repo stringclasses 60 values | path stringlengths 22 294 | class_span dict | source stringlengths 13 1.16M | target stringlengths 1 113 |
|---|---|---|---|---|---|
java | assertj__assertj-core | assertj-core/src/test/java/org/assertj/core/api/chararray/CharArrayAssert_usingDefaultElementComparator_Test.java | {
"start": 1168,
"end": 1819
} | class ____ extends CharArrayAssertBaseTest {
private Objects objectsBefore;
@BeforeEach
void before() {
objectsBefore = getObjects(assertions);
}
@Override
protected CharArrayAssert invoke_api_method() {
return assertions.usingElementComparator(alwaysEqual())
.usingDefaultElementComparator();
}
@Override
protected void verify_internal_effects() {
assertThat(getObjects(assertions)).isSameAs(objectsBefore);
assertThat(getArrays(assertions)).isSameAs(CharArrays.instance());
assertThat(getArrays(assertions).getComparator()).isNull();
}
}
| CharArrayAssert_usingDefaultElementComparator_Test |
java | apache__kafka | server-common/src/main/java/org/apache/kafka/timeline/TimelineHashSet.java | {
"start": 1372,
"end": 3190
} | class ____<T>
implements SnapshottableHashTable.ElementWithStartEpoch {
private final T value;
private long startEpoch;
TimelineHashSetEntry(T value) {
this.value = value;
this.startEpoch = SnapshottableHashTable.LATEST_EPOCH;
}
public T getValue() {
return value;
}
@Override
public void setStartEpoch(long startEpoch) {
this.startEpoch = startEpoch;
}
@Override
public long startEpoch() {
return startEpoch;
}
@SuppressWarnings("unchecked")
@Override
public boolean equals(Object o) {
if (!(o instanceof TimelineHashSetEntry)) return false;
TimelineHashSetEntry<T> other = (TimelineHashSetEntry<T>) o;
return value.equals(other.value);
}
@Override
public int hashCode() {
return value.hashCode();
}
}
public TimelineHashSet(SnapshotRegistry snapshotRegistry, int expectedSize) {
super(snapshotRegistry, expectedSize);
}
@Override
public int size() {
return size(SnapshottableHashTable.LATEST_EPOCH);
}
public int size(long epoch) {
return snapshottableSize(epoch);
}
@Override
public boolean isEmpty() {
return isEmpty(SnapshottableHashTable.LATEST_EPOCH);
}
public boolean isEmpty(long epoch) {
return snapshottableSize(epoch) == 0;
}
@Override
public boolean contains(Object key) {
return contains(key, SnapshottableHashTable.LATEST_EPOCH);
}
public boolean contains(Object object, long epoch) {
return snapshottableGet(new TimelineHashSetEntry<>(object), epoch) != null;
}
final | TimelineHashSetEntry |
java | google__error-prone | core/src/test/java/com/google/errorprone/matchers/CompileTimeConstantExpressionMatcherTest.java | {
"start": 3301,
"end": 4258
} | class ____ {
private final int nonfinal_int;
public Test(int i) {
nonfinal_int = i;
}
public void m(String s) {
// BUG: Diagnostic contains: false
String s1 = s;
// BUG: Diagnostic contains: false
int int2 = s.length();
// BUG: Diagnostic contains: false
Integer int3 = nonfinal_int;
// BUG: Diagnostic contains: false
Integer int4 = 14 * nonfinal_int;
// BUG: Diagnostic contains: true
boolean bool4 = false;
}
}
""")
.doTest();
}
@Test
public void finalCompileTimeConstantMethodParameters() {
testHelper
.addSourceLines(
"Test.java",
"""
import com.google.errorprone.annotations.CompileTimeConstant;
public | Test |
java | spring-projects__spring-framework | spring-messaging/src/test/java/org/springframework/messaging/handler/invocation/ResolvableMethod.java | {
"start": 2357,
"end": 2450
} | class ____ spring-web so it can
* be used for tests in spring-messaging.
*
* <p>Convenience | in |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ZKUtil.java | {
"start": 1401,
"end": 6137
} | class ____ {
/**
* Parse ACL permission string, partially borrowed from
* ZooKeeperMain private method
*/
private static int getPermFromString(String permString) {
int perm = 0;
for (int i = 0; i < permString.length(); i++) {
char c = permString.charAt(i);
switch (c) {
case 'r':
perm |= ZooDefs.Perms.READ;
break;
case 'w':
perm |= ZooDefs.Perms.WRITE;
break;
case 'c':
perm |= ZooDefs.Perms.CREATE;
break;
case 'd':
perm |= ZooDefs.Perms.DELETE;
break;
case 'a':
perm |= ZooDefs.Perms.ADMIN;
break;
default:
throw new BadAclFormatException(
"Invalid permission '" + c + "' in permission string '" +
permString + "'");
}
}
return perm;
}
/**
* Helper method to remove a subset of permissions (remove) from a
* given set (perms).
* @param perms The permissions flag to remove from. Should be an OR of a
* some combination of {@link ZooDefs.Perms}
* @param remove The permissions to be removed. Should be an OR of a
* some combination of {@link ZooDefs.Perms}
* @return A permissions flag that is an OR of {@link ZooDefs.Perms}
* present in perms and not present in remove
*/
public static int removeSpecificPerms(int perms, int remove) {
return perms ^ remove;
}
/**
* Parse comma separated list of ACL entries to secure generated nodes, e.g.
* <code>sasl:hdfs/host1@MY.DOMAIN:cdrwa,sasl:hdfs/host2@MY.DOMAIN:cdrwa</code>
*
* @param aclString aclString.
* @return ACL list
* @throws BadAclFormatException if an ACL is invalid
*/
public static List<ACL> parseACLs(String aclString) throws
BadAclFormatException {
List<ACL> acl = Lists.newArrayList();
if (aclString == null) {
return acl;
}
List<String> aclComps = Lists.newArrayList(
Splitter.on(',').omitEmptyStrings().trimResults()
.split(aclString));
for (String a : aclComps) {
// from ZooKeeperMain private method
int firstColon = a.indexOf(':');
int lastColon = a.lastIndexOf(':');
if (firstColon == -1 || lastColon == -1 || firstColon == lastColon) {
throw new BadAclFormatException(
"ACL '" + a + "' not of expected form scheme:id:perm");
}
ACL newAcl = new ACL();
newAcl.setId(new Id(a.substring(0, firstColon), a.substring(
firstColon + 1, lastColon)));
newAcl.setPerms(getPermFromString(a.substring(lastColon + 1)));
acl.add(newAcl);
}
return acl;
}
/**
* Parse a comma-separated list of authentication mechanisms. Each
* such mechanism should be of the form 'scheme:auth' -- the same
* syntax used for the 'addAuth' command in the ZK CLI.
*
* @param authString the comma-separated auth mechanisms
* @return a list of parsed authentications
* @throws BadAuthFormatException if the auth format is invalid
*/
public static List<ZKAuthInfo> parseAuth(String authString) throws
BadAuthFormatException{
List<ZKAuthInfo> ret = Lists.newArrayList();
if (authString == null) {
return ret;
}
List<String> authComps = Lists.newArrayList(
Splitter.on(',').omitEmptyStrings().trimResults()
.split(authString));
for (String comp : authComps) {
String parts[] = comp.split(":", 2);
if (parts.length != 2) {
throw new BadAuthFormatException(
"Auth '" + comp + "' not of expected form scheme:auth");
}
ret.add(new ZKAuthInfo(parts[0],
parts[1].getBytes(StandardCharsets.UTF_8)));
}
return ret;
}
/**
* Because ZK ACLs and authentication information may be secret,
* allow the configuration values to be indirected through a file
* by specifying the configuration as "@/path/to/file". If this
* syntax is used, this function will return the contents of the file
* as a String.
*
* @param valInConf the value from the Configuration
* @return either the same value, or the contents of the referenced
* file if the configured value starts with "@"
* @throws IOException if the file cannot be read
*/
public static String resolveConfIndirection(String valInConf)
throws IOException {
if (valInConf == null) return null;
if (!valInConf.startsWith("@")) {
return valInConf;
}
String path = valInConf.substring(1).trim();
return Files.asCharSource(new File(path), StandardCharsets.UTF_8).read().trim();
}
/**
* An authentication token passed to ZooKeeper.addAuthInfo
*/
@InterfaceAudience.Private
public static | ZKUtil |
java | apache__camel | core/camel-support/src/main/java/org/apache/camel/throttling/ThrottlingExceptionRoutePolicy.java | {
"start": 4572,
"end": 14514
} | class ____) can be separated by comma.")
private String exceptions;
@Metadata(description = "Logging level for state changes", defaultValue = "DEBUG")
private LoggingLevel stateLoggingLevel = LoggingLevel.DEBUG;
private List<Class<?>> throttledExceptions;
// handler for half open circuit can be used instead of resuming route to check on resources
@Metadata(label = "advanced",
description = "Custom check to perform whether the circuit breaker can move to half-open state."
+ " If set then this is used instead of resuming the route.")
private ThrottlingExceptionHalfOpenHandler halfOpenHandler;
// stateful information
private final AtomicInteger failures = new AtomicInteger();
private final AtomicInteger success = new AtomicInteger();
private final AtomicInteger state = new AtomicInteger(STATE_CLOSED);
private final AtomicBoolean keepOpenBool = new AtomicBoolean();
private volatile Timer halfOpenTimer;
private volatile long lastFailure;
private volatile long openedAt;
public ThrottlingExceptionRoutePolicy() {
}
public ThrottlingExceptionRoutePolicy(int threshold, long failureWindow, long halfOpenAfter,
List<Class<?>> handledExceptions) {
this(threshold, failureWindow, halfOpenAfter, handledExceptions, false);
}
public ThrottlingExceptionRoutePolicy(int threshold, long failureWindow, long halfOpenAfter,
List<Class<?>> handledExceptions, boolean keepOpen) {
this.throttledExceptions = handledExceptions;
this.failureWindow = failureWindow;
this.halfOpenAfter = halfOpenAfter;
this.failureThreshold = threshold;
this.keepOpenBool.set(keepOpen);
}
@Override
public void setCamelContext(CamelContext camelContext) {
this.camelContext = camelContext;
}
@Override
public CamelContext getCamelContext() {
return camelContext;
}
@Override
public Route getRoute() {
return route;
}
@Override
public void setRoute(Route route) {
this.route = route;
}
public List<Class<?>> getThrottledExceptions() {
return throttledExceptions;
}
public String getExceptions() {
return exceptions;
}
public void setExceptions(String exceptions) {
this.exceptions = exceptions;
}
@Override
protected void doInit() throws Exception {
super.doInit();
this.stateLogger = new CamelLogger(LOG, stateLoggingLevel);
if (exceptions != null && throttledExceptions == null) {
var list = new ArrayList<Class<?>>();
for (String fqn : exceptions.split(",")) {
Class<?> clazz = camelContext.getClassResolver().resolveMandatoryClass(fqn);
list.add(clazz);
}
this.throttledExceptions = list;
}
}
@Override
public void onInit(Route route) {
LOG.debug("Initializing ThrottlingExceptionRoutePolicy route policy");
logState();
}
@Override
public void onStart(Route route) {
// if keepOpen then start w/ the circuit open
if (keepOpenBool.get()) {
openCircuit(route);
}
}
@Override
protected void doStop() throws Exception {
Timer timer = halfOpenTimer;
if (timer != null) {
timer.cancel();
halfOpenTimer = null;
}
}
@Override
public void onExchangeDone(Route route, Exchange exchange) {
if (keepOpenBool.get()) {
if (state.get() != STATE_OPEN) {
LOG.debug("Opening circuit (keepOpen is true)");
openCircuit(route);
}
} else {
if (hasFailed(exchange)) {
// record the failure
failures.incrementAndGet();
lastFailure = System.currentTimeMillis();
} else {
success.incrementAndGet();
}
// check for state change
calculateState(route);
}
}
/**
* Uses similar approach as circuit breaker if the exchange has an exception that we are watching then we count that
* as a failure otherwise we ignore it
*/
private boolean hasFailed(Exchange exchange) {
if (exchange == null) {
return false;
}
boolean answer = false;
if (exchange.getException() != null) {
if (throttledExceptions == null || throttledExceptions.isEmpty()) {
// if no exceptions defined then always fail
// (ie) assume we throttle on all exceptions
answer = true;
} else {
for (Class<?> exception : throttledExceptions) {
// will look in exception hierarchy
if (exchange.getException(exception) != null) {
answer = true;
break;
}
}
}
}
if (LOG.isDebugEnabled()) {
String exceptionName
= exchange.getException() == null ? "none" : exchange.getException().getClass().getSimpleName();
LOG.debug("hasFailed ({}) with Throttled Exception: {} for exchangeId: {}", answer, exceptionName,
exchange.getExchangeId());
}
return answer;
}
private void calculateState(Route route) {
// have we reached the failure limit?
boolean failureLimitReached = isThresholdExceeded();
if (state.get() == STATE_CLOSED) {
if (failureLimitReached) {
LOG.debug("Opening circuit...");
openCircuit(route);
}
} else if (state.get() == STATE_HALF_OPEN) {
if (failureLimitReached) {
LOG.debug("Opening circuit...");
openCircuit(route);
} else {
LOG.debug("Closing circuit...");
closeCircuit(route);
}
} else if (state.get() == STATE_OPEN) {
if (!keepOpenBool.get()) {
long elapsedTimeSinceOpened = System.currentTimeMillis() - openedAt;
if (halfOpenAfter <= elapsedTimeSinceOpened) {
LOG.debug("Checking an open circuit...");
if (halfOpenHandler != null) {
if (halfOpenHandler.isReadyToBeClosed()) {
LOG.debug("Closing circuit...");
closeCircuit(route);
} else {
LOG.debug("Opening circuit...");
openCircuit(route);
}
} else {
LOG.debug("Half opening circuit...");
halfOpenCircuit(route);
}
} else {
LOG.debug("Keeping circuit open (time not elapsed)...");
}
} else {
LOG.debug("Keeping circuit open (keepOpen is true)...");
this.addHalfOpenTimer(route);
}
}
}
protected boolean isThresholdExceeded() {
boolean output = false;
logState();
// failures exceed the threshold
// AND the last of those failures occurred within window
if (failures.get() >= failureThreshold && lastFailure >= System.currentTimeMillis() - failureWindow) {
output = true;
}
return output;
}
protected void openCircuit(Route route) {
try {
lock.lock();
suspendOrStopConsumer(route.getConsumer());
state.set(STATE_OPEN);
openedAt = System.currentTimeMillis();
this.addHalfOpenTimer(route);
logState();
} catch (Exception e) {
handleException(e);
} finally {
lock.unlock();
}
}
protected void addHalfOpenTimer(Route route) {
halfOpenTimer = new Timer();
halfOpenTimer.schedule(new HalfOpenTask(route), halfOpenAfter);
}
protected void halfOpenCircuit(Route route) {
try {
lock.lock();
resumeOrStartConsumer(route.getConsumer());
state.set(STATE_HALF_OPEN);
logState();
} catch (Exception e) {
handleException(e);
} finally {
lock.unlock();
}
}
protected void closeCircuit(Route route) {
try {
lock.lock();
resumeOrStartConsumer(route.getConsumer());
failures.set(0);
success.set(0);
lastFailure = 0;
openedAt = 0;
state.set(STATE_CLOSED);
logState();
} catch (Exception e) {
handleException(e);
} finally {
lock.unlock();
}
}
private void logState() {
if (stateLogger != null) {
stateLogger.log(dumpState());
}
}
public String getStateAsString() {
return stateAsString(state.get());
}
public String dumpState() {
String routeState = getStateAsString();
if (failures.get() > 0) {
return String.format("State %s, failures %d, last failure %d ms ago", routeState, failures.get(),
System.currentTimeMillis() - lastFailure);
} else {
return String.format("State %s, failures %d", routeState, failures.get());
}
}
private static String stateAsString(int num) {
if (num == STATE_CLOSED) {
return "closed";
} else if (num == STATE_HALF_OPEN) {
return "half opened";
} else {
return "opened";
}
}
| name |
java | netty__netty | codec-compression/src/test/java/io/netty/handler/codec/compression/BrotliEncoderTest.java | {
"start": 885,
"end": 2668
} | class ____ extends AbstractEncoderTest {
private EmbeddedChannel ENCODER_CHANNEL;
private EmbeddedChannel DECODER_CHANNEL;
@BeforeAll
static void setUp() {
try {
Brotli.ensureAvailability();
} catch (Throwable throwable) {
throw new ExceptionInInitializerError(throwable);
}
}
@Override
public EmbeddedChannel createChannel() {
// Setup Encoder and Decoder
ENCODER_CHANNEL = new EmbeddedChannel(new BrotliEncoder());
DECODER_CHANNEL = new EmbeddedChannel(new BrotliDecoder());
// Return the main channel (Encoder)
return ENCODER_CHANNEL;
}
@Override
public void destroyChannel() {
ENCODER_CHANNEL.finishAndReleaseAll();
DECODER_CHANNEL.finishAndReleaseAll();
}
@Override
protected ByteBuf decompress(ByteBuf compressed, int originalLength) {
DECODER_CHANNEL.writeInbound(compressed);
ByteBuf aggregatedBuffer = Unpooled.buffer();
ByteBuf decompressed = DECODER_CHANNEL.readInbound();
while (decompressed != null) {
aggregatedBuffer.writeBytes(decompressed);
decompressed.release();
decompressed = DECODER_CHANNEL.readInbound();
}
return aggregatedBuffer;
}
@Override
protected ByteBuf readDecompressed(final int dataLength) throws Exception {
CompositeByteBuf decompressed = Unpooled.compositeBuffer();
ByteBuf msg;
while ((msg = channel.readOutbound()) != null) {
if (msg.isReadable()) {
decompressed.addComponent(true, decompress(msg, -1));
} else {
msg.release();
}
}
return decompressed;
}
}
| BrotliEncoderTest |
java | elastic__elasticsearch | x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/file/FileUserPasswdStoreTests.java | {
"start": 1957,
"end": 14053
} | class ____ extends ESTestCase {
private Settings settings;
private Environment env;
private ThreadPool threadPool;
@Before
public void init() {
final String hashingAlgorithm = inFipsJvm()
? randomFrom("pbkdf2", "pbkdf2_1000", "pbkdf2_50000", "pbkdf2_stretch")
: randomFrom("bcrypt", "bcrypt11", "pbkdf2", "pbkdf2_1000", "pbkdf2_50000", "pbkdf2_stretch");
settings = Settings.builder()
.put("resource.reload.interval.high", "100ms")
.put("path.home", createTempDir())
.put("xpack.security.authc.password_hashing.algorithm", hashingAlgorithm)
.build();
env = TestEnvironment.newEnvironment(settings);
threadPool = new TestThreadPool("test");
}
@After
public void shutdown() {
terminate(threadPool);
}
public void testStore_ConfiguredWithUnreadableFile() throws Exception {
Path configDir = env.configDir();
Files.createDirectories(configDir);
Path file = configDir.resolve("users");
// writing in utf_16 should cause a parsing error as we try to read the file in utf_8
Files.write(file, Collections.singletonList("aldlfkjldjdflkjd"), StandardCharsets.UTF_16);
RealmConfig config = getRealmConfig();
try (ResourceWatcherService watcherService = new ResourceWatcherService(settings, threadPool)) {
FileUserPasswdStore store = new FileUserPasswdStore(config, watcherService);
assertThat(store.usersCount(), is(0));
}
}
public void testStore_AutoReload() throws Exception {
Path users = getDataPath("users");
Path configDir = env.configDir();
Files.createDirectories(configDir);
Path file = configDir.resolve("users");
Files.copy(users, file, StandardCopyOption.REPLACE_EXISTING);
final Hasher hasher = Hasher.resolve(settings.get("xpack.security.authc.password_hashing.algorithm"));
RealmConfig config = getRealmConfig();
try (ResourceWatcherService watcherService = new ResourceWatcherService(settings, threadPool)) {
final CountDownLatch latch = new CountDownLatch(1);
FileUserPasswdStore store = new FileUserPasswdStore(config, watcherService, latch::countDown);
// Test users share the hashing algorithm name for convenience
String username = settings.get("xpack.security.authc.password_hashing.algorithm");
User user = new User(username);
assertThat(store.userExists(username), is(true));
final String password = username.startsWith("pbkdf2") ? "longertestpassword" : "test123";
AuthenticationResult<User> result = store.verifyPassword(username, new SecureString(password), () -> user);
assertThat(result.getStatus(), is(AuthenticationResult.Status.SUCCESS));
assertThat(result.getValue(), is(user));
try (BufferedWriter writer = Files.newBufferedWriter(file, StandardCharsets.UTF_8, StandardOpenOption.APPEND)) {
writer.append("\n");
}
watcherService.notifyNow(ResourceWatcherService.Frequency.HIGH);
if (latch.getCount() != 1) {
fail("Listener should not be called as users passwords are not changed.");
}
assertThat(store.userExists(username), is(true));
result = store.verifyPassword(username, new SecureString(password), () -> user);
assertThat(result.getStatus(), is(AuthenticationResult.Status.SUCCESS));
assertThat(result.getValue(), is(user));
try (BufferedWriter writer = Files.newBufferedWriter(file, StandardCharsets.UTF_8, StandardOpenOption.APPEND)) {
writer.newLine();
writer.append("foobar:").append(new String(hasher.hash(new SecureString("longtestpassword"))));
}
if (latch.await(5, TimeUnit.SECONDS) == false) {
fail("Waited too long for the updated file to be picked up");
}
assertThat(store.userExists("foobar"), is(true));
result = store.verifyPassword("foobar", new SecureString("longtestpassword"), () -> user);
assertThat(result.getStatus(), is(AuthenticationResult.Status.SUCCESS));
assertThat(result.getValue(), is(user));
}
}
private RealmConfig getRealmConfig() {
final RealmConfig.RealmIdentifier identifier = new RealmConfig.RealmIdentifier("file", "file-test");
return new RealmConfig(
identifier,
Settings.builder().put(settings).put(RealmSettings.getFullSettingKey(identifier, RealmSettings.ORDER_SETTING), 0).build(),
env,
threadPool.getThreadContext()
);
}
public void testStore_AutoReload_WithParseFailures() throws Exception {
Path users = getDataPath("users");
Path confDir = env.configDir();
Files.createDirectories(confDir);
Path testUsers = confDir.resolve("users");
Files.copy(users, testUsers, StandardCopyOption.REPLACE_EXISTING);
RealmConfig config = getRealmConfig();
try (ResourceWatcherService watcherService = new ResourceWatcherService(settings, threadPool)) {
final CountDownLatch latch = new CountDownLatch(1);
FileUserPasswdStore store = new FileUserPasswdStore(config, watcherService, latch::countDown);
// Test users share the hashing algorithm name for convenience
String username = settings.get("xpack.security.authc.password_hashing.algorithm");
User user = new User(username);
final String password = username.startsWith("pbkdf2") ? "longertestpassword" : "test123";
final AuthenticationResult<User> result = store.verifyPassword(username, new SecureString(password), () -> user);
assertThat(result.getStatus(), is(AuthenticationResult.Status.SUCCESS));
assertThat(result.getValue(), is(user));
// now replacing the content of the users file with something that cannot be read
Files.write(testUsers, Collections.singletonList("aldlfkjldjdflkjd"), StandardCharsets.UTF_16);
if (latch.await(5, TimeUnit.SECONDS) == false) {
fail("Waited too long for the updated file to be picked up");
}
assertThat(store.usersCount(), is(0));
}
}
public void testParseFile() throws Exception {
Path path = getDataPath("users");
Map<String, char[]> users = FileUserPasswdStore.parseFile(path, null, Settings.EMPTY);
assertThat(users, notNullValue());
assertThat(users.size(), is(12));
assertThat(users.get("bcrypt"), notNullValue());
assertThat(new String(users.get("bcrypt")), equalTo("$2a$05$zxnP0vdREMxnEpkLCDI2OuSaSk/QEKA2.A42iOpI6U2u.RLLOWm1e"));
assertThat(users.get("bcrypt10"), notNullValue());
assertThat(new String(users.get("bcrypt10")), equalTo("$2a$10$cFxpMx6YDrH/PXwLpTlux.KVykN1TG2Pgdl5oJX5/G/KYp3G6jbFG"));
assertThat(users.get("md5"), notNullValue());
assertThat(new String(users.get("md5")), equalTo("$apr1$R3DdqiAZ$aljIkaIVPSarmDMlJUBBP."));
assertThat(users.get("crypt"), notNullValue());
assertThat(new String(users.get("crypt")), equalTo("hsP1PYSLsEEvs"));
assertThat(users.get("plain"), notNullValue());
assertThat(new String(users.get("plain")), equalTo("{plain}test123"));
assertThat(users.get("sha"), notNullValue());
assertThat(new String(users.get("sha")), equalTo("{SHA}cojt0Pw//L6ToM8G41aOKFIWh7w="));
assertThat(users.get("pbkdf2"), notNullValue());
assertThat(
new String(users.get("pbkdf2")),
equalTo("{PBKDF2}10000$NB6kwTrIPrwJJTu+KXiPUkW5bMf1oG2BMzDJLA479Bk=$CvCgHb5UkalUiNPicqMDOzIsnh3ppyz3SZOp+Gjv+hc=")
);
assertThat(users.get("pbkdf2_1000"), notNullValue());
assertThat(
new String(users.get("pbkdf2_1000")),
equalTo("{PBKDF2}1000$cofpEhehEObS+tNtS8/t9Zpf6UgwqkgkQFct2hhmGWA=$9Qb0S04fkF+Ebz1sGIaB9S6huZAXDihopPc6Z748f3E=")
);
assertThat(users.get("pbkdf2_50000"), notNullValue());
assertThat(
new String(users.get("pbkdf2_50000")),
equalTo("{PBKDF2}50000$riPhBgfrNIpsN91QmF5mQNCwxHfJm0q2XtGt0x5+PRM=$v2j/DD+aFIRrusEeSDUO+eX3IrBPiG+ysgc9y0RDmhs=")
);
assertThat(
new String(users.get("pbkdf2_stretch")),
equalTo("{PBKDF2_STRETCH}10000$s1y/xv1T1iJxS9BKQ1FkZpSO19dSs6vsGgOb14d+KkU=$PtdgZoRGCSaim033lz/RcEoyhXQ/3WU4E6hfeKGsGes=")
);
}
public void testParseFile_Empty() throws Exception {
Path empty = createTempFile();
Logger logger = CapturingLogger.newCapturingLogger(Level.DEBUG, null);
Map<String, char[]> users = FileUserPasswdStore.parseFile(empty, logger, Settings.EMPTY);
assertThat(users, notNullValue());
assertThat(users.isEmpty(), is(true));
List<String> events = CapturingLogger.output(logger.getName(), Level.DEBUG);
assertThat(events.size(), is(1));
assertThat(events.get(0), containsString("parsed [0] users"));
}
public void testParseFile_WhenFileDoesNotExist() throws Exception {
Path file = createTempDir().resolve(randomAlphaOfLength(10));
Logger logger = CapturingLogger.newCapturingLogger(Level.INFO, null);
Map<String, char[]> users = FileUserPasswdStore.parseFile(file, logger, Settings.EMPTY);
assertThat(users, nullValue());
users = FileUserPasswdStore.parseFileLenient(file, logger, Settings.EMPTY);
assertThat(users, notNullValue());
assertThat(users.isEmpty(), is(true));
}
public void testParseFile_WhenCannotReadFile() throws Exception {
Path file = createTempFile();
// writing in utf_16 should cause a parsing error as we try to read the file in utf_8
Files.write(file, Collections.singletonList("aldlfkjldjdflkjd"), StandardCharsets.UTF_16);
Logger logger = CapturingLogger.newCapturingLogger(Level.INFO, null);
try {
FileUserPasswdStore.parseFile(file, logger, Settings.EMPTY);
fail("expected a parse failure");
} catch (IllegalStateException se) {
this.logger.info("expected", se);
}
}
public void testParseFile_InvalidLineDoesNotResultInLoggerNPE() throws Exception {
Path file = createTempFile();
Files.write(file, Arrays.asList("NotValidUsername=Password", "user:pass"), StandardCharsets.UTF_8);
Map<String, char[]> users = FileUserPasswdStore.parseFile(file, null, Settings.EMPTY);
assertThat(users, notNullValue());
assertThat(users.keySet(), hasSize(1));
}
public void testParseFileLenient_WhenCannotReadFile() throws Exception {
Path file = createTempFile();
// writing in utf_16 should cause a parsing error as we try to read the file in utf_8
Files.write(file, Collections.singletonList("aldlfkjldjdflkjd"), StandardCharsets.UTF_16);
Logger logger = CapturingLogger.newCapturingLogger(Level.INFO, null);
Map<String, char[]> users = FileUserPasswdStore.parseFileLenient(file, logger, Settings.EMPTY);
assertThat(users, notNullValue());
assertThat(users.isEmpty(), is(true));
List<String> events = CapturingLogger.output(logger.getName(), Level.ERROR);
assertThat(events.size(), is(1));
assertThat(events.get(0), containsString("failed to parse users file"));
}
public void testParseFileWithLineWithEmptyPasswordAndWhitespace() throws Exception {
Path file = createTempFile();
Files.write(file, Collections.singletonList("user: "), StandardCharsets.UTF_8);
Map<String, char[]> users = FileUserPasswdStore.parseFile(file, null, Settings.EMPTY);
assertThat(users, notNullValue());
assertThat(users.keySet(), is(empty()));
}
}
| FileUserPasswdStoreTests |
java | apache__camel | components/camel-spring-parent/camel-spring-xml/src/test/java/org/apache/camel/spring/interceptor/route/DataSourceSpringRouteBuilder.java | {
"start": 1121,
"end": 2071
} | class ____ extends SpringRouteBuilder {
@Override
public void configure() throws Exception {
// get the required policy
SpringTransactionPolicy required = lookup("PROPAGATION_REQUIRED", SpringTransactionPolicy.class);
// For spring based transaction, end users are encouraged to use the
// transaction error handler instead of the default DeadLetterChannel.
errorHandler(transactionErrorHandler(required));
// set the required policy for this route
from("direct:okay").policy(required).setBody(constant("Tiger in Action")).bean("bookService")
.setBody(constant("Elephant in Action")).bean("bookService");
// set the required policy for this route
from("direct:fail").policy(required).setBody(constant("Tiger in Action")).bean("bookService")
.setBody(constant("Donkey in Action")).bean("bookService");
}
}
| DataSourceSpringRouteBuilder |
java | junit-team__junit5 | platform-tests/src/test/java/org/junit/platform/launcher/MethodFilterTests.java | {
"start": 7321,
"end": 7400
} | class ____ {
@Test
void test1() {
}
@Test
void test2() {
}
}
}
| Class2 |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/UnusedVariableTest.java | {
"start": 27517,
"end": 27852
} | class ____ {
int a = foo();
private int foo() {
return 1;
}
}
""")
.doTest();
}
@Test
public void fixPrivateMethod_parameterLocations() {
refactoringHelper
.addInputLines(
"Test.java",
"""
| Test |
java | lettuce-io__lettuce-core | src/main/java/io/lettuce/core/dynamic/AsyncExecutableCommand.java | {
"start": 538,
"end": 2722
} | class ____ implements ExecutableCommand {
private final CommandMethod commandMethod;
private final CommandFactory commandFactory;
private final StatefulConnection<Object, Object> connection;
AsyncExecutableCommand(CommandMethod commandMethod, CommandFactory commandFactory,
StatefulConnection<Object, Object> connection) {
this.commandMethod = commandMethod;
this.commandFactory = commandFactory;
this.connection = connection;
}
@Override
public Object execute(Object[] parameters) throws ExecutionException, InterruptedException {
RedisCommand<Object, Object, Object> command = commandFactory.createCommand(parameters);
return dispatchCommand(parameters, command);
}
protected Object dispatchCommand(Object[] arguments, RedisCommand<Object, Object, Object> command)
throws InterruptedException, java.util.concurrent.ExecutionException {
AsyncCommand<Object, Object, Object> asyncCommand = new AsyncCommand<>(command);
if (commandMethod.isFutureExecution()) {
RedisCommand<Object, Object, Object> dispatched = connection.dispatch(asyncCommand);
if (dispatched instanceof AsyncCommand) {
return dispatched;
}
return asyncCommand;
}
connection.dispatch(asyncCommand);
Duration timeout = connection.getTimeout();
if (commandMethod.getParameters() instanceof ExecutionSpecificParameters) {
ExecutionSpecificParameters executionSpecificParameters = (ExecutionSpecificParameters) commandMethod
.getParameters();
if (executionSpecificParameters.hasTimeoutIndex()) {
Timeout timeoutArg = (Timeout) arguments[executionSpecificParameters.getTimeoutIndex()];
if (timeoutArg != null) {
timeout = timeoutArg.getTimeout();
}
}
}
Futures.await(timeout, asyncCommand);
return asyncCommand.get();
}
@Override
public CommandMethod getCommandMethod() {
return commandMethod;
}
}
| AsyncExecutableCommand |
java | google__dagger | javatests/dagger/hilt/android/CustomTestApplicationTest.java | {
"start": 1673,
"end": 2019
} | class ____ extends Application {}
@Rule public HiltAndroidRule rule = new HiltAndroidRule(this);
@Test
public void testApplicationBaseClass() throws Exception {
assertThat((Context) getApplicationContext()).isInstanceOf(BaseApplication.class);
}
@CustomTestApplication(OtherBaseApplication.class)
public static | OtherBaseApplication |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/AsyncCallHandler.java | {
"start": 6421,
"end": 7090
} | class ____<V> {
private V value;
synchronized V waitAsyncValue(long timeout, TimeUnit unit)
throws InterruptedException, TimeoutException {
if (value != null) {
return value;
}
AsyncGet.Util.wait(this, timeout, unit);
if (value != null) {
return value;
}
throw new TimeoutException("waitCallReturn timed out "
+ timeout + " " + unit);
}
synchronized void set(V v) {
Preconditions.checkNotNull(v);
Preconditions.checkState(value == null);
value = v;
notify();
}
synchronized boolean isDone() {
return value != null;
}
}
static | AsyncValue |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/script/ScriptService.java | {
"start": 39556,
"end": 39822
} | class ____ in two modes:
* 1) general mode, if the general script cache is configured. There are no context caches in this case.
* 2) context mode, if the context script cache is configured. There is no general cache in this case.
*/
static | operates |
java | apache__camel | core/camel-core-reifier/src/main/java/org/apache/camel/reifier/language/SingleInputTypedExpressionReifier.java | {
"start": 1261,
"end": 1781
} | class ____<T extends SingleInputTypedExpressionDefinition>
extends TypedExpressionReifier<T> {
SingleInputTypedExpressionReifier(CamelContext camelContext, ExpressionDefinition definition) {
super(camelContext, definition);
}
@Override
protected Object[] createProperties() {
Object[] properties = new Object[2];
properties[0] = asResultType();
properties[1] = parseString(definition.getSource());
return properties;
}
}
| SingleInputTypedExpressionReifier |
java | quarkusio__quarkus | extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/AutoAddScopeProcessor.java | {
"start": 3297,
"end": 6747
} | class ____ lazily
requiresContainerServices = requiresContainerServices(clazz, containerAnnotationNames,
beanArchiveIndex.getIndex());
}
if (!requiresContainerServices) {
// Skip - no injection point detected
continue;
}
}
if (autoScope.test(clazz, context.getAnnotations(), beanArchiveIndex.getIndex())) {
if (scope != null) {
BiConsumer<DotName, String> consumer = autoScope.getScopeAlreadyAdded();
if (consumer != null) {
consumer.accept(scope, reason);
} else {
LOGGER.debugf("Scope %s was already added for reason: %s", scope, reason);
}
continue;
}
scope = autoScope.getDefaultScope();
reason = autoScope.getReason();
context.transform().add(scope).done();
if (unremovables != null && autoScope.isUnremovable()) {
unremovables.put(clazz.name(), autoScope);
}
LOGGER.debugf("Automatically added scope %s to class %s: %s", scope, clazz, autoScope.getReason());
}
}
}
}));
if (unremovables != null) {
unremovableBeans.produce(new UnremovableBeanBuildItem(new Predicate<BeanInfo>() {
@Override
public boolean test(BeanInfo bean) {
return bean.isClassBean() && unremovables.containsKey(bean.getBeanClass());
}
}));
}
}
private boolean requiresContainerServices(ClassInfo clazz, Set<DotName> containerAnnotationNames, IndexView index) {
// Note that transformed methods/fields are not taken into account
if (hasContainerAnnotation(clazz, containerAnnotationNames)) {
return true;
}
if (index != null) {
DotName superName = clazz.superName();
while (superName != null && !superName.equals(DotNames.OBJECT)) {
final ClassInfo superClass = index.getClassByName(superName);
if (superClass != null) {
if (hasContainerAnnotation(superClass, containerAnnotationNames)) {
return true;
}
superName = superClass.superName();
} else {
superName = null;
}
}
}
return false;
}
private boolean hasContainerAnnotation(ClassInfo clazz, Set<DotName> containerAnnotationNames) {
if (clazz.annotationsMap().isEmpty() || containerAnnotationNames.isEmpty()) {
return false;
}
return containsAny(clazz, containerAnnotationNames);
}
private boolean containsAny(ClassInfo clazz, Set<DotName> annotationNames) {
for (DotName annotation : clazz.annotationsMap().keySet()) {
if (annotationNames.contains(annotation)) {
return true;
}
}
return false;
}
}
| hierarchy |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/resources/TestCGroupsV2HandlerImpl.java | {
"start": 1674,
"end": 18367
} | class ____ extends TestCGroupsHandlerBase {
// Create a controller file in the unified hierarchy of cgroup v2
@Override
protected String getControllerFilePath(String controllerName) {
return new File(tmpPath, hierarchy).getAbsolutePath();
}
/*
* Create a mock mtab file with the following content:
* cgroup2 /path/to/parentDir cgroup2 rw,nosuid,nodev,noexec,relatime,nsdelegate,memory_recursiveprot 0 0
*
* Create the following cgroup v2 file hierarchy:
* parentDir
* ___________________________________________________
* / \ \
* cgroup.controllers cgroup.subtree_control test-hadoop-yarn (hierarchyDir)
* _________________
* / \
* cgroup.controllers cgroup.subtree_control
*/
public File createPremountedCgroups(File parentDir)
throws IOException {
String baseCgroup2Line =
"cgroup2 " + parentDir.getAbsolutePath()
+ " cgroup2 rw,nosuid,nodev,noexec,relatime,nsdelegate,memory_recursiveprot 0 0\n";
File mockMtab = createFileWithContent(parentDir, UUID.randomUUID().toString(), baseCgroup2Line);
String enabledControllers = "cpuset cpu io memory hugetlb pids rdma misc\n";
File controllersFile = createFileWithContent(parentDir, CGroupsHandler.CGROUP_CONTROLLERS_FILE,
enabledControllers);
File subtreeControlFile = new File(parentDir, CGroupsHandler.CGROUP_SUBTREE_CONTROL_FILE);
assertTrue(subtreeControlFile.createNewFile(),
"empty subtree_control file should be created");
File hierarchyDir = new File(parentDir, hierarchy);
if (!hierarchyDir.mkdirs()) {
String message = "Could not create directory " + hierarchyDir.getAbsolutePath();
throw new IOException(message);
}
hierarchyDir.deleteOnExit();
FileUtils.copyFile(controllersFile, new File(hierarchyDir,
CGroupsHandler.CGROUP_CONTROLLERS_FILE));
FileUtils.copyFile(subtreeControlFile, new File(hierarchyDir,
CGroupsHandler.CGROUP_SUBTREE_CONTROL_FILE));
return mockMtab;
}
@Test
public void testCGroupPaths() throws IOException, ResourceHandlerException {
verifyZeroInteractions(privilegedOperationExecutorMock);
File parentDir = new File(tmpPath);
File mtab = createPremountedCgroups(parentDir);
assertTrue(new File(controllerPath).exists(),
"Sample subsystem should be created");
CGroupsHandler cGroupsHandler = new CGroupsV2HandlerImpl(createNoMountConfiguration(hierarchy),
privilegedOperationExecutorMock, mtab.getAbsolutePath());
cGroupsHandler.initializeCGroupController(controller);
String testCGroup = "container_01";
String expectedPath =
controllerPath + Path.SEPARATOR + testCGroup;
String path = cGroupsHandler.getPathForCGroup(controller, testCGroup);
assertEquals(expectedPath, path);
String expectedPathTasks = expectedPath + Path.SEPARATOR
+ CGroupsHandler.CGROUP_PROCS_FILE;
path = cGroupsHandler.getPathForCGroupTasks(controller, testCGroup);
assertEquals(expectedPathTasks, path);
String param = CGroupsHandler.CGROUP_PARAM_CLASSID;
String expectedPathParam = expectedPath + Path.SEPARATOR
+ controller.getName() + "." + param;
path = cGroupsHandler.getPathForCGroupParam(controller, testCGroup, param);
assertEquals(expectedPathParam, path);
}
@Test
public void testUnsupportedMountConfiguration() throws Exception {
assertThrows(UnsupportedOperationException.class, () -> {
//As per junit behavior, we expect a new mock object to be available
//in this test.
verifyZeroInteractions(privilegedOperationExecutorMock);
CGroupsHandler cGroupsHandler;
File mtab = createEmptyMtabFile();
assertTrue(new File(controllerPath).mkdirs(),
"Sample subsystem should be created");
cGroupsHandler = new CGroupsV2HandlerImpl(createMountConfiguration(),
privilegedOperationExecutorMock, mtab.getAbsolutePath());
cGroupsHandler.initializeCGroupController(controller);
});
}
@Test
public void testCGroupOperations() throws IOException, ResourceHandlerException {
verifyZeroInteractions(privilegedOperationExecutorMock);
File parentDir = new File(tmpPath);
File mtab = createPremountedCgroups(parentDir);
assertTrue(new File(controllerPath).exists(),
"Sample subsystem should be created");
CGroupsHandler cGroupsHandler = new CGroupsV2HandlerImpl(createNoMountConfiguration(hierarchy),
privilegedOperationExecutorMock, mtab.getAbsolutePath());
cGroupsHandler.initializeCGroupController(controller);
String testCGroup = "container_01";
String expectedPath = controllerPath
+ Path.SEPARATOR + testCGroup;
String path = cGroupsHandler.createCGroup(controller, testCGroup);
assertTrue(new File(expectedPath).exists());
assertEquals(expectedPath, path);
String param = "test_param";
String paramValue = "test_param_value";
cGroupsHandler
.updateCGroupParam(controller, testCGroup, param, paramValue);
String paramPath = expectedPath + Path.SEPARATOR + controller.getName()
+ "." + param;
File paramFile = new File(paramPath);
assertTrue(paramFile.exists());
assertEquals(paramValue, new String(Files.readAllBytes(
paramFile.toPath())));
assertEquals(paramValue,
cGroupsHandler.getCGroupParam(controller, testCGroup, param));
}
/**
* Tests whether mtab parsing works as expected with a valid hierarchy set.
* @throws Exception the test will fail
*/
@Test
public void testMtabParsing() throws Exception {
// Initialize mtab and cgroup dir
File parentDir = new File(tmpPath);
// create mock cgroup
File mockMtabFile = createPremountedCgroups(parentDir);
CGroupsV2HandlerImpl cGroupsHandler = new CGroupsV2HandlerImpl(
createMountConfiguration(),
privilegedOperationExecutorMock, mockMtabFile.getAbsolutePath());
// Run mtabs parsing
Map<String, Set<String>> newMtab =
cGroupsHandler.parseMtab(mockMtabFile.getAbsolutePath());
Map<CGroupsHandler.CGroupController, String> controllerPaths =
cGroupsHandler.initializeControllerPathsFromMtab(
newMtab);
// Verify
assertEquals(4, controllerPaths.size());
assertTrue(controllerPaths
.containsKey(CGroupsHandler.CGroupController.CPU));
assertTrue(controllerPaths
.containsKey(CGroupsHandler.CGroupController.MEMORY));
String cpuDir = controllerPaths.get(CGroupsHandler.CGroupController.CPU);
String memoryDir =
controllerPaths.get(CGroupsHandler.CGroupController.MEMORY);
assertEquals(parentDir.getAbsolutePath(), cpuDir);
assertEquals(parentDir.getAbsolutePath(), memoryDir);
}
/*
* Create a mock mtab file with the following content for hybrid v1/v2:
* cgroup2 /path/to/parentV2Dir cgroup2 rw,nosuid,nodev,noexec,relatime,memory_recursiveprot 0 0
* cgroup /path/to/parentDir/memory cgroup rw,nosuid,nodev,noexec,relatime,memory 0 0
*
* Create the following cgroup hierarchy:
*
* parentDir
* __________________________________
* / \
* unified memory
* _________________________________________________
* / \ \
* cgroup.controllers cgroup.subtree_control test-hadoop-yarn (hierarchyDir)
* _________________
* / \
* cgroup.controllers cgroup.subtree_control
*/
public File createPremountedHybridCgroups(File v1ParentDir)
throws IOException {
File v2ParentDir = new File(v1ParentDir, "unified");
String mtabContent =
"cgroup " + v1ParentDir.getAbsolutePath() + "/memory"
+ " cgroup rw,nosuid,nodev,noexec,relatime,memory 0 0\n"
+ "cgroup2 " + v2ParentDir.getAbsolutePath()
+ " cgroup2 rw,nosuid,nodev,noexec,relatime,memory_recursiveprot 0 0\n";
File mockMtab = createFileWithContent(v1ParentDir, UUID.randomUUID().toString(), mtabContent);
String enabledV2Controllers = "cpuset cpu io hugetlb pids rdma misc\n";
File controllersFile = createFileWithContent(v2ParentDir,
CGroupsHandler.CGROUP_CONTROLLERS_FILE, enabledV2Controllers);
File subtreeControlFile = new File(v2ParentDir, CGroupsHandler.CGROUP_SUBTREE_CONTROL_FILE);
assertTrue(subtreeControlFile.createNewFile(),
"empty subtree_control file should be created");
File hierarchyDir = new File(v2ParentDir, hierarchy);
if (!hierarchyDir.mkdirs()) {
String message = "Could not create directory " + hierarchyDir.getAbsolutePath();
throw new IOException(message);
}
hierarchyDir.deleteOnExit();
FileUtils.copyFile(controllersFile, new File(hierarchyDir,
CGroupsHandler.CGROUP_CONTROLLERS_FILE));
FileUtils.copyFile(subtreeControlFile, new File(hierarchyDir,
CGroupsHandler.CGROUP_SUBTREE_CONTROL_FILE));
return mockMtab;
}
@Test
public void testHybridMtabParsing() throws Exception {
// Initialize mtab and cgroup dir
File v1ParentDir = new File(tmpPath);
File v2ParentDir = new File(v1ParentDir, "unified");
assertTrue(v2ParentDir.mkdirs(), "temp dir should be created");
v2ParentDir.deleteOnExit();
// create mock cgroup
File mockMtabFile = createPremountedHybridCgroups(v1ParentDir);
// create memory cgroup for v1
File memoryCgroup = new File(v1ParentDir, "memory");
assertTrue(memoryCgroup.mkdirs(), "Directory should be created");
// init v1 and v2 handlers
CGroupsHandlerImpl cGroupsHandler = new CGroupsHandlerImpl(
createMountConfiguration(),
privilegedOperationExecutorMock, mockMtabFile.getAbsolutePath());
CGroupsV2HandlerImpl cGroupsV2Handler = new CGroupsV2HandlerImpl(
createMountConfiguration(),
privilegedOperationExecutorMock, mockMtabFile.getAbsolutePath());
// Verify resource handlers that are enabled in v1
Map<String, Set<String>> newMtab =
cGroupsHandler.parseMtab(mockMtabFile.getAbsolutePath());
Map<CGroupsHandler.CGroupController, String> controllerv1Paths =
cGroupsHandler.initializeControllerPathsFromMtab(
newMtab);
assertEquals(1, controllerv1Paths.size());
assertTrue(controllerv1Paths
.containsKey(CGroupsHandler.CGroupController.MEMORY));
String memoryDir =
controllerv1Paths.get(CGroupsHandler.CGroupController.MEMORY);
assertEquals(memoryCgroup.getAbsolutePath(), memoryDir);
// Verify resource handlers that are enabled in v2
newMtab =
cGroupsV2Handler.parseMtab(mockMtabFile.getAbsolutePath());
Map<CGroupsHandler.CGroupController, String> controllerPaths =
cGroupsV2Handler.initializeControllerPathsFromMtab(
newMtab);
assertEquals(3, controllerPaths.size());
assertTrue(controllerPaths
.containsKey(CGroupsHandler.CGroupController.CPU));
String cpuDir = controllerPaths.get(CGroupsHandler.CGroupController.CPU);
assertEquals(v2ParentDir.getAbsolutePath(), cpuDir);
}
@Test
public void testManualCgroupSetting() throws Exception {
YarnConfiguration conf = new YarnConfiguration();
conf.set(YarnConfiguration.NM_LINUX_CONTAINER_CGROUPS_MOUNT_PATH, tmpPath);
conf.set(YarnConfiguration.NM_LINUX_CONTAINER_CGROUPS_HIERARCHY,
"/hadoop-yarn");
validateCgroupV2Controllers(conf, tmpPath);
}
@Test
public void testManualHybridCgroupSetting() throws Exception {
String unifiedPath = tmpPath + "/unified";
YarnConfiguration conf = new YarnConfiguration();
conf.set(YarnConfiguration.NM_LINUX_CONTAINER_CGROUPS_MOUNT_PATH, tmpPath);
conf.set(YarnConfiguration.NM_LINUX_CONTAINER_CGROUPS_V2_MOUNT_PATH, unifiedPath);
conf.set(YarnConfiguration.NM_LINUX_CONTAINER_CGROUPS_HIERARCHY,
"/hadoop-yarn");
validateCgroupV1Controllers(conf, tmpPath);
validateCgroupV2Controllers(conf, unifiedPath);
}
private void validateCgroupV2Controllers(YarnConfiguration conf, String mountPath)
throws Exception {
File baseCgroup = new File(mountPath);
File subCgroup = new File(mountPath, "/hadoop-yarn");
assertTrue(subCgroup.mkdirs(), "temp dir should be created");
subCgroup.deleteOnExit();
String enabledControllers = "cpuset cpu io memory hugetlb pids rdma misc\n";
createFileWithContent(baseCgroup, CGroupsHandler.CGROUP_CONTROLLERS_FILE, enabledControllers);
createFileWithContent(subCgroup, CGroupsHandler.CGROUP_CONTROLLERS_FILE, enabledControllers);
File subtreeControlFile = new File(subCgroup.getAbsolutePath(),
CGroupsHandler.CGROUP_SUBTREE_CONTROL_FILE);
assertTrue(subtreeControlFile.createNewFile(),
"empty subtree_control file should be created");
CGroupsV2HandlerImpl cGroupsHandler = new CGroupsV2HandlerImpl(conf, null);
cGroupsHandler.initializeCGroupController(CGroupsHandler.CGroupController.CPU);
assertEquals(subCgroup.getAbsolutePath(),
new File(cGroupsHandler.getPathForCGroup(
CGroupsHandler.CGroupController.CPU, "")).getAbsolutePath(),
"CPU cgroup path was not set");
// Verify that the subtree control file was updated
String subtreeControllersEnabledString = FileUtils.readFileToString(subtreeControlFile,
StandardCharsets.UTF_8);
assertEquals(1, StringUtils.countMatches(subtreeControllersEnabledString, "+"),
"The newly added controller doesn't contain + sign");
assertEquals(controller.getName(), subtreeControllersEnabledString.replace("+", "").trim(),
"Controller is not enabled in subtree control file");
cGroupsHandler.initializeCGroupController(CGroupsHandler.CGroupController.MEMORY);
subtreeControllersEnabledString = FileUtils.readFileToString(subtreeControlFile,
StandardCharsets.UTF_8);
assertEquals(2, StringUtils.countMatches(subtreeControllersEnabledString, "+"),
"The newly added controllers doesn't contain + signs");
Set<String> subtreeControllersEnabled = new HashSet<>(Arrays.asList(
subtreeControllersEnabledString.replace("+", " ").trim().split(" ")));
assertEquals(2, subtreeControllersEnabled.size());
assertTrue(cGroupsHandler.getValidCGroups().containsAll(subtreeControllersEnabled),
"Controller is not enabled in subtree control file");
// Test that the subtree control file is appended correctly
// even if some controllers are present
subtreeControlFile.delete();
createFileWithContent(subCgroup, CGroupsHandler.CGROUP_SUBTREE_CONTROL_FILE, "cpu io");
cGroupsHandler.initializeCGroupController(CGroupsHandler.CGroupController.MEMORY);
subtreeControllersEnabledString = FileUtils.readFileToString(subtreeControlFile,
StandardCharsets.UTF_8);
assertEquals(1, StringUtils.countMatches(subtreeControllersEnabledString, "+"),
"The newly added controller doesn't contain + sign");
subtreeControllersEnabled = new HashSet<>(Arrays.asList(
subtreeControllersEnabledString.replace("+", " ").split(" ")));
assertEquals(3, subtreeControllersEnabled.size());
assertTrue(cGroupsHandler.getValidCGroups().containsAll(subtreeControllersEnabled),
"Controllers not enabled in subtree control file");
}
private void validateCgroupV1Controllers(YarnConfiguration conf, String mountPath)
throws ResourceHandlerException {
File blkio = new File(new File(mountPath, "blkio"), "/hadoop-yarn");
assertTrue(blkio.mkdirs(), "temp dir should be created");
CGroupsHandlerImpl cGroupsv1Handler = new CGroupsHandlerImpl(conf, null);
cGroupsv1Handler.initializeCGroupController(
CGroupsHandler.CGroupController.BLKIO);
assertEquals(blkio.getAbsolutePath(),
new File(cGroupsv1Handler.getPathForCGroup(
CGroupsHandler.CGroupController.BLKIO, "")).getAbsolutePath(),
"BLKIO CGRoup path was not set");
FileUtils.deleteQuietly(blkio);
}
} | TestCGroupsV2HandlerImpl |
java | spring-projects__spring-framework | spring-web/src/main/java/org/springframework/web/client/DefaultRestClientBuilder.java | {
"start": 2596,
"end": 17909
} | class ____ implements RestClient.Builder {
// request factories
private static final boolean HTTP_COMPONENTS_CLIENT_PRESENT;
private static final boolean JETTY_CLIENT_PRESENT;
private static final boolean REACTOR_NETTY_CLIENT_PRESENT;
private static final boolean JDK_CLIENT_PRESENT;
static {
ClassLoader loader = DefaultRestClientBuilder.class.getClassLoader();
HTTP_COMPONENTS_CLIENT_PRESENT = ClassUtils.isPresent("org.apache.hc.client5.http.classic.HttpClient", loader);
JETTY_CLIENT_PRESENT = ClassUtils.isPresent("org.eclipse.jetty.client.HttpClient", loader);
REACTOR_NETTY_CLIENT_PRESENT = ClassUtils.isPresent("reactor.netty.http.client.HttpClient", loader);
JDK_CLIENT_PRESENT = ClassUtils.isPresent("java.net.http.HttpClient", loader);
}
private @Nullable String baseUrl;
private @Nullable Map<String, ?> defaultUriVariables;
private @Nullable UriBuilderFactory uriBuilderFactory;
private @Nullable HttpHeaders defaultHeaders;
private @Nullable MultiValueMap<String, String> defaultCookies;
private @Nullable Object defaultApiVersion;
private @Nullable ApiVersionInserter apiVersionInserter;
private @Nullable Consumer<RestClient.RequestHeadersSpec<?>> defaultRequest;
private @Nullable List<StatusHandler> statusHandlers;
private @Nullable List<ClientHttpRequestInterceptor> interceptors;
private @Nullable BiPredicate<URI, HttpMethod> bufferingPredicate;
private @Nullable List<ClientHttpRequestInitializer> initializers;
private @Nullable ClientHttpRequestFactory requestFactory;
private @Nullable List<HttpMessageConverter<?>> messageConverters;
private @Nullable Consumer<HttpMessageConverters.ClientBuilder> convertersConfigurer;
private ObservationRegistry observationRegistry = ObservationRegistry.NOOP;
private @Nullable ClientRequestObservationConvention observationConvention;
public DefaultRestClientBuilder() {
}
public DefaultRestClientBuilder(DefaultRestClientBuilder other) {
Assert.notNull(other, "Other must not be null");
this.baseUrl = other.baseUrl;
this.defaultUriVariables = (other.defaultUriVariables != null ? new LinkedHashMap<>(other.defaultUriVariables) : null);
this.uriBuilderFactory = other.uriBuilderFactory;
if (other.defaultHeaders != null) {
this.defaultHeaders = new HttpHeaders();
this.defaultHeaders.putAll(other.defaultHeaders);
}
else {
this.defaultHeaders = null;
}
this.defaultCookies = (other.defaultCookies != null ? new LinkedMultiValueMap<>(other.defaultCookies) : null);
this.defaultApiVersion = other.defaultApiVersion;
this.apiVersionInserter = other.apiVersionInserter;
this.defaultRequest = other.defaultRequest;
this.statusHandlers = (other.statusHandlers != null ? new ArrayList<>(other.statusHandlers) : null);
this.interceptors = (other.interceptors != null) ? new ArrayList<>(other.interceptors) : null;
this.bufferingPredicate = other.bufferingPredicate;
this.initializers = (other.initializers != null) ? new ArrayList<>(other.initializers) : null;
this.requestFactory = other.requestFactory;
this.messageConverters = (other.messageConverters != null ? new ArrayList<>(other.messageConverters) : null);
this.convertersConfigurer = other.convertersConfigurer;
this.observationRegistry = other.observationRegistry;
this.observationConvention = other.observationConvention;
}
public DefaultRestClientBuilder(RestTemplate restTemplate) {
Assert.notNull(restTemplate, "RestTemplate must not be null");
this.uriBuilderFactory = getUriBuilderFactory(restTemplate);
this.statusHandlers = new ArrayList<>();
this.statusHandlers.add(StatusHandler.fromErrorHandler(restTemplate.getErrorHandler()));
if (!CollectionUtils.isEmpty(restTemplate.getInterceptors())) {
this.interceptors = new ArrayList<>(restTemplate.getInterceptors());
}
this.bufferingPredicate = restTemplate.getBufferingPredicate();
if (!CollectionUtils.isEmpty(restTemplate.getClientHttpRequestInitializers())) {
this.initializers = new ArrayList<>(restTemplate.getClientHttpRequestInitializers());
}
this.requestFactory = getRequestFactory(restTemplate);
this.messageConverters = new ArrayList<>(restTemplate.getMessageConverters());
this.observationRegistry = restTemplate.getObservationRegistry();
this.observationConvention = restTemplate.getObservationConvention();
}
private static @Nullable UriBuilderFactory getUriBuilderFactory(RestTemplate restTemplate) {
UriTemplateHandler uriTemplateHandler = restTemplate.getUriTemplateHandler();
if (uriTemplateHandler instanceof DefaultUriBuilderFactory builderFactory) {
// only reuse the DefaultUriBuilderFactory if it has been customized
if (hasRestTemplateDefaults(builderFactory)) {
return null;
}
else {
return builderFactory;
}
}
else if (uriTemplateHandler instanceof UriBuilderFactory builderFactory) {
return builderFactory;
}
else {
return null;
}
}
/**
* Indicate whether this {@code DefaultUriBuilderFactory} uses the default
* {@link org.springframework.web.client.RestTemplate RestTemplate} settings.
*/
private static boolean hasRestTemplateDefaults(DefaultUriBuilderFactory factory) {
// see RestTemplate::initUriTemplateHandler
return (!factory.hasBaseUri() &&
factory.getEncodingMode() == DefaultUriBuilderFactory.EncodingMode.URI_COMPONENT &&
CollectionUtils.isEmpty(factory.getDefaultUriVariables()) &&
factory.shouldParsePath());
}
private static ClientHttpRequestFactory getRequestFactory(RestTemplate restTemplate) {
ClientHttpRequestFactory requestFactory = restTemplate.getRequestFactory();
if (requestFactory instanceof InterceptingClientHttpRequestFactory interceptingClientHttpRequestFactory) {
return interceptingClientHttpRequestFactory.getDelegate();
}
else {
return requestFactory;
}
}
@Override
public RestClient.Builder baseUrl(String baseUrl) {
this.baseUrl = baseUrl;
return this;
}
@Override
public RestClient.Builder baseUrl(URI baseUrl) {
this.baseUrl = baseUrl.toString();
return this;
}
@Override
public RestClient.Builder defaultUriVariables(Map<String, ?> defaultUriVariables) {
this.defaultUriVariables = defaultUriVariables;
return this;
}
@Override
public RestClient.Builder uriBuilderFactory(UriBuilderFactory uriBuilderFactory) {
this.uriBuilderFactory = uriBuilderFactory;
return this;
}
@Override
public RestClient.Builder defaultHeader(String header, String... values) {
initHeaders().put(header, Arrays.asList(values));
return this;
}
@Override
public RestClient.Builder defaultHeaders(Consumer<HttpHeaders> headersConsumer) {
headersConsumer.accept(initHeaders());
return this;
}
private HttpHeaders initHeaders() {
if (this.defaultHeaders == null) {
this.defaultHeaders = new HttpHeaders();
}
return this.defaultHeaders;
}
@Override
public RestClient.Builder defaultCookie(String cookie, String... values) {
initCookies().addAll(cookie, Arrays.asList(values));
return this;
}
@Override
public RestClient.Builder defaultCookies(Consumer<MultiValueMap<String, String>> cookiesConsumer) {
cookiesConsumer.accept(initCookies());
return this;
}
private MultiValueMap<String, String> initCookies() {
if (this.defaultCookies == null) {
this.defaultCookies = new LinkedMultiValueMap<>(3);
}
return this.defaultCookies;
}
@Override
public RestClient.Builder defaultApiVersion(@Nullable Object version) {
this.defaultApiVersion = version;
return this;
}
@Override
public RestClient.Builder apiVersionInserter(@Nullable ApiVersionInserter apiVersionInserter) {
this.apiVersionInserter = apiVersionInserter;
return this;
}
@Override
public RestClient.Builder defaultRequest(Consumer<RestClient.RequestHeadersSpec<?>> defaultRequest) {
this.defaultRequest = this.defaultRequest != null ?
this.defaultRequest.andThen(defaultRequest) : defaultRequest;
return this;
}
@Override
public RestClient.Builder defaultStatusHandler(Predicate<HttpStatusCode> statusPredicate, RestClient.ResponseSpec.ErrorHandler errorHandler) {
return defaultStatusHandlerInternal(StatusHandler.of(statusPredicate, errorHandler));
}
@Override
public RestClient.Builder defaultStatusHandler(ResponseErrorHandler errorHandler) {
return defaultStatusHandlerInternal(StatusHandler.fromErrorHandler(errorHandler));
}
private RestClient.Builder defaultStatusHandlerInternal(StatusHandler statusHandler) {
if (this.statusHandlers == null) {
this.statusHandlers = new ArrayList<>();
}
this.statusHandlers.add(statusHandler);
return this;
}
@Override
public RestClient.Builder requestInterceptor(ClientHttpRequestInterceptor interceptor) {
Assert.notNull(interceptor, "Interceptor must not be null");
initInterceptors().add(interceptor);
return this;
}
@Override
public RestClient.Builder requestInterceptors(Consumer<List<ClientHttpRequestInterceptor>> interceptorsConsumer) {
interceptorsConsumer.accept(initInterceptors());
return this;
}
private List<ClientHttpRequestInterceptor> initInterceptors() {
if (this.interceptors == null) {
this.interceptors = new ArrayList<>();
}
return this.interceptors;
}
@Override
public RestClient.Builder bufferContent(BiPredicate<URI, HttpMethod> predicate) {
this.bufferingPredicate = predicate;
return this;
}
@Override
public RestClient.Builder requestInitializer(ClientHttpRequestInitializer initializer) {
Assert.notNull(initializer, "Initializer must not be null");
initInitializers().add(initializer);
return this;
}
@Override
public RestClient.Builder requestInitializers(Consumer<List<ClientHttpRequestInitializer>> initializersConsumer) {
initializersConsumer.accept(initInitializers());
return this;
}
private List<ClientHttpRequestInitializer> initInitializers() {
if (this.initializers == null) {
this.initializers = new ArrayList<>();
}
return this.initializers;
}
@Override
public RestClient.Builder requestFactory(ClientHttpRequestFactory requestFactory) {
this.requestFactory = requestFactory;
return this;
}
@Override
@SuppressWarnings("removal")
public RestClient.Builder messageConverters(Consumer<List<HttpMessageConverter<?>>> configurer) {
if (this.messageConverters == null) {
this.messageConverters = new ArrayList<>();
HttpMessageConverters.forClient().registerDefaults().build().forEach(this.messageConverters::add);
}
configurer.accept(this.messageConverters);
validateConverters(this.messageConverters);
return this;
}
@Override
@SuppressWarnings("removal")
public RestClient.Builder messageConverters(Iterable<HttpMessageConverter<?>> messageConverters) {
validateConverters(messageConverters);
List<HttpMessageConverter<?>> converters = new ArrayList<>();
messageConverters.forEach(converters::add);
this.messageConverters = converters;
return this;
}
@Override
public RestClient.Builder configureMessageConverters(Consumer<HttpMessageConverters.ClientBuilder> configurer) {
this.convertersConfigurer = (this.convertersConfigurer != null ?
this.convertersConfigurer.andThen(configurer) : configurer);
return this;
}
@Override
public RestClient.Builder observationRegistry(ObservationRegistry observationRegistry) {
Assert.notNull(observationRegistry, "observationRegistry must not be null");
this.observationRegistry = observationRegistry;
return this;
}
@Override
public RestClient.Builder observationConvention(ClientRequestObservationConvention observationConvention) {
this.observationConvention = observationConvention;
return this;
}
@Override
public RestClient.Builder apply(Consumer<RestClient.Builder> builderConsumer) {
builderConsumer.accept(this);
return this;
}
private void validateConverters(@Nullable Iterable<HttpMessageConverter<?>> converters) {
Assert.notNull(converters, "At least one HttpMessageConverter is required");
Assert.isTrue(converters.iterator().hasNext(), "At least one HttpMessageConverter is required");
converters.forEach(converter -> Assert.notNull(converter,
"The HttpMessageConverter list must not contain null elements"));
}
@Override
public RestClient.Builder clone() {
return new DefaultRestClientBuilder(this);
}
@Override
public RestClient build() {
ClientHttpRequestFactory requestFactory = initRequestFactory();
UriBuilderFactory uriBuilderFactory = initUriBuilderFactory();
HttpHeaders defaultHeaders = copyDefaultHeaders();
MultiValueMap<String, String> defaultCookies = copyDefaultCookies();
List<HttpMessageConverter<?>> converters = initMessageConverters();
return new DefaultRestClient(
requestFactory, this.interceptors, this.bufferingPredicate, this.initializers,
uriBuilderFactory, defaultHeaders, defaultCookies, this.defaultApiVersion,
this.apiVersionInserter, this.defaultRequest,
this.statusHandlers, converters,
this.observationRegistry, this.observationConvention,
new DefaultRestClientBuilder(this));
}
private ClientHttpRequestFactory initRequestFactory() {
if (this.requestFactory != null) {
return this.requestFactory;
}
else if (HTTP_COMPONENTS_CLIENT_PRESENT) {
return new HttpComponentsClientHttpRequestFactory();
}
else if (JETTY_CLIENT_PRESENT) {
return new JettyClientHttpRequestFactory();
}
else if (REACTOR_NETTY_CLIENT_PRESENT) {
return new ReactorClientHttpRequestFactory();
}
else if (JDK_CLIENT_PRESENT) {
// java.net.http module might not be loaded, so we can't default to the JDK HttpClient
return new JdkClientHttpRequestFactory();
}
else {
return new SimpleClientHttpRequestFactory();
}
}
private UriBuilderFactory initUriBuilderFactory() {
if (this.uriBuilderFactory != null) {
return this.uriBuilderFactory;
}
DefaultUriBuilderFactory factory = (this.baseUrl != null ?
new DefaultUriBuilderFactory(this.baseUrl) : new DefaultUriBuilderFactory());
factory.setDefaultUriVariables(this.defaultUriVariables);
return factory;
}
private @Nullable HttpHeaders copyDefaultHeaders() {
if (this.defaultHeaders == null) {
return null;
}
HttpHeaders copy = new HttpHeaders();
this.defaultHeaders.forEach((key, values) -> copy.put(key, new ArrayList<>(values)));
return HttpHeaders.readOnlyHttpHeaders(copy);
}
private @Nullable MultiValueMap<String, String> copyDefaultCookies() {
if (this.defaultCookies == null) {
return null;
}
MultiValueMap<String, String> copy = new LinkedMultiValueMap<>(this.defaultCookies.size());
this.defaultCookies.forEach((key, values) -> copy.put(key, new ArrayList<>(values)));
return CollectionUtils.unmodifiableMultiValueMap(copy);
}
private List<HttpMessageConverter<?>> initMessageConverters() {
HttpMessageConverters.ClientBuilder builder = HttpMessageConverters.forClient();
if (this.messageConverters == null && this.convertersConfigurer == null) {
builder.registerDefaults();
}
else {
if (this.messageConverters != null) {
this.messageConverters.forEach(builder::addCustomConverter);
}
if (this.convertersConfigurer != null) {
this.convertersConfigurer.accept(builder);
}
}
List<HttpMessageConverter<?>> result = new ArrayList<>();
builder.build().forEach(result::add);
return result;
}
}
| DefaultRestClientBuilder |
java | FasterXML__jackson-databind | src/main/java/tools/jackson/databind/ser/impl/PropertySerializerMap.java | {
"start": 6999,
"end": 7460
} | class ____
{
public final ValueSerializer<Object> serializer;
public final PropertySerializerMap map;
public SerializerAndMapResult(ValueSerializer<Object> serializer,
PropertySerializerMap map)
{
this.serializer = serializer;
this.map = map;
}
}
/**
* Trivial container for bundling type + serializer entries.
*/
private final static | SerializerAndMapResult |
java | apache__flink | flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/plan/utils/OperatorType.java | {
"start": 1054,
"end": 1189
} | enum ____ {
NestedLoopJoin,
ShuffleHashJoin,
BroadcastHashJoin,
SortMergeJoin,
HashAgg,
SortAgg
}
| OperatorType |
java | apache__camel | components/camel-spring-parent/camel-spring-ai/camel-spring-ai-vector-store/src/main/java/org/apache/camel/component/springai/vectorstore/SpringAiVectorStore.java | {
"start": 874,
"end": 1013
} | class ____ {
public static final String SCHEME = "spring-ai-vector-store";
private SpringAiVectorStore() {
}
}
| SpringAiVectorStore |
java | spring-projects__spring-framework | spring-core/src/test/java/org/springframework/core/ResolvableTypeTests.java | {
"start": 80114,
"end": 80214
} | class ____ extends HashMap<String, RecursiveMap> {
}
@SuppressWarnings("serial")
static | RecursiveMap |
java | mockito__mockito | mockito-core/src/test/java/org/mockito/AnnotationsAreCopiedFromMockedTypeTest.java | {
"start": 5119,
"end": 5200
} | interface ____ {
String value() default "";
}
}
| AnnotationWithCustomValue |
java | quarkusio__quarkus | core/runtime/src/test/java/io/quarkus/devservices/crossclassloader/runtime/ComparableDevServicesConfigTest.java | {
"start": 664,
"end": 5205
} | class ____ {
String configName = "prefix";
UUID uuid = UUID.randomUUID();
@Test
public void identicalWrappersShouldBeEqual() {
DevServiceOwner owner = new DevServiceOwner("someextension", LaunchMode.TEST.name(), configName);
DummyDevServicesConfig globalConfig = new DummyDevServicesConfig("b", 3);
DummyExtensionConfig config = new DummyExtensionConfig("a", 1);
ComparableDevServicesConfig wrapped = new ComparableDevServicesConfig(uuid, owner, globalConfig, config);
assertEquals(wrapped, wrapped);
}
@Test
public void wrappersWrappingIdenticalObjectsShouldBeEqual() {
DevServiceOwner owner = new DevServiceOwner("someextension", LaunchMode.TEST.name(), configName);
DummyDevServicesConfig globalConfig = new DummyDevServicesConfig("b", 3);
DummyExtensionConfig config = new DummyExtensionConfig("a", 1);
assertEquals(new ComparableDevServicesConfig(uuid, owner, globalConfig, config),
new ComparableDevServicesConfig(uuid, owner, globalConfig, config));
}
@Test
public void wrappersWrappingEquivalentObjectsShouldBeEqual() {
DevServiceOwner owner = new DevServiceOwner("someextension", LaunchMode.TEST.name(), configName);
DummyDevServicesConfig globalConfig1 = new DummyDevServicesConfig("b", 3);
DummyExtensionConfig config1 = new DummyExtensionConfig("a", 1);
DummyDevServicesConfig globalConfig2 = new DummyDevServicesConfig("b", 3);
DummyExtensionConfig config2 = new DummyExtensionConfig("a", 1);
assertEquals(new ComparableDevServicesConfig(uuid, owner, globalConfig1, config1),
new ComparableDevServicesConfig(uuid, owner, globalConfig2, config2));
}
@Test
public void wrappersWrappingIdenticalObjectsShouldBeHaveTheSameHashCode() {
DevServiceOwner owner = new DevServiceOwner("someextension", LaunchMode.TEST.name(), configName);
DummyDevServicesConfig globalConfig = new DummyDevServicesConfig("b", 3);
DummyExtensionConfig config = new DummyExtensionConfig("a", 1);
assertEquals(new ComparableDevServicesConfig(uuid, owner, globalConfig, config).hashCode(),
new ComparableDevServicesConfig(uuid, owner, globalConfig, config).hashCode());
}
@Test
public void wrappersWrappingDifferentOwnerExtensionsShouldNotBeEqual() {
DevServiceOwner owner1 = new DevServiceOwner("someextension", LaunchMode.TEST.name(), configName);
DevServiceOwner owner2 = new DevServiceOwner("anotherextension", LaunchMode.TEST.name(), configName);
assertNotEquals(new ComparableDevServicesConfig(uuid, owner1, null, null),
new ComparableDevServicesConfig(uuid, owner2, null, null));
}
@Test
public void wrappersWrappingDifferentOwnerLaunchModesShouldNotBeEqual() {
DevServiceOwner owner1 = new DevServiceOwner("someextension", LaunchMode.TEST.name(), configName);
DevServiceOwner owner2 = new DevServiceOwner("someextension", LaunchMode.DEVELOPMENT.name(), configName);
assertNotEquals(new ComparableDevServicesConfig(uuid, owner1, null, null),
new ComparableDevServicesConfig(uuid, owner2, null, null));
}
@Test
public void wrappersWrappingDifferentIdentifyingConfigShouldNotBeEqual() {
DummyExtensionConfig config1 = new DummyExtensionConfig("a", 1);
DummyExtensionConfig config2 = new DummyExtensionConfig("a", 2);
assertNotEquals(new ComparableDevServicesConfig(uuid, null, null, config1),
new ComparableDevServicesConfig(uuid, null, null, config2));
}
@Test
public void wrappersWrappingDifferentIdentifyingConfigHaveDifferentHashCodes() {
DummyExtensionConfig config1 = new DummyExtensionConfig("a", 1);
DummyExtensionConfig config2 = new DummyExtensionConfig("a", 2);
assertNotEquals(new ComparableDevServicesConfig(uuid, null, null, config1).hashCode(),
new ComparableDevServicesConfig(uuid, null, null, config2).hashCode());
}
@Test
public void nullUuidIsHandled() {
DummyExtensionConfig config = new DummyExtensionConfig("a", 1);
assertNotNull(new ComparableDevServicesConfig(null, null, null, config).hashCode());
assertEquals(new ComparableDevServicesConfig(null, null, null, config).hashCode(),
new ComparableDevServicesConfig(null, null, null, config).hashCode());
}
@ConfigGroup
| ComparableDevServicesConfigTest |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/inject/guice/BindingToUnqualifiedCommonTypeTest.java | {
"start": 893,
"end": 1518
} | class ____ {
private final CompilationTestHelper compilationHelper =
CompilationTestHelper.newInstance(BindingToUnqualifiedCommonType.class, getClass());
@Test
public void positiveCase() {
compilationHelper
.addSourceLines(
"BindingToUnqualifiedCommonTypePositiveCases.java",
"""
package com.google.errorprone.bugpatterns.inject.guice.testdata;
import com.google.inject.AbstractModule;
import com.google.inject.Provides;
/** Tests for {@code BindingToUnqualifiedCommonType} */
public | BindingToUnqualifiedCommonTypeTest |
java | apache__flink | flink-rpc/flink-rpc-akka/src/test/java/org/apache/flink/runtime/rpc/pekko/PekkoRpcActorTest.java | {
"start": 32117,
"end": 33759
} | class ____ extends RpcEndpoint
implements AsyncOperationGateway {
private final CompletableFuture<Void> onStopFuture;
private final OneShotLatch blockAsyncOperation = new OneShotLatch();
private final OneShotLatch enterAsyncOperation = new OneShotLatch();
private final AtomicInteger asyncOperationCounter = new AtomicInteger(0);
protected TerminatingAfterOnStopFutureCompletionEndpoint(
RpcService rpcService, CompletableFuture<Void> onStopFuture) {
super(rpcService);
this.onStopFuture = onStopFuture;
}
@Override
public CompletableFuture<Integer> asyncOperation(Duration timeout) {
asyncOperationCounter.incrementAndGet();
enterAsyncOperation.trigger();
try {
blockAsyncOperation.await();
} catch (InterruptedException e) {
throw new FlinkRuntimeException(e);
}
return CompletableFuture.completedFuture(42);
}
@Override
public CompletableFuture<Void> onStop() {
return onStopFuture;
}
void awaitEnterAsyncOperation() throws InterruptedException {
enterAsyncOperation.await();
}
void triggerUnblockAsyncOperation() {
blockAsyncOperation.trigger();
}
int getNumberAsyncOperationCalls() {
return asyncOperationCounter.get();
}
}
// ------------------------------------------------------------------------
private static final | TerminatingAfterOnStopFutureCompletionEndpoint |
java | spring-projects__spring-boot | module/spring-boot-quartz/src/test/java/org/springframework/boot/quartz/autoconfigure/QuartzAutoConfigurationTests.java | {
"start": 21216,
"end": 21406
} | class ____ extends BaseQuartzConfiguration {
@Bean
Executor executor() {
return mock(Executor.class);
}
}
@Configuration(proxyBeanMethods = false)
static | MockExecutorConfiguration |
java | google__guava | android/guava/src/com/google/common/hash/SipHashFunction.java | {
"start": 1225,
"end": 2974
} | class ____ extends AbstractHashFunction implements Serializable {
static final HashFunction SIP_HASH_24 =
new SipHashFunction(2, 4, 0x0706050403020100L, 0x0f0e0d0c0b0a0908L);
// The number of compression rounds.
private final int c;
// The number of finalization rounds.
private final int d;
// Two 64-bit keys (represent a single 128-bit key).
private final long k0;
private final long k1;
/**
* @param c the number of compression rounds (must be positive)
* @param d the number of finalization rounds (must be positive)
* @param k0 the first half of the key
* @param k1 the second half of the key
*/
SipHashFunction(int c, int d, long k0, long k1) {
checkArgument(
c > 0, "The number of SipRound iterations (c=%s) during Compression must be positive.", c);
checkArgument(
d > 0, "The number of SipRound iterations (d=%s) during Finalization must be positive.", d);
this.c = c;
this.d = d;
this.k0 = k0;
this.k1 = k1;
}
@Override
public int bits() {
return 64;
}
@Override
public Hasher newHasher() {
return new SipHasher(c, d, k0, k1);
}
// TODO(kak): Implement and benchmark the hashFoo() shortcuts.
@Override
public String toString() {
return "Hashing.sipHash" + c + "" + d + "(" + k0 + ", " + k1 + ")";
}
@Override
public boolean equals(@Nullable Object object) {
if (object instanceof SipHashFunction) {
SipHashFunction other = (SipHashFunction) object;
return (c == other.c) && (d == other.d) && (k0 == other.k0) && (k1 == other.k1);
}
return false;
}
@Override
public int hashCode() {
return (int) (getClass().hashCode() ^ c ^ d ^ k0 ^ k1);
}
private static final | SipHashFunction |
java | apache__camel | components/camel-spring-parent/camel-spring-xml/src/test/java/org/apache/camel/spring/processor/onexception/SpringOnExceptionUseOriginalMessageTest.java | {
"start": 1078,
"end": 1415
} | class ____ extends OnExceptionUseOriginalMessageTest {
@Override
protected CamelContext createCamelContext() throws Exception {
return createSpringCamelContext(this,
"org/apache/camel/spring/processor/onexception/SpringOnExceptionUseOriginalMessageTest.xml");
}
}
| SpringOnExceptionUseOriginalMessageTest |
java | alibaba__fastjson | src/test/java/com/alibaba/json/bvt/jdk8/LocalTimeTest.java | {
"start": 1176,
"end": 1389
} | class ____ {
private LocalTime date;
public LocalTime getDate() {
return date;
}
public void setDate(LocalTime date) {
this.date = date;
}
}
}
| VO |
java | apache__hadoop | hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSAuthenticationFilter.java | {
"start": 2265,
"end": 4019
} | class ____
extends DelegationTokenAuthenticationFilter {
public static final String CONFIG_PREFIX = KMSConfiguration.CONFIG_PREFIX +
"authentication.";
@Override
protected Properties getConfiguration(String configPrefix,
FilterConfig filterConfig) {
Configuration conf = KMSWebApp.getConfiguration();
return getKMSConfiguration(conf);
}
@VisibleForTesting
Properties getKMSConfiguration(Configuration conf) {
Properties props = new Properties();
Map<String, String> propsWithPrefixMap = conf.getPropsWithPrefix(
CONFIG_PREFIX);
for (Map.Entry<String, String> entry : propsWithPrefixMap.entrySet()) {
props.setProperty(entry.getKey(), entry.getValue());
}
String authType = props.getProperty(AUTH_TYPE);
if (authType.equals(PseudoAuthenticationHandler.TYPE)) {
props.setProperty(AUTH_TYPE,
PseudoDelegationTokenAuthenticationHandler.class.getName());
} else if (authType.equals(KerberosAuthenticationHandler.TYPE)) {
props.setProperty(AUTH_TYPE,
KerberosDelegationTokenAuthenticationHandler.class.getName());
}
props.setProperty(DelegationTokenAuthenticationHandler.TOKEN_KIND,
KMSDelegationToken.TOKEN_KIND_STR);
return props;
}
protected Configuration getProxyuserConfiguration(FilterConfig filterConfig) {
Map<String, String> proxyuserConf = KMSWebApp.getConfiguration().
getValByRegex("hadoop\\.kms\\.proxyuser\\.");
Configuration conf = new Configuration(false);
for (Map.Entry<String, String> entry : proxyuserConf.entrySet()) {
conf.set(entry.getKey().substring("hadoop.kms.".length()),
entry.getValue());
}
return conf;
}
private static | KMSAuthenticationFilter |
java | elastic__elasticsearch | x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/RollupJobCaps.java | {
"start": 1728,
"end": 8864
} | class ____ implements Writeable, ToXContentObject {
private static ParseField JOB_ID = new ParseField("job_id");
private static ParseField ROLLUP_INDEX = new ParseField("rollup_index");
private static ParseField INDEX_PATTERN = new ParseField("index_pattern");
private static ParseField FIELDS = new ParseField("fields");
private final String jobID;
private final String rollupIndex;
private final String indexPattern;
private final Map<String, RollupFieldCaps> fieldCapLookup;
// TODO now that these rollup caps are being used more widely (e.g. search), perhaps we should
// store the RollupJob and translate into FieldCaps on demand for json output. Would make working with
// it internally a lot easier
public RollupJobCaps(RollupJobConfig job) {
jobID = job.getId();
rollupIndex = job.getRollupIndex();
indexPattern = job.getIndexPattern();
fieldCapLookup = createRollupFieldCaps(job);
}
public RollupJobCaps(String jobID, String rollupIndex, String indexPattern, Map<String, RollupFieldCaps> fieldCapLookup) {
this.jobID = jobID;
this.rollupIndex = rollupIndex;
this.indexPattern = indexPattern;
this.fieldCapLookup = Collections.unmodifiableMap(Objects.requireNonNull(fieldCapLookup));
}
public RollupJobCaps(StreamInput in) throws IOException {
this.jobID = in.readString();
this.rollupIndex = in.readString();
this.indexPattern = in.readString();
this.fieldCapLookup = in.readMap(RollupFieldCaps::new);
}
public Map<String, RollupFieldCaps> getFieldCaps() {
return fieldCapLookup;
}
public String getRollupIndex() {
return rollupIndex;
}
public String getIndexPattern() {
return indexPattern;
}
public String getJobID() {
return jobID;
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeString(jobID);
out.writeString(rollupIndex);
out.writeString(indexPattern);
out.writeMap(fieldCapLookup, StreamOutput::writeWriteable);
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
{
builder.field(JOB_ID.getPreferredName(), jobID);
builder.field(ROLLUP_INDEX.getPreferredName(), rollupIndex);
builder.field(INDEX_PATTERN.getPreferredName(), indexPattern);
builder.startObject(FIELDS.getPreferredName());
{
for (Map.Entry<String, RollupFieldCaps> fieldCap : fieldCapLookup.entrySet()) {
builder.xContentList(fieldCap.getKey(), fieldCap.getValue());
}
}
builder.endObject();
}
builder.endObject();
return builder;
}
@Override
public boolean equals(Object other) {
if (this == other) {
return true;
}
if (other == null || getClass() != other.getClass()) {
return false;
}
RollupJobCaps that = (RollupJobCaps) other;
return Objects.equals(this.jobID, that.jobID)
&& Objects.equals(this.rollupIndex, that.rollupIndex)
&& Objects.equals(this.fieldCapLookup, that.fieldCapLookup);
}
@Override
public int hashCode() {
return Objects.hash(jobID, rollupIndex, fieldCapLookup, indexPattern);
}
private static Map<String, RollupFieldCaps> createRollupFieldCaps(final RollupJobConfig rollupJobConfig) {
final Map<String, List<Map<String, Object>>> tempFieldCaps = new HashMap<>();
final GroupConfig groupConfig = rollupJobConfig.getGroupConfig();
if (groupConfig != null) {
// Create RollupFieldCaps for the date histogram
final DateHistogramGroupConfig dateHistogram = groupConfig.getDateHistogram();
final Map<String, Object> dateHistogramAggCap = new HashMap<>();
dateHistogramAggCap.put("agg", DateHistogramAggregationBuilder.NAME);
dateHistogramAggCap.put(dateHistogram.getIntervalTypeName(), dateHistogram.getInterval().toString());
if (dateHistogram.getDelay() != null) {
dateHistogramAggCap.put(DateHistogramGroupConfig.DELAY, dateHistogram.getDelay().toString());
}
dateHistogramAggCap.put(DateHistogramGroupConfig.TIME_ZONE, dateHistogram.getTimeZone());
List<Map<String, Object>> dateAggCaps = tempFieldCaps.getOrDefault(dateHistogram.getField(), new ArrayList<>());
dateAggCaps.add(dateHistogramAggCap);
tempFieldCaps.put(dateHistogram.getField(), dateAggCaps);
// Create RollupFieldCaps for the histogram
final HistogramGroupConfig histogram = groupConfig.getHistogram();
if (histogram != null) {
final Map<String, Object> histogramAggCap = new HashMap<>();
histogramAggCap.put("agg", HistogramAggregationBuilder.NAME);
histogramAggCap.put(HistogramGroupConfig.INTERVAL, histogram.getInterval());
Arrays.stream(rollupJobConfig.getGroupConfig().getHistogram().getFields()).forEach(field -> {
List<Map<String, Object>> caps = tempFieldCaps.getOrDefault(field, new ArrayList<>());
caps.add(histogramAggCap);
tempFieldCaps.put(field, caps);
});
}
// Create RollupFieldCaps for the term
final TermsGroupConfig terms = groupConfig.getTerms();
if (terms != null) {
final Map<String, Object> termsAggCap = singletonMap("agg", TermsAggregationBuilder.NAME);
Arrays.stream(rollupJobConfig.getGroupConfig().getTerms().getFields()).forEach(field -> {
List<Map<String, Object>> caps = tempFieldCaps.getOrDefault(field, new ArrayList<>());
caps.add(termsAggCap);
tempFieldCaps.put(field, caps);
});
}
}
// Create RollupFieldCaps for the metrics
final List<MetricConfig> metricsConfig = rollupJobConfig.getMetricsConfig();
if (metricsConfig.size() > 0) {
rollupJobConfig.getMetricsConfig().forEach(metricConfig -> {
final List<Map<String, Object>> metrics = metricConfig.getMetrics()
.stream()
.map(metric -> singletonMap("agg", (Object) metric))
.toList();
metrics.forEach(m -> {
List<Map<String, Object>> caps = tempFieldCaps.getOrDefault(metricConfig.getField(), new ArrayList<>());
caps.add(m);
tempFieldCaps.put(metricConfig.getField(), caps);
});
});
}
return Collections.unmodifiableMap(
tempFieldCaps.entrySet().stream().collect(Collectors.toMap(Map.Entry::getKey, e -> new RollupFieldCaps(e.getValue())))
);
}
public static | RollupJobCaps |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/onetoone/jointable/OtherEntity.java | {
"start": 264,
"end": 487
} | class ____ {
@Id
private String id;
public OtherEntity() {
}
public OtherEntity(String id) {
this.id = id;
}
public String getId() {
return id;
}
public void setId(String id) {
this.id = id;
}
}
| OtherEntity |
java | apache__flink | flink-core/src/test/java/org/apache/flink/core/fs/RefCountedBufferingFileStreamTest.java | {
"start": 1430,
"end": 5552
} | class ____ {
private static final int BUFFER_SIZE = 10;
@TempDir private java.nio.file.Path temporaryFolder;
@Test
void testSmallWritesGoToBuffer() throws IOException {
RefCountedBufferingFileStream stream = getStreamToTest();
final byte[] contentToWrite = bytesOf("hello");
stream.write(contentToWrite);
assertThat(stream.getPositionInBuffer()).isEqualTo(contentToWrite.length);
assertThat(stream.getPos()).isEqualTo(contentToWrite.length);
stream.close();
stream.release();
}
@Test
void testExceptionWhenWritingToClosedFile() {
assertThatExceptionOfType(IOException.class)
.isThrownBy(
() -> {
RefCountedBufferingFileStream stream = getStreamToTest();
final byte[] contentToWrite = bytesOf("hello");
stream.write(contentToWrite);
assertThat(stream.getPositionInBuffer())
.isEqualTo(contentToWrite.length);
assertThat(stream.getPos()).isEqualTo(contentToWrite.length);
stream.close();
stream.write(contentToWrite);
});
}
@Test
void testBigWritesGoToFile() throws IOException {
RefCountedBufferingFileStream stream = getStreamToTest();
final byte[] contentToWrite = bytesOf("hello big world");
stream.write(contentToWrite);
assertThat(stream.getPositionInBuffer()).isZero();
assertThat(stream.getPos()).isEqualTo(contentToWrite.length);
stream.close();
stream.release();
}
@Test
void testSpillingWhenBufferGetsFull() throws IOException {
RefCountedBufferingFileStream stream = getStreamToTest();
final byte[] firstContentToWrite = bytesOf("hello");
stream.write(firstContentToWrite);
assertThat(stream.getPositionInBuffer()).isEqualTo(firstContentToWrite.length);
assertThat(stream.getPos()).isEqualTo(firstContentToWrite.length);
final byte[] secondContentToWrite = bytesOf(" world!");
stream.write(secondContentToWrite);
assertThat(stream.getPositionInBuffer()).isEqualTo(secondContentToWrite.length);
assertThat(stream.getPos())
.isEqualTo(firstContentToWrite.length + secondContentToWrite.length);
stream.close();
stream.release();
}
@Test
void testFlush() throws IOException {
RefCountedBufferingFileStream stream = getStreamToTest();
final byte[] contentToWrite = bytesOf("hello");
stream.write(contentToWrite);
assertThat(stream.getPositionInBuffer()).isEqualTo(contentToWrite.length);
assertThat(stream.getPos()).isEqualTo(contentToWrite.length);
stream.flush();
assertThat(stream.getPositionInBuffer()).isZero();
assertThat(stream.getPos()).isEqualTo(contentToWrite.length);
final byte[] contentRead = new byte[contentToWrite.length];
new FileInputStream(stream.getInputFile()).read(contentRead, 0, contentRead.length);
assertThat(contentRead).isEqualTo(contentToWrite);
stream.release();
}
// ---------------------------- Utility Classes ----------------------------
private RefCountedBufferingFileStream getStreamToTest() throws IOException {
return new RefCountedBufferingFileStream(getRefCountedFileWithContent(), BUFFER_SIZE);
}
private RefCountedFileWithStream getRefCountedFileWithContent() throws IOException {
final File newFile =
new File(TempDirUtils.newFolder(temporaryFolder), ".tmp_" + UUID.randomUUID());
final OutputStream out =
Files.newOutputStream(newFile.toPath(), StandardOpenOption.CREATE_NEW);
return RefCountedFileWithStream.newFile(newFile, out);
}
private static byte[] bytesOf(String str) {
return str.getBytes(StandardCharsets.UTF_8);
}
}
| RefCountedBufferingFileStreamTest |
java | elastic__elasticsearch | x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/termsenum/TermsEnumResponseTests.java | {
"start": 1109,
"end": 4856
} | class ____ extends AbstractBroadcastResponseTestCase<TermsEnumResponse> {
@SuppressWarnings("unchecked")
private static final ConstructingObjectParser<TermsEnumResponse, Void> PARSER = new ConstructingObjectParser<>(
"term_enum_results",
true,
arg -> {
BaseBroadcastResponse response = (BaseBroadcastResponse) arg[0];
return new TermsEnumResponse(
(List<String>) arg[1],
response.getTotalShards(),
response.getSuccessfulShards(),
response.getFailedShards(),
Arrays.asList(response.getShardFailures()),
(Boolean) arg[2]
);
}
);
static {
AbstractBroadcastResponseTestCase.declareBroadcastFields(PARSER);
PARSER.declareStringArray(optionalConstructorArg(), new ParseField(TermsEnumResponse.TERMS_FIELD));
PARSER.declareBoolean(optionalConstructorArg(), new ParseField(TermsEnumResponse.COMPLETE_FIELD));
}
protected static List<String> getRandomTerms() {
int termCount = randomIntBetween(0, 100);
Set<String> uniqueTerms = Sets.newHashSetWithExpectedSize(termCount);
while (uniqueTerms.size() < termCount) {
String s = randomAlphaOfLengthBetween(1, 10);
uniqueTerms.add(s);
}
List<String> terms = new ArrayList<>(uniqueTerms);
return terms;
}
private static TermsEnumResponse createRandomTermEnumResponse() {
int totalShards = randomIntBetween(1, 10);
int successfulShards = randomIntBetween(0, totalShards);
int failedShards = totalShards - successfulShards;
List<DefaultShardOperationFailedException> shardFailures = new ArrayList<>(failedShards);
for (int i = 0; i < failedShards; i++) {
ElasticsearchException exc = new ElasticsearchException("some_error_" + randomInt());
String index = "index_" + randomInt(1000);
int shard = randomInt(100);
shardFailures.add(new DefaultShardOperationFailedException(index, shard, exc));
}
return new TermsEnumResponse(getRandomTerms(), totalShards, successfulShards, failedShards, shardFailures, randomBoolean());
}
@Override
protected TermsEnumResponse doParseInstance(XContentParser parser) throws IOException {
return PARSER.apply(parser, null);
}
@Override
protected TermsEnumResponse createTestInstance() {
return createRandomTermEnumResponse();
}
@Override
protected void assertEqualInstances(TermsEnumResponse response, TermsEnumResponse parsedResponse) {
super.assertEqualInstances(response, parsedResponse);
assertEquals(response.getTerms().size(), parsedResponse.getTerms().size());
assertTrue(response.getTerms().containsAll(parsedResponse.getTerms()));
}
@Override
protected TermsEnumResponse createTestInstance(
int totalShards,
int successfulShards,
int failedShards,
List<DefaultShardOperationFailedException> failures
) {
return new TermsEnumResponse(getRandomTerms(), totalShards, successfulShards, failedShards, failures, randomBoolean());
}
@Override
public void testToXContent() {
String s = randomAlphaOfLengthBetween(1, 10);
List<String> terms = new ArrayList<>();
terms.add(s);
TermsEnumResponse response = new TermsEnumResponse(terms, 10, 10, 0, new ArrayList<>(), true);
String output = Strings.toString(response);
assertEquals(Strings.format("""
{"_shards":{"total":10,"successful":10,"failed":0},"terms":["%s"],"complete":true}""", s), output);
}
}
| TermsEnumResponseTests |
java | elastic__elasticsearch | build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/doc/AsciidocSnippetParser.java | {
"start": 634,
"end": 3340
} | class ____ extends SnippetParser {
public static final Pattern SNIPPET_PATTERN = Pattern.compile("-{4,}\\s*");
public static final Pattern TEST_RESPONSE_PATTERN = Pattern.compile("\\/\\/\s*TESTRESPONSE(\\[(.+)\\])?\s*");
public static final Pattern SOURCE_PATTERN = Pattern.compile(
"\\[\"?source\"?(?:\\.[^,]+)?,\\s*\"?([-\\w]+)\"?(,((?!id=).)*(id=\"?([-\\w]+)\"?)?(.*))?].*"
);
public static final String CONSOLE_REGEX = "\\/\\/\s*CONSOLE\s*";
public static final String NOTCONSOLE_REGEX = "\\/\\/\s*NOTCONSOLE\s*";
public static final String TESTSETUP_REGEX = "\\/\\/\s*TESTSETUP\s*";
public static final String TEARDOWN_REGEX = "\\/\\/\s*TEARDOWN\s*";
public AsciidocSnippetParser(Map<String, String> defaultSubstitutions) {
super(defaultSubstitutions);
}
@Override
protected Pattern testResponsePattern() {
return TEST_RESPONSE_PATTERN;
}
protected Pattern testPattern() {
return Pattern.compile("\\/\\/\s*TEST(\\[(.+)\\])?\s*");
}
private int lastLanguageLine = 0;
private String currentName = null;
private String lastLanguage = null;
protected void parseLine(List<Snippet> snippets, int lineNumber, String line) {
if (SNIPPET_PATTERN.matcher(line).matches()) {
if (snippetBuilder == null) {
snippetBuilder = newSnippetBuilder().withLineNumber(lineNumber + 1)
.withName(currentName)
.withSubstitutions(defaultSubstitutions);
if (lastLanguageLine == lineNumber - 1) {
snippetBuilder.withLanguage(lastLanguage);
}
currentName = null;
} else {
snippetBuilder.withEnd(lineNumber + 1);
}
return;
}
Source source = matchSource(line);
if (source.matches) {
lastLanguage = source.language;
lastLanguageLine = lineNumber;
currentName = source.name;
return;
}
handleCommons(snippets, line);
}
protected String getTestSetupRegex() {
return TESTSETUP_REGEX;
}
protected String getTeardownRegex() {
return TEARDOWN_REGEX;
}
protected String getNotconsoleRegex() {
return NOTCONSOLE_REGEX;
}
protected String getConsoleRegex() {
return CONSOLE_REGEX;
}
static Source matchSource(String line) {
Matcher matcher = SOURCE_PATTERN.matcher(line);
if (matcher.matches()) {
return new Source(true, matcher.group(1), matcher.group(5));
}
return new Source(false, null, null);
}
}
| AsciidocSnippetParser |
java | spring-projects__spring-framework | spring-beans/src/main/java/org/springframework/beans/PropertyEditorRegistrySupport.java | {
"start": 3776,
"end": 22458
} | class ____ implements PropertyEditorRegistry {
private @Nullable ConversionService conversionService;
private boolean defaultEditorsActive = false;
private boolean configValueEditorsActive = false;
private @Nullable PropertyEditorRegistrar defaultEditorRegistrar;
@SuppressWarnings("NullAway.Init")
private Map<Class<?>, PropertyEditor> defaultEditors;
private @Nullable Map<Class<?>, PropertyEditor> overriddenDefaultEditors;
private @Nullable Map<Class<?>, PropertyEditor> customEditors;
private @Nullable Map<String, CustomEditorHolder> customEditorsForPath;
private @Nullable Map<Class<?>, PropertyEditor> customEditorCache;
/**
* Specify a {@link ConversionService} to use for converting
* property values, as an alternative to JavaBeans PropertyEditors.
*/
public void setConversionService(@Nullable ConversionService conversionService) {
this.conversionService = conversionService;
}
/**
* Return the associated ConversionService, if any.
*/
public @Nullable ConversionService getConversionService() {
return this.conversionService;
}
//---------------------------------------------------------------------
// Management of default editors
//---------------------------------------------------------------------
/**
* Activate the default editors for this registry instance,
* allowing for lazily registering default editors when needed.
*/
protected void registerDefaultEditors() {
this.defaultEditorsActive = true;
}
/**
* Activate config value editors which are only intended for configuration purposes,
* such as {@link org.springframework.beans.propertyeditors.StringArrayPropertyEditor}.
* <p>Those editors are not registered by default simply because they are in
* general inappropriate for data binding purposes. Of course, you may register
* them individually in any case, through {@link #registerCustomEditor}.
*/
public void useConfigValueEditors() {
this.configValueEditorsActive = true;
}
/**
* Set a registrar for default editors, as a lazy way of overriding default editors.
* <p>This is expected to be a collaborator with {@link PropertyEditorRegistrySupport},
* downcasting the given {@link PropertyEditorRegistry} accordingly and calling
* {@link #overrideDefaultEditor} for registering additional default editors on it.
* @param registrar the registrar to call when default editors are actually needed
* @since 6.2.3
* @see #overrideDefaultEditor
*/
public void setDefaultEditorRegistrar(PropertyEditorRegistrar registrar) {
this.defaultEditorRegistrar = registrar;
}
/**
* Override the default editor for the specified type with the given property editor.
* <p>Note that this is different from registering a custom editor in that the editor
* semantically still is a default editor. A ConversionService will override such a
* default editor, whereas custom editors usually override the ConversionService.
* @param requiredType the type of the property
* @param propertyEditor the editor to register
* @see #registerCustomEditor(Class, PropertyEditor)
*/
public void overrideDefaultEditor(Class<?> requiredType, PropertyEditor propertyEditor) {
if (this.overriddenDefaultEditors == null) {
this.overriddenDefaultEditors = new HashMap<>();
}
this.overriddenDefaultEditors.put(requiredType, propertyEditor);
}
/**
 * Retrieve the default editor for the given property type, if any.
 * <p>Lazily registers the default editors, if they are active.
 * @param requiredType type of the property
 * @return the default editor, or {@code null} if none found
 * @see #registerDefaultEditors
 */
public @Nullable PropertyEditor getDefaultEditor(Class<?> requiredType) {
	if (!this.defaultEditorsActive) {
		return null;
	}
	// Give a configured registrar the chance to record overrides, but only
	// as long as no overrides have been recorded yet (null map guard).
	if (this.overriddenDefaultEditors == null && this.defaultEditorRegistrar != null) {
		this.defaultEditorRegistrar.registerCustomEditors(this);
	}
	// Overridden default editors take precedence over the built-in defaults.
	if (this.overriddenDefaultEditors != null) {
		PropertyEditor editor = this.overriddenDefaultEditors.get(requiredType);
		if (editor != null) {
			return editor;
		}
	}
	// Lazily populate the shared default editor map on first actual access.
	if (this.defaultEditors == null) {
		createDefaultEditors();
	}
	return this.defaultEditors.get(requiredType);
}
/**
 * Actually register the default editors for this registry instance.
 * <p>Invoked lazily from {@link #getDefaultEditor} on first access;
 * the map is populated once here and only read afterwards.
 */
private void createDefaultEditors() {
	this.defaultEditors = new HashMap<>(64);
	// Simple editors, without parameterization capabilities.
	// The JDK does not contain a default editor for any of these target types.
	this.defaultEditors.put(Charset.class, new CharsetEditor());
	this.defaultEditors.put(Class.class, new ClassEditor());
	this.defaultEditors.put(Class[].class, new ClassArrayEditor());
	this.defaultEditors.put(Currency.class, new CurrencyEditor());
	this.defaultEditors.put(File.class, new FileEditor());
	this.defaultEditors.put(InputStream.class, new InputStreamEditor());
	this.defaultEditors.put(InputSource.class, new InputSourceEditor());
	this.defaultEditors.put(Locale.class, new LocaleEditor());
	this.defaultEditors.put(Path.class, new PathEditor());
	this.defaultEditors.put(Pattern.class, new PatternEditor());
	this.defaultEditors.put(Properties.class, new PropertiesEditor());
	this.defaultEditors.put(Reader.class, new ReaderEditor());
	this.defaultEditors.put(Resource[].class, new ResourceArrayPropertyEditor());
	this.defaultEditors.put(TimeZone.class, new TimeZoneEditor());
	this.defaultEditors.put(URI.class, new URIEditor());
	this.defaultEditors.put(URL.class, new URLEditor());
	this.defaultEditors.put(UUID.class, new UUIDEditor());
	this.defaultEditors.put(ZoneId.class, new ZoneIdEditor());
	// Default instances of collection editors.
	// Can be overridden by registering custom instances of those as custom editors.
	this.defaultEditors.put(Collection.class, new CustomCollectionEditor(Collection.class));
	this.defaultEditors.put(Set.class, new CustomCollectionEditor(Set.class));
	this.defaultEditors.put(SortedSet.class, new CustomCollectionEditor(SortedSet.class));
	this.defaultEditors.put(List.class, new CustomCollectionEditor(List.class));
	this.defaultEditors.put(SortedMap.class, new CustomMapEditor(SortedMap.class));
	// Default editors for primitive arrays.
	this.defaultEditors.put(byte[].class, new ByteArrayPropertyEditor());
	this.defaultEditors.put(char[].class, new CharArrayPropertyEditor());
	// The JDK does not contain a default editor for char!
	this.defaultEditors.put(char.class, new CharacterEditor(false));
	this.defaultEditors.put(Character.class, new CharacterEditor(true));
	// Spring's CustomBooleanEditor accepts more flag values than the JDK's default editor.
	// The boolean flag selects whether empty text is allowed (wrapper type -> null).
	this.defaultEditors.put(boolean.class, new CustomBooleanEditor(false));
	this.defaultEditors.put(Boolean.class, new CustomBooleanEditor(true));
	// The JDK does not contain default editors for number wrapper types!
	// Override JDK primitive number editors with our own CustomNumberEditor.
	this.defaultEditors.put(byte.class, new CustomNumberEditor(Byte.class, false));
	this.defaultEditors.put(Byte.class, new CustomNumberEditor(Byte.class, true));
	this.defaultEditors.put(short.class, new CustomNumberEditor(Short.class, false));
	this.defaultEditors.put(Short.class, new CustomNumberEditor(Short.class, true));
	this.defaultEditors.put(int.class, new CustomNumberEditor(Integer.class, false));
	this.defaultEditors.put(Integer.class, new CustomNumberEditor(Integer.class, true));
	this.defaultEditors.put(long.class, new CustomNumberEditor(Long.class, false));
	this.defaultEditors.put(Long.class, new CustomNumberEditor(Long.class, true));
	this.defaultEditors.put(float.class, new CustomNumberEditor(Float.class, false));
	this.defaultEditors.put(Float.class, new CustomNumberEditor(Float.class, true));
	this.defaultEditors.put(double.class, new CustomNumberEditor(Double.class, false));
	this.defaultEditors.put(Double.class, new CustomNumberEditor(Double.class, true));
	this.defaultEditors.put(BigDecimal.class, new CustomNumberEditor(BigDecimal.class, true));
	this.defaultEditors.put(BigInteger.class, new CustomNumberEditor(BigInteger.class, true));
	// Only register config value editors if explicitly requested.
	if (this.configValueEditorsActive) {
		StringArrayPropertyEditor sae = new StringArrayPropertyEditor();
		this.defaultEditors.put(String[].class, sae);
		this.defaultEditors.put(short[].class, sae);
		this.defaultEditors.put(int[].class, sae);
		this.defaultEditors.put(long[].class, sae);
	}
}
/**
 * Copy the default editors registered in this instance to the given target registry.
 * <p>NOTE(review): the editor maps are shared by reference, not deep-copied -
 * presumably acceptable since they are populated lazily and then read-only;
 * confirm that later overrides on one registry are intended to be visible to both.
 * @param target the target registry to copy to
 */
protected void copyDefaultEditorsTo(PropertyEditorRegistrySupport target) {
	target.defaultEditorsActive = this.defaultEditorsActive;
	target.configValueEditorsActive = this.configValueEditorsActive;
	target.defaultEditors = this.defaultEditors;
	target.overriddenDefaultEditors = this.overriddenDefaultEditors;
}
//---------------------------------------------------------------------
// Management of custom editors
//---------------------------------------------------------------------
@Override
public void registerCustomEditor(Class<?> requiredType, PropertyEditor propertyEditor) {
	// Type-only registration: delegate with no property path restriction.
	registerCustomEditor(requiredType, null, propertyEditor);
}
@Override
public void registerCustomEditor(@Nullable Class<?> requiredType, @Nullable String propertyPath, PropertyEditor propertyEditor) {
	// At least one of the two selectors must identify what the editor applies to.
	if (requiredType == null && propertyPath == null) {
		throw new IllegalArgumentException("Either requiredType or propertyPath is required");
	}
	if (propertyPath == null) {
		// Type-wide registration: store directly and drop the assignability cache,
		// since the new entry may now match previously cached lookups.
		if (this.customEditors == null) {
			this.customEditors = new LinkedHashMap<>(16);
		}
		this.customEditors.put(requiredType, propertyEditor);
		this.customEditorCache = null;
	}
	else {
		// Path-specific registration: keep the editor together with the type
		// it was registered for.
		if (this.customEditorsForPath == null) {
			this.customEditorsForPath = new LinkedHashMap<>(16);
		}
		this.customEditorsForPath.put(propertyPath, new CustomEditorHolder(propertyEditor, requiredType));
	}
}
@Override
public @Nullable PropertyEditor findCustomEditor(@Nullable Class<?> requiredType, @Nullable String propertyPath) {
	Class<?> typeToUse = requiredType;
	if (propertyPath != null) {
		if (this.customEditorsForPath != null) {
			// Property-specific editors take precedence over type-specific ones.
			PropertyEditor editor = getCustomEditor(propertyPath, requiredType);
			if (editor == null) {
				// Retry with all key/index-stripped variations of the path.
				List<String> strippedPaths = new ArrayList<>();
				addStrippedPropertyPaths(strippedPaths, "", propertyPath);
				for (String strippedPath : strippedPaths) {
					editor = getCustomEditor(strippedPath, requiredType);
					if (editor != null) {
						break;
					}
				}
			}
			if (editor != null) {
				return editor;
			}
		}
		if (requiredType == null) {
			// Derive the type from the path so a type-specific editor can still match.
			typeToUse = getPropertyType(propertyPath);
		}
	}
	// No property-specific editor -> fall back to a type-specific editor.
	return getCustomEditor(typeToUse);
}
/**
 * Determine whether this registry contains a custom editor
 * for the specified array/collection element.
 * @param elementType the target type of the element
 * (can be {@code null} if not known)
 * @param propertyPath the property path (typically of the array/collection;
 * can be {@code null} if not known)
 * @return whether a matching custom editor has been found
 */
public boolean hasCustomEditorForElement(@Nullable Class<?> elementType, @Nullable String propertyPath) {
	if (propertyPath != null && this.customEditorsForPath != null) {
		for (Map.Entry<String, CustomEditorHolder> entry : this.customEditorsForPath.entrySet()) {
			// Skip registrations whose path does not match the requested one.
			if (!PropertyAccessorUtils.matchesProperty(entry.getKey(), propertyPath)) {
				continue;
			}
			if (entry.getValue().getPropertyEditor(elementType) != null) {
				return true;
			}
		}
	}
	// No property-specific editor -> check type-specific editor.
	return (elementType != null && this.customEditors != null && this.customEditors.containsKey(elementType));
}
/**
 * Determine the property type for the given property path.
 * <p>Called by {@link #findCustomEditor} if no required type has been specified,
 * to be able to find a type-specific editor even if just given a property path.
 * <p>The default implementation always returns {@code null}.
 * BeanWrapperImpl overrides this with the standard {@code getPropertyType}
 * method as defined by the BeanWrapper interface.
 * @param propertyPath the property path to determine the type for
 * @return the type of the property, or {@code null} if not determinable
 * @see BeanWrapper#getPropertyType(String)
 */
protected @Nullable Class<?> getPropertyType(String propertyPath) {
	// No type information available at this level; subclasses may override.
	return null;
}
/**
 * Get custom editor that has been registered for the given property.
 * @param propertyName the property path to look for
 * @param requiredType the type to look for
 * @return the custom editor, or {@code null} if none specific for this property
 */
private @Nullable PropertyEditor getCustomEditor(String propertyName, @Nullable Class<?> requiredType) {
	if (this.customEditorsForPath == null) {
		return null;
	}
	CustomEditorHolder holder = this.customEditorsForPath.get(propertyName);
	if (holder == null) {
		return null;
	}
	// Let the holder decide whether its editor is applicable to the required type.
	return holder.getPropertyEditor(requiredType);
}
/**
 * Get custom editor for the given type. If no direct match found,
 * try custom editor for superclass (which will in any case be able
 * to render a value as String via {@code getAsText}).
 * @param requiredType the type to look for
 * @return the custom editor, or {@code null} if none found for this type
 * @see java.beans.PropertyEditor#getAsText()
 */
private @Nullable PropertyEditor getCustomEditor(@Nullable Class<?> requiredType) {
	if (requiredType == null || this.customEditors == null) {
		return null;
	}
	// Check directly registered editor for type.
	PropertyEditor editor = this.customEditors.get(requiredType);
	if (editor == null) {
		// Check cached editor for type, registered for superclass or interface.
		if (this.customEditorCache != null) {
			editor = this.customEditorCache.get(requiredType);
		}
		if (editor == null) {
			// Find editor for superclass or interface. Iteration order is the
			// registration order (customEditors is a LinkedHashMap), so the
			// first assignable registration wins.
			for (Map.Entry<Class<?>, PropertyEditor> entry : this.customEditors.entrySet()) {
				Class<?> key = entry.getKey();
				if (key.isAssignableFrom(requiredType)) {
					editor = entry.getValue();
					// Cache editor for search type, to avoid the overhead
					// of repeated assignable-from checks.
					if (this.customEditorCache == null) {
						this.customEditorCache = new HashMap<>();
					}
					this.customEditorCache.put(requiredType, editor);
					// NOTE(review): values in customEditors are presumably never null,
					// so this guard effectively always breaks on the first match.
					if (editor != null) {
						break;
					}
				}
			}
		}
	}
	return editor;
}
/**
 * Guess the property type of the specified property from the registered
 * custom editors (provided that they were registered for a specific type).
 * @param propertyName the name of the property
 * @return the property type, or {@code null} if not determinable
 */
protected @Nullable Class<?> guessPropertyTypeFromEditors(String propertyName) {
	if (this.customEditorsForPath == null) {
		return null;
	}
	// Exact path match first.
	CustomEditorHolder holder = this.customEditorsForPath.get(propertyName);
	if (holder == null) {
		// Fall back to all key/index-stripped variations of the path.
		List<String> strippedPaths = new ArrayList<>();
		addStrippedPropertyPaths(strippedPaths, "", propertyName);
		for (String strippedName : strippedPaths) {
			holder = this.customEditorsForPath.get(strippedName);
			if (holder != null) {
				break;
			}
		}
	}
	return (holder != null ? holder.getRegisteredType() : null);
}
/**
 * Copy the custom editors registered in this instance to the given target registry.
 * @param target the target registry to copy to
 * @param nestedProperty the nested property path of the target registry, if any.
 * If this is non-null, only editors registered for a path below this nested property
 * will be copied. If this is null, all editors will be copied.
 */
protected void copyCustomEditorsTo(PropertyEditorRegistry target, @Nullable String nestedProperty) {
	// Presumably the plain property name with any index/key suffix stripped
	// (e.g. "items[0]" -> "items") - see PropertyAccessorUtils.getPropertyName.
	String actualPropertyName =
			(nestedProperty != null ? PropertyAccessorUtils.getPropertyName(nestedProperty) : null);
	if (this.customEditors != null) {
		this.customEditors.forEach(target::registerCustomEditor);
	}
	if (this.customEditorsForPath != null) {
		this.customEditorsForPath.forEach((editorPath, editorHolder) -> {
			if (nestedProperty != null) {
				// Only copy editors registered below the given nested property,
				// re-registering them under the remaining (relative) path.
				int pos = PropertyAccessorUtils.getFirstNestedPropertySeparatorIndex(editorPath);
				if (pos != -1) {
					String editorNestedProperty = editorPath.substring(0, pos);
					String editorNestedPath = editorPath.substring(pos + 1);
					if (editorNestedProperty.equals(nestedProperty) || editorNestedProperty.equals(actualPropertyName)) {
						target.registerCustomEditor(
								editorHolder.getRegisteredType(), editorNestedPath, editorHolder.getPropertyEditor());
					}
				}
			}
			else {
				// No nesting filter: copy every path-specific editor as-is.
				target.registerCustomEditor(
						editorHolder.getRegisteredType(), editorPath, editorHolder.getPropertyEditor());
			}
		});
	}
}
/**
 * Add property paths with all variations of stripped keys and/or indexes.
 * Invokes itself recursively with nested paths.
 * @param strippedPaths the result list to add to
 * @param nestedPath the current nested path
 * @param propertyPath the property path to check for keys/indexes to strip
 */
private void addStrippedPropertyPaths(List<String> strippedPaths, String nestedPath, String propertyPath) {
	int startIndex = propertyPath.indexOf(PropertyAccessor.PROPERTY_KEY_PREFIX_CHAR);
	if (startIndex != -1) {
		// NOTE(review): the suffix char is searched from the start of the string,
		// not from startIndex; a stray suffix before the prefix would yield an
		// invalid substring range - assumed not to occur in well-formed paths.
		int endIndex = propertyPath.indexOf(PropertyAccessor.PROPERTY_KEY_SUFFIX_CHAR);
		if (endIndex != -1) {
			String prefix = propertyPath.substring(0, startIndex);
			String key = propertyPath.substring(startIndex, endIndex + 1);
			String suffix = propertyPath.substring(endIndex + 1);
			// Strip the first key.
			strippedPaths.add(nestedPath + prefix + suffix);
			// Search for further keys to strip, with the first key stripped.
			addStrippedPropertyPaths(strippedPaths, nestedPath + prefix, suffix);
			// Search for further keys to strip, with the first key not stripped.
			addStrippedPropertyPaths(strippedPaths, nestedPath + prefix + key, suffix);
		}
	}
}
/**
* Holder for a registered custom editor with property name.
* Keeps the PropertyEditor itself plus the type it was registered for.
*/
private static final | PropertyEditorRegistrySupport |
java | quarkusio__quarkus | extensions/resteasy-reactive/rest-client/deployment/src/test/java/io/quarkus/rest/client/reactive/converter/ParamConverterProviderTest.java | {
"start": 1100,
"end": 3307
} | class ____ {
@RegisterExtension
static final QuarkusUnitTest TEST = new QuarkusUnitTest();
@TestHTTPResource
URI baseUri;
@Test
void shouldConvertPathParam() {
Client client = RestClientBuilder.newBuilder().baseUri(baseUri)
.build(Client.class);
assertThat(client.get(Param.FIRST)).isEqualTo("1");
assertThat(client.sub().get(Param.SECOND)).isEqualTo("2");
Bean bean = new Bean();
bean.param = Param.FIRST;
assertThat(client.get(bean)).isEqualTo("1");
}
@Test
void shouldConvertQueryParams() {
Client client = RestClientBuilder.newBuilder().baseUri(baseUri)
.build(Client.class);
assertThat(client.getWithQuery(Param.FIRST)).isEqualTo("1");
assertThat(client.sub().getWithQuery(Param.SECOND)).isEqualTo("2");
Bean bean = new Bean();
bean.param = Param.SECOND;
bean.queryParam = Param.FIRST;
assertThat(client.getWithQuery(bean)).isEqualTo("1");
}
@Test
void shouldConvertHeaderParams() {
Client client = RestClientBuilder.newBuilder().baseUri(baseUri)
.build(Client.class);
assertThat(client.getWithHeader(Param.FIRST)).isEqualTo("1");
assertThat(client.sub().getWithHeader(Param.SECOND)).isEqualTo("2");
Bean bean = new Bean();
bean.param = Param.SECOND;
bean.queryParam = Param.SECOND;
bean.headerParam = Param.FIRST;
assertThat(client.getWithHeader(bean)).isEqualTo("1");
}
@Test
void shouldConvertCookieParams() {
Client client = RestClientBuilder.newBuilder().baseUri(baseUri)
.build(Client.class);
assertThat(client.getWithCookie(Param.FIRST)).isEqualTo("1");
assertThat(client.sub().getWithCookie(Param.SECOND)).isEqualTo("2");
Bean bean = new Bean();
bean.param = Param.SECOND;
bean.queryParam = Param.SECOND;
bean.headerParam = Param.SECOND;
bean.cookieParam = Param.FIRST;
assertThat(client.getWithCookie(bean)).isEqualTo("1");
}
@Path("/echo")
@RegisterProvider(ParamConverter.class)
| ParamConverterProviderTest |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/dao/ReservationListInfo.java | {
"start": 1278,
"end": 1388
} | class ____ represent a list of reservations.
*/
@XmlRootElement
@XmlAccessorType(XmlAccessType.FIELD)
public | that |
java | alibaba__nacos | example/src/main/java/com/alibaba/nacos/example/ConfigFuzzyWatchExample.java | {
"start": 1854,
"end": 4193
} | class ____ {
public static void main(String[] args) throws NacosException, InterruptedException {
// Set up properties for Nacos Config Service
Properties properties = new Properties();
properties.setProperty("serverAddr", System.getProperty("serverAddr", "localhost"));
properties.setProperty("namespace", System.getProperty("namespace", "public"));
// Create a Config Service instance
ConfigService configService = ConfigFactory.createConfigService(properties);
int publicConfigNum = 10;
// Publish some configurations for testing
for (int i = 0; i < publicConfigNum; i++) {
boolean isPublishOk = configService.publishConfig("test" + i, "DEFAULT_GROUP", "content");
System.out.println("[publish result] " + isPublishOk);
}
// Define a fuzzy listener to handle configuration changes
FuzzyWatchEventWatcher listener = new AbstractFuzzyWatchEventWatcher() {
@Override
public void onEvent(ConfigFuzzyWatchChangeEvent event) {
System.out.println("[fuzzy listen config change]" + event.toString());
}
};
// Add the fuzzy listener to monitor configurations starting with "test"
configService.fuzzyWatch("test*", "DEFAULT_GROUP", listener);
System.out.println("[Fuzzy listening started.]");
// Publish more configurations to trigger the listener
Thread.sleep(1000);
boolean isPublishOkOne = configService.publishConfig("test-one", "DEFAULT_GROUP", "content");
System.out.println("[publish result] " + isPublishOkOne);
boolean isPublishOkTwo = configService.publishConfig("nacos-test-two", "DEFAULT_GROUP", "content");
System.out.println("[publish result] " + isPublishOkTwo);
boolean isPublishOkThree = configService.publishConfig("test", "DEFAULT_GROUP", "content");
System.out.println("[publish result] " + isPublishOkThree);
// Wait briefly before canceling the fuzzy listening
Thread.sleep(1000);
System.out.println("Cancel fuzzy listen...");
// Sleep to keep the program running for observation
Thread.sleep(3000);
}
}
| ConfigFuzzyWatchExample |
java | spring-projects__spring-framework | spring-test/src/main/java/org/springframework/test/util/XpathExpectationsHelper.java | {
"start": 1470,
"end": 1580
} | class ____ applying assertions via XPath expressions.
*
* @author Rossen Stoyanchev
* @since 3.2
*/
public | for |
java | apache__camel | components/camel-spring-parent/camel-spring-xml/src/test/java/org/apache/camel/spring/CircularComponentCreationTest.java | {
"start": 1280,
"end": 2259
} | class ____ {
@Test
public void testSimple() {
assertThrows(FailedToCreateRouteException.class,
() -> doTest("org/apache/camel/spring/CircularComponentCreationSimpleTest.xml"));
}
@Test
public void testComplex() {
assertDoesNotThrow(() -> doTest("org/apache/camel/spring/CircularComponentCreationComplexTest.xml"));
}
// *******************************
// Test implementation
// *******************************
private void doTest(String path) throws IOException {
AbstractXmlApplicationContext applicationContext = null;
SpringCamelContext ctx = null;
try {
applicationContext = new ClassPathXmlApplicationContext(path);
ctx = new SpringCamelContext(applicationContext);
} finally {
IOHelper.close(applicationContext);
if (ctx != null) {
ctx.close();
}
}
}
}
| CircularComponentCreationTest |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/resources/TrafficController.java | {
"start": 14058,
"end": 14440
} | class ____ %d:(\\d+) .*", ROOT_QDISC_HANDLE));
Pattern bytesPattern = Pattern.compile("Sent (\\d+) bytes.*");
int currentClassId = -1;
Map<Integer, Integer> containerClassIdStats = new HashMap<>();
for (String lineSplit : lines) {
String line = lineSplit.trim();
if (!line.isEmpty()) {
//Check if we encountered a stats segment for a container | htb |
java | quarkusio__quarkus | extensions/hibernate-orm/deployment/src/test/java/io/quarkus/hibernate/orm/config/unsupportedproperties/UnsupportedPropertiesTest.java | {
"start": 7973,
"end": 8219
} | class ____ extends BaseSessionEventListener {
private long batchCount = 0;
@Override
public void jdbcExecuteBatchStart() {
++batchCount;
}
}
@Entity
public static | BatchCountSpyingEventListener |
java | apache__camel | dsl/camel-componentdsl/src/generated/java/org/apache/camel/builder/component/dsl/JsonValidatorComponentBuilderFactory.java | {
"start": 1393,
"end": 1926
} | interface ____ {
/**
* JSON Schema Validator (camel-json-validator)
* Validate JSON payloads using NetworkNT JSON Schema.
*
* Category: validation
* Since: 2.20
* Maven coordinates: org.apache.camel:camel-json-validator
*
* @return the dsl builder
*/
static JsonValidatorComponentBuilder jsonValidator() {
return new JsonValidatorComponentBuilderImpl();
}
/**
* Builder for the JSON Schema Validator component.
*/
| JsonValidatorComponentBuilderFactory |
java | apache__camel | dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/KubernetesPodsEndpointBuilderFactory.java | {
"start": 16013,
"end": 23062
} | interface ____
extends
EndpointConsumerBuilder {
default KubernetesPodsEndpointConsumerBuilder basic() {
return (KubernetesPodsEndpointConsumerBuilder) this;
}
/**
* Allows for bridging the consumer to the Camel routing Error Handler,
* which mean any exceptions (if possible) occurred while the Camel
* consumer is trying to pickup incoming messages, or the likes, will
* now be processed as a message and handled by the routing Error
* Handler. Important: This is only possible if the 3rd party component
* allows Camel to be alerted if an exception was thrown. Some
* components handle this internally only, and therefore
* bridgeErrorHandler is not possible. In other situations we may
* improve the Camel component to hook into the 3rd party component and
* make this possible for future releases. By default the consumer will
* use the org.apache.camel.spi.ExceptionHandler to deal with
* exceptions, that will be logged at WARN or ERROR level and ignored.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: consumer (advanced)
*
* @param bridgeErrorHandler the value to set
* @return the dsl builder
*/
default AdvancedKubernetesPodsEndpointConsumerBuilder bridgeErrorHandler(boolean bridgeErrorHandler) {
doSetProperty("bridgeErrorHandler", bridgeErrorHandler);
return this;
}
/**
* Allows for bridging the consumer to the Camel routing Error Handler,
* which mean any exceptions (if possible) occurred while the Camel
* consumer is trying to pickup incoming messages, or the likes, will
* now be processed as a message and handled by the routing Error
* Handler. Important: This is only possible if the 3rd party component
* allows Camel to be alerted if an exception was thrown. Some
* components handle this internally only, and therefore
* bridgeErrorHandler is not possible. In other situations we may
* improve the Camel component to hook into the 3rd party component and
* make this possible for future releases. By default the consumer will
* use the org.apache.camel.spi.ExceptionHandler to deal with
* exceptions, that will be logged at WARN or ERROR level and ignored.
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: false
* Group: consumer (advanced)
*
* @param bridgeErrorHandler the value to set
* @return the dsl builder
*/
default AdvancedKubernetesPodsEndpointConsumerBuilder bridgeErrorHandler(String bridgeErrorHandler) {
doSetProperty("bridgeErrorHandler", bridgeErrorHandler);
return this;
}
/**
* To let the consumer use a custom ExceptionHandler. Notice if the
* option bridgeErrorHandler is enabled then this option is not in use.
* By default the consumer will deal with exceptions, that will be
* logged at WARN or ERROR level and ignored.
*
* The option is a: <code>org.apache.camel.spi.ExceptionHandler</code>
* type.
*
* Group: consumer (advanced)
*
* @param exceptionHandler the value to set
* @return the dsl builder
*/
default AdvancedKubernetesPodsEndpointConsumerBuilder exceptionHandler(org.apache.camel.spi.ExceptionHandler exceptionHandler) {
doSetProperty("exceptionHandler", exceptionHandler);
return this;
}
/**
* To let the consumer use a custom ExceptionHandler. Notice if the
* option bridgeErrorHandler is enabled then this option is not in use.
* By default the consumer will deal with exceptions, that will be
* logged at WARN or ERROR level and ignored.
*
* The option will be converted to a
* <code>org.apache.camel.spi.ExceptionHandler</code> type.
*
* Group: consumer (advanced)
*
* @param exceptionHandler the value to set
* @return the dsl builder
*/
default AdvancedKubernetesPodsEndpointConsumerBuilder exceptionHandler(String exceptionHandler) {
doSetProperty("exceptionHandler", exceptionHandler);
return this;
}
/**
* Sets the exchange pattern when the consumer creates an exchange.
*
* The option is a: <code>org.apache.camel.ExchangePattern</code> type.
*
* Group: consumer (advanced)
*
* @param exchangePattern the value to set
* @return the dsl builder
*/
default AdvancedKubernetesPodsEndpointConsumerBuilder exchangePattern(org.apache.camel.ExchangePattern exchangePattern) {
doSetProperty("exchangePattern", exchangePattern);
return this;
}
/**
* Sets the exchange pattern when the consumer creates an exchange.
*
* The option will be converted to a
* <code>org.apache.camel.ExchangePattern</code> type.
*
* Group: consumer (advanced)
*
* @param exchangePattern the value to set
* @return the dsl builder
*/
default AdvancedKubernetesPodsEndpointConsumerBuilder exchangePattern(String exchangePattern) {
doSetProperty("exchangePattern", exchangePattern);
return this;
}
/**
* Connection timeout in milliseconds to use when making requests to the
* Kubernetes API server.
*
* The option is a: <code>java.lang.Integer</code> type.
*
* Group: advanced
*
* @param connectionTimeout the value to set
* @return the dsl builder
*/
default AdvancedKubernetesPodsEndpointConsumerBuilder connectionTimeout(Integer connectionTimeout) {
doSetProperty("connectionTimeout", connectionTimeout);
return this;
}
/**
* Connection timeout in milliseconds to use when making requests to the
* Kubernetes API server.
*
* The option will be converted to a <code>java.lang.Integer</code>
* type.
*
* Group: advanced
*
* @param connectionTimeout the value to set
* @return the dsl builder
*/
default AdvancedKubernetesPodsEndpointConsumerBuilder connectionTimeout(String connectionTimeout) {
doSetProperty("connectionTimeout", connectionTimeout);
return this;
}
}
/**
* Builder for endpoint producers for the Kubernetes Pods component.
*/
public | AdvancedKubernetesPodsEndpointConsumerBuilder |
java | alibaba__fastjson | src/test/java/com/alibaba/json/test/performance/case1/IntObjectEncodePerformanceTest.java | {
"start": 336,
"end": 2055
} | class ____ extends TestCase {
private String text;
private final int COUNT = 1000 * 100;
private Object object;
protected void setUp() throws Exception {
String resource = "json/int_array_1000.json";
InputStream is = Thread.currentThread().getContextClassLoader().getResourceAsStream(resource);
text = IOUtils.toString(is);
is.close();
// text = "{\"age\":9223372036854775807}";
// object = JSON.parseObject(text, Long_100_Entity.class);
// object = JSON.parseArray(text);
{
ArrayList<Long> array = new ArrayList<Long>();
for (long i = 0; i < 1000; ++i) {
array.add(i);
}
object = array;
}
}
public void test_encodeObject() throws Exception {
List<Codec> decoders = new ArrayList<Codec>();
// decoders.add(new FastjsonDecoderImpl());
// decoders.add(new JacksonDecoderImpl());
decoders.add(new SimpleJsonCodec());
// decoders.add(new JsonLibDecoderImpl());
for (int i = 0; i < 10; ++i) {
for (Codec decoder : decoders) {
encode(object, decoder);
}
// System.out.println();
}
// System.out.println();
// System.out.println(text);
}
public void encode(Object object, Codec decoder) throws Exception {
long startNano = System.nanoTime();
for (int i = 0; i < COUNT; ++i) {
decoder.encode(object);
}
long nano = System.nanoTime() - startNano;
System.out.println(decoder.getName() + " : \t" + NumberFormat.getInstance().format(nano));
}
}
| IntObjectEncodePerformanceTest |
java | apache__hadoop | hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestReduceFetchFromPartialMem.java | {
"start": 9144,
"end": 11136
} | class ____
implements InputFormat<NullWritable,NullWritable> {
public FakeIF() { }
public InputSplit[] getSplits(JobConf conf, int numSplits) {
InputSplit[] splits = new InputSplit[numSplits];
for (int i = 0; i < splits.length; ++i) {
splits[i] = new FakeSplit();
}
return splits;
}
public RecordReader<NullWritable,NullWritable> getRecordReader(
InputSplit ignored, JobConf conf, Reporter reporter) {
return new RecordReader<NullWritable,NullWritable>() {
private boolean done = false;
public boolean next(NullWritable key, NullWritable value)
throws IOException {
if (done)
return false;
done = true;
return true;
}
public NullWritable createKey() { return NullWritable.get(); }
public NullWritable createValue() { return NullWritable.get(); }
public long getPos() throws IOException { return 0L; }
public void close() throws IOException { }
public float getProgress() throws IOException { return 0.0f; }
};
}
}
public static Counters runJob(JobConf conf) throws Exception {
conf.setMapperClass(MapMB.class);
conf.setReducerClass(MBValidate.class);
conf.setOutputKeyClass(Text.class);
conf.setOutputValueClass(Text.class);
conf.setNumReduceTasks(1);
conf.setInputFormat(FakeIF.class);
conf.setNumTasksToExecutePerJvm(1);
conf.setInt(JobContext.MAP_MAX_ATTEMPTS, 0);
conf.setInt(JobContext.REDUCE_MAX_ATTEMPTS, 0);
FileInputFormat.setInputPaths(conf, new Path("/in"));
final Path outp = new Path("/out");
FileOutputFormat.setOutputPath(conf, outp);
RunningJob job = null;
try {
job = JobClient.runJob(conf);
assertTrue(job.isSuccessful());
} finally {
FileSystem fs = dfsCluster.getFileSystem();
if (fs.exists(outp)) {
fs.delete(outp, true);
}
}
return job.getCounters();
}
}
| FakeIF |
java | elastic__elasticsearch | x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/file/FileRealmTests.java | {
"start": 1980,
"end": 13324
} | class ____ extends ESTestCase {
private static final RealmConfig.RealmIdentifier REALM_IDENTIFIER = new RealmConfig.RealmIdentifier("file", "file-test");
private static final Answer<AuthenticationResult<User>> VERIFY_PASSWORD_ANSWER = inv -> {
assertThat(inv.getArguments().length, is(3));
@SuppressWarnings("unchecked")
Supplier<User> supplier = (Supplier<User>) inv.getArguments()[2];
return AuthenticationResult.success(supplier.get());
};
private FileUserPasswdStore userPasswdStore;
private FileUserRolesStore userRolesStore;
private Settings globalSettings;
private ThreadPool threadPool;
private ThreadContext threadContext;
@Before
public void init() throws Exception {
userPasswdStore = mock(FileUserPasswdStore.class);
userRolesStore = mock(FileUserRolesStore.class);
globalSettings = Settings.builder()
.put("path.home", createTempDir())
.put("xpack.security.authc.password_hashing.algorithm", getFastStoredHashAlgoForTests().name())
.put(RealmSettings.realmSettingPrefix(REALM_IDENTIFIER) + "order", 0)
.build();
threadPool = mock(ThreadPool.class);
threadContext = new ThreadContext(globalSettings);
when(threadPool.getThreadContext()).thenReturn(threadContext);
}
public void testAuthenticate() throws Exception {
when(userPasswdStore.verifyPassword(eq("user1"), eq(new SecureString("longtestpassword")), anySupplier())).thenAnswer(
VERIFY_PASSWORD_ANSWER
);
when(userRolesStore.roles("user1")).thenReturn(new String[] { "role1", "role2" });
RealmConfig config = getRealmConfig(globalSettings);
FileRealm realm = new FileRealm(config, userPasswdStore, userRolesStore, threadPool);
PlainActionFuture<AuthenticationResult<User>> future = new PlainActionFuture<>();
realm.authenticate(new UsernamePasswordToken("user1", new SecureString("longtestpassword")), future);
final AuthenticationResult<User> result = future.actionGet();
assertThat(result.getStatus(), is(AuthenticationResult.Status.SUCCESS));
User user = result.getValue();
assertThat(user, notNullValue());
assertThat(user.principal(), equalTo("user1"));
assertThat(user.roles(), notNullValue());
assertThat(user.roles().length, equalTo(2));
assertThat(user.roles(), arrayContaining("role1", "role2"));
}
private RealmConfig getRealmConfig(Settings settings) {
return new RealmConfig(REALM_IDENTIFIER, settings, TestEnvironment.newEnvironment(settings), threadContext);
}
public void testAuthenticateCaching() throws Exception {
Settings settings = Settings.builder()
.put(RealmSettings.realmSettingPrefix(REALM_IDENTIFIER) + "cache.hash_algo", randomFrom(Hasher.getAvailableAlgoCacheHash()))
.put(globalSettings)
.build();
RealmConfig config = getRealmConfig(settings);
when(userPasswdStore.verifyPassword(eq("user1"), eq(new SecureString("longtestpassword")), anySupplier())).thenAnswer(
VERIFY_PASSWORD_ANSWER
);
when(userRolesStore.roles("user1")).thenReturn(new String[] { "role1", "role2" });
FileRealm realm = new FileRealm(config, userPasswdStore, userRolesStore, threadPool);
PlainActionFuture<AuthenticationResult<User>> future = new PlainActionFuture<>();
realm.authenticate(new UsernamePasswordToken("user1", new SecureString("longtestpassword")), future);
User user1 = future.actionGet().getValue();
future = new PlainActionFuture<>();
realm.authenticate(new UsernamePasswordToken("user1", new SecureString("longtestpassword")), future);
User user2 = future.actionGet().getValue();
assertThat(user1, sameInstance(user2));
}
public void testAuthenticateCachingRefresh() throws Exception {
RealmConfig config = getRealmConfig(globalSettings);
userPasswdStore = spy(new UserPasswdStore(config));
userRolesStore = spy(new UserRolesStore(config));
when(userPasswdStore.verifyPassword(eq("user1"), eq(new SecureString("longtestpassword")), anySupplier())).thenAnswer(
VERIFY_PASSWORD_ANSWER
);
doReturn(new String[] { "role1", "role2" }).when(userRolesStore).roles("user1");
FileRealm realm = new FileRealm(config, userPasswdStore, userRolesStore, threadPool);
PlainActionFuture<AuthenticationResult<User>> future = new PlainActionFuture<>();
realm.authenticate(new UsernamePasswordToken("user1", new SecureString("longtestpassword")), future);
User user1 = future.actionGet().getValue();
future = new PlainActionFuture<>();
realm.authenticate(new UsernamePasswordToken("user1", new SecureString("longtestpassword")), future);
User user2 = future.actionGet().getValue();
assertThat(user1, sameInstance(user2));
userPasswdStore.notifyRefresh();
future = new PlainActionFuture<>();
realm.authenticate(new UsernamePasswordToken("user1", new SecureString("longtestpassword")), future);
User user3 = future.actionGet().getValue();
assertThat(user2, not(sameInstance(user3)));
future = new PlainActionFuture<>();
realm.authenticate(new UsernamePasswordToken("user1", new SecureString("longtestpassword")), future);
User user4 = future.actionGet().getValue();
assertThat(user3, sameInstance(user4));
userRolesStore.notifyRefresh();
future = new PlainActionFuture<>();
realm.authenticate(new UsernamePasswordToken("user1", new SecureString("longtestpassword")), future);
User user5 = future.actionGet().getValue();
assertThat(user4, not(sameInstance(user5)));
future = new PlainActionFuture<>();
realm.authenticate(new UsernamePasswordToken("user1", new SecureString("longtestpassword")), future);
User user6 = future.actionGet().getValue();
assertThat(user5, sameInstance(user6));
}
public void testToken() throws Exception {
RealmConfig config = getRealmConfig(globalSettings);
when(userPasswdStore.verifyPassword(eq("user1"), eq(new SecureString("longtestpassword")), anySupplier())).thenAnswer(
VERIFY_PASSWORD_ANSWER
);
when(userRolesStore.roles("user1")).thenReturn(new String[] { "role1", "role2" });
FileRealm realm = new FileRealm(config, userPasswdStore, userRolesStore, threadPool);
ThreadContext threadContext = new ThreadContext(Settings.EMPTY);
UsernamePasswordToken.putTokenHeader(threadContext, new UsernamePasswordToken("user1", new SecureString("longtestpassword")));
UsernamePasswordToken token = realm.token(threadContext);
assertThat(token, notNullValue());
assertThat(token.principal(), equalTo("user1"));
assertThat(token.credentials(), notNullValue());
assertThat(new String(token.credentials().getChars()), equalTo("longtestpassword"));
}
public void testLookup() throws Exception {
when(userPasswdStore.userExists("user1")).thenReturn(true);
when(userRolesStore.roles("user1")).thenReturn(new String[] { "role1", "role2" });
RealmConfig config = getRealmConfig(globalSettings);
FileRealm realm = new FileRealm(config, userPasswdStore, userRolesStore, threadPool);
PlainActionFuture<User> future = new PlainActionFuture<>();
realm.lookupUser("user1", future);
User user = future.actionGet();
assertThat(user, notNullValue());
assertThat(user.principal(), equalTo("user1"));
assertThat(user.roles(), notNullValue());
assertThat(user.roles().length, equalTo(2));
assertThat(user.roles(), arrayContaining("role1", "role2"));
}
public void testLookupCaching() throws Exception {
when(userPasswdStore.userExists("user1")).thenReturn(true);
when(userRolesStore.roles("user1")).thenReturn(new String[] { "role1", "role2" });
RealmConfig config = getRealmConfig(globalSettings);
FileRealm realm = new FileRealm(config, userPasswdStore, userRolesStore, threadPool);
PlainActionFuture<User> future = new PlainActionFuture<>();
realm.lookupUser("user1", future);
User user = future.actionGet();
future = new PlainActionFuture<>();
realm.lookupUser("user1", future);
User user1 = future.actionGet();
assertThat(user, sameInstance(user1));
verify(userPasswdStore).userExists("user1");
verify(userRolesStore).roles("user1");
}
public void testLookupCachingWithRefresh() throws Exception {
RealmConfig config = getRealmConfig(globalSettings);
userPasswdStore = spy(new UserPasswdStore(config));
userRolesStore = spy(new UserRolesStore(config));
doReturn(true).when(userPasswdStore).userExists("user1");
doReturn(new String[] { "role1", "role2" }).when(userRolesStore).roles("user1");
FileRealm realm = new FileRealm(config, userPasswdStore, userRolesStore, threadPool);
PlainActionFuture<User> future = new PlainActionFuture<>();
realm.lookupUser("user1", future);
User user1 = future.actionGet();
future = new PlainActionFuture<>();
realm.lookupUser("user1", future);
User user2 = future.actionGet();
assertThat(user1, sameInstance(user2));
userPasswdStore.notifyRefresh();
future = new PlainActionFuture<>();
realm.lookupUser("user1", future);
User user3 = future.actionGet();
assertThat(user2, not(sameInstance(user3)));
future = new PlainActionFuture<>();
realm.lookupUser("user1", future);
User user4 = future.actionGet();
assertThat(user3, sameInstance(user4));
userRolesStore.notifyRefresh();
future = new PlainActionFuture<>();
realm.lookupUser("user1", future);
User user5 = future.actionGet();
assertThat(user4, not(sameInstance(user5)));
future = new PlainActionFuture<>();
realm.lookupUser("user1", future);
User user6 = future.actionGet();
assertThat(user5, sameInstance(user6));
}
public void testUsageStats() throws Exception {
final int userCount = randomIntBetween(0, 1000);
when(userPasswdStore.usersCount()).thenReturn(userCount);
final int order = randomIntBetween(0, 10);
Settings settings = Settings.builder()
.put(globalSettings)
.put(RealmSettings.realmSettingPrefix(REALM_IDENTIFIER) + "order", order)
.build();
RealmConfig config = getRealmConfig(settings);
FileRealm realm = new FileRealm(config, userPasswdStore, userRolesStore, threadPool);
PlainActionFuture<Map<String, Object>> future = new PlainActionFuture<>();
realm.usageStats(future);
Map<String, Object> usage = future.get();
assertThat(usage, is(notNullValue()));
assertThat(usage, hasEntry("name", REALM_IDENTIFIER.getName()));
assertThat(usage, hasEntry("order", order));
assertThat(usage, hasEntry("size", userCount));
}
static | FileRealmTests |
java | spring-projects__spring-security | acl/src/test/java/org/springframework/security/acls/domain/AclAuthorizationStrategyImplTests.java | {
"start": 4185,
"end": 4369
} | class ____ implements GrantedAuthority {
@Override
public String getAuthority() {
return AclAuthorizationStrategyImplTests.this.authority.getAuthority();
}
}
}
| CustomAuthority |
java | grpc__grpc-java | examples/example-orca/src/main/java/io/grpc/examples/orca/CustomBackendMetricsLoadBalancerProvider.java | {
"start": 1904,
"end": 2373
} | class ____ extends ForwardingLoadBalancer {
private LoadBalancer delegate;
public CustomBackendMetricsLoadBalancer(LoadBalancer.Helper helper) {
this.delegate = LoadBalancerRegistry.getDefaultRegistry()
.getProvider("pick_first")
.newLoadBalancer(new CustomBackendMetricsLoadBalancerHelper(helper));
}
@Override
public LoadBalancer delegate() {
return delegate;
}
private final | CustomBackendMetricsLoadBalancer |
java | apache__camel | components/camel-test/camel-test-main-junit5/src/test/java/org/apache/camel/test/main/junit5/legacy/AdviceRouteTest.java | {
"start": 1553,
"end": 2224
} | class
____.addConfiguration(MyConfiguration.class);
}
@Test
void shouldAdviceTheRoute() throws Exception {
// Advice the route by replace the from endpoint
AdviceWith.adviceWith(context, "foo", ad -> ad.replaceFromWith("direct:foo"));
// must start Camel after we are done using advice-with
context.start();
MockEndpoint mock = context.getEndpoint("mock:out", MockEndpoint.class);
mock.expectedBodiesReceived("Hello Will!");
String result = template.requestBody("direct:foo", null, String.class);
mock.assertIsSatisfied();
assertEquals("Hello Will!", result);
}
}
| configuration |
java | apache__camel | test-infra/camel-test-infra-google-pubsub/src/main/java/org/apache/camel/test/infra/google/pubsub/services/GooglePubSubLocalContainerInfraService.java | {
"start": 2919,
"end": 4221
} | class ____ extends PubSubEmulatorContainer {
public TestInfraPubSubEmulatorContainer(boolean fixedPort) {
super(DockerImageName.parse(imageName));
if (fixedPort) {
addFixedExposedPort(PORT, PORT);
} else {
addExposedPort(PORT);
}
}
}
return new TestInfraPubSubEmulatorContainer(ContainerEnvironmentUtil.isFixedPort(this.getClass()));
}
@Override
public void registerProperties() {
System.setProperty(GooglePubSubProperties.SERVICE_ADDRESS, getServiceAddress());
}
@Override
public void initialize() {
LOG.info("Trying to start the GooglePubSub container");
container.start();
registerProperties();
LOG.info("GooglePubSub instance running at {}", getServiceAddress());
}
@Override
public void shutdown() {
LOG.info("Stopping the GooglePubSub container");
container.stop();
}
@Override
public PubSubEmulatorContainer getContainer() {
return container;
}
@Override
public String getServiceAddress() {
return String.format("%s:%d", container.getHost(), container.getFirstMappedPort());
}
}
| TestInfraPubSubEmulatorContainer |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/mover/Mover.java | {
"start": 22620,
"end": 27676
} | class ____ {
final List<StorageType> expected;
final List<StorageType> existing;
StorageTypeDiff(List<StorageType> expected, StorageType[] existing) {
this.expected = new LinkedList<StorageType>(expected);
this.existing = new LinkedList<StorageType>(Arrays.asList(existing));
}
/**
* Remove the overlap between the expected types and the existing types.
* @param ignoreNonMovable ignore non-movable storage types
* by removing them from both expected and existing storage type list
* to prevent non-movable storage from being moved.
* @returns if the existing types or the expected types is empty after
* removing the overlap.
*/
boolean removeOverlap(boolean ignoreNonMovable) {
for(Iterator<StorageType> i = existing.iterator(); i.hasNext(); ) {
final StorageType t = i.next();
if (expected.remove(t)) {
i.remove();
}
}
if (ignoreNonMovable) {
removeNonMovable(existing);
removeNonMovable(expected);
}
return expected.isEmpty() || existing.isEmpty();
}
void removeNonMovable(List<StorageType> types) {
for (Iterator<StorageType> i = types.iterator(); i.hasNext(); ) {
final StorageType t = i.next();
if (!t.isMovable()) {
i.remove();
}
}
}
@Override
public String toString() {
return getClass().getSimpleName() + "{expected=" + expected
+ ", existing=" + existing + "}";
}
}
private static void checkKeytabAndInit(Configuration conf)
throws IOException {
if (conf.getBoolean(DFSConfigKeys.DFS_MOVER_KEYTAB_ENABLED_KEY,
DFSConfigKeys.DFS_MOVER_KEYTAB_ENABLED_DEFAULT)) {
LOG.info("Keytab is configured, will login using keytab.");
UserGroupInformation.setConfiguration(conf);
String addr = conf.get(DFSConfigKeys.DFS_MOVER_ADDRESS_KEY,
DFSConfigKeys.DFS_MOVER_ADDRESS_DEFAULT);
InetSocketAddress socAddr = NetUtils.createSocketAddr(addr, 0,
DFSConfigKeys.DFS_MOVER_ADDRESS_KEY);
SecurityUtil.login(conf, DFSConfigKeys.DFS_MOVER_KEYTAB_FILE_KEY,
DFSConfigKeys.DFS_MOVER_KERBEROS_PRINCIPAL_KEY,
socAddr.getHostName());
}
}
static int run(Map<URI, List<Path>> namenodes, Configuration conf)
throws IOException, InterruptedException {
final long sleeptime =
conf.getTimeDuration(DFSConfigKeys.DFS_HEARTBEAT_INTERVAL_KEY,
DFSConfigKeys.DFS_HEARTBEAT_INTERVAL_DEFAULT,
TimeUnit.SECONDS, TimeUnit.MILLISECONDS) * 2 +
conf.getTimeDuration(
DFSConfigKeys.DFS_NAMENODE_REDUNDANCY_INTERVAL_SECONDS_KEY,
DFSConfigKeys.DFS_NAMENODE_REDUNDANCY_INTERVAL_SECONDS_DEFAULT,
TimeUnit.SECONDS, TimeUnit.MILLISECONDS);
AtomicInteger retryCount = new AtomicInteger(0);
// TODO: Need to limit the size of the pinned blocks to limit memory usage
Map<Long, Set<DatanodeInfo>> excludedPinnedBlocks = new HashMap<>();
LOG.info("namenodes = " + namenodes);
DefaultMetricsSystem.initialize("Mover");
JvmMetrics.create("Mover",
conf.get(DFSConfigKeys.DFS_METRICS_SESSION_ID_KEY),
DefaultMetricsSystem.instance());
checkKeytabAndInit(conf);
List<NameNodeConnector> connectors = Collections.emptyList();
try {
connectors = NameNodeConnector.newNameNodeConnectors(namenodes,
Mover.class.getSimpleName(), HdfsServerConstants.MOVER_ID_PATH, conf,
NameNodeConnector.DEFAULT_MAX_IDLE_ITERATIONS);
while (connectors.size() > 0) {
Collections.shuffle(connectors);
Iterator<NameNodeConnector> iter = connectors.iterator();
while (iter.hasNext()) {
NameNodeConnector nnc = iter.next();
final Mover m = new Mover(nnc, conf, retryCount,
excludedPinnedBlocks);
final ExitStatus r = m.run();
if (r == ExitStatus.SUCCESS) {
IOUtils.cleanupWithLogger(LOG, nnc);
iter.remove();
} else if (r != ExitStatus.IN_PROGRESS) {
if (r == ExitStatus.NO_MOVE_PROGRESS) {
System.err.println("Failed to move some blocks after "
+ m.retryMaxAttempts + " retries. Exiting...");
} else if (r == ExitStatus.NO_MOVE_BLOCK) {
System.err.println("Some blocks can't be moved. Exiting...");
} else {
System.err.println("Mover failed. Exiting with status " + r
+ "... ");
}
// must be an error statue, return
return r.getExitCode();
}
}
Thread.sleep(sleeptime);
}
System.out.println("Mover Successful: all blocks satisfy"
+ " the specified storage policy. Exiting...");
return ExitStatus.SUCCESS.getExitCode();
} finally {
for (NameNodeConnector nnc : connectors) {
IOUtils.cleanupWithLogger(LOG, nnc);
}
}
}
public static | StorageTypeDiff |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/mapping/access/ElementCollectionAccessTest.java | {
"start": 754,
"end": 1198
} | class ____ {
@Test
public void test(EntityManagerFactoryScope scope) {
scope.inTransaction(entityManager -> {
Book book = new Book();
book.setId(1L);
book.setTitle("High-Performance Java Persistence");
book.getAuthors().add(new Author(
"Vlad",
"Mihalcea"
));
entityManager.persist(book);
});
}
//tag::access-element-collection-mapping-example[]
@Entity(name = "Book")
public static | ElementCollectionAccessTest |
java | quarkusio__quarkus | extensions/vertx-http/deployment/src/main/java/io/quarkus/vertx/http/deployment/RequireSocketHttpBuildItem.java | {
"start": 360,
"end": 515
} | class ____ extends SimpleBuildItem {
public static final RequireSocketHttpBuildItem MARKER = new RequireSocketHttpBuildItem();
}
| RequireSocketHttpBuildItem |
java | spring-projects__spring-security | access/src/main/java/org/springframework/security/access/vote/AbstractAclVoter.java | {
"start": 1333,
"end": 2743
} | class ____ implements AccessDecisionVoter<MethodInvocation> {
@SuppressWarnings("NullAway.Init")
private @Nullable Class<?> processDomainObjectClass;
protected Object getDomainObjectInstance(MethodInvocation invocation) {
Object[] args = invocation.getArguments();
Class<?>[] params = invocation.getMethod().getParameterTypes();
for (int i = 0; i < params.length; i++) {
if (this.processDomainObjectClass.isAssignableFrom(params[i])) {
return args[i];
}
}
throw new AuthorizationServiceException("MethodInvocation: " + invocation
+ " did not provide any argument of type: " + this.processDomainObjectClass);
}
public Class<?> getProcessDomainObjectClass() {
return this.processDomainObjectClass;
}
public void setProcessDomainObjectClass(Class<?> processDomainObjectClass) {
Assert.notNull(processDomainObjectClass, "processDomainObjectClass cannot be set to null");
this.processDomainObjectClass = processDomainObjectClass;
}
/**
* This implementation supports only <code>MethodSecurityInterceptor</code>, because
* it queries the presented <code>MethodInvocation</code>.
* @param clazz the secure object
* @return <code>true</code> if the secure object is <code>MethodInvocation</code>,
* <code>false</code> otherwise
*/
@Override
public boolean supports(Class<?> clazz) {
return (MethodInvocation.class.isAssignableFrom(clazz));
}
}
| AbstractAclVoter |
java | google__guava | android/guava/src/com/google/common/primitives/Longs.java | {
"start": 11838,
"end": 11922
} | class ____ ProGuard eliminate and inline the Longs
* class.
*/
static final | lets |
java | apache__rocketmq | client/src/main/java/org/apache/rocketmq/client/exception/RequestTimeoutException.java | {
"start": 901,
"end": 1958
} | class ____ extends Exception {
private static final long serialVersionUID = -5758410930844185841L;
private int responseCode;
private String errorMessage;
public RequestTimeoutException(String errorMessage, Throwable cause) {
super(errorMessage, cause);
this.responseCode = -1;
this.errorMessage = errorMessage;
}
public RequestTimeoutException(int responseCode, String errorMessage) {
super("CODE: " + UtilAll.responseCode2String(responseCode) + " DESC: "
+ errorMessage);
this.responseCode = responseCode;
this.errorMessage = errorMessage;
}
public int getResponseCode() {
return responseCode;
}
public RequestTimeoutException setResponseCode(final int responseCode) {
this.responseCode = responseCode;
return this;
}
public String getErrorMessage() {
return errorMessage;
}
public void setErrorMessage(final String errorMessage) {
this.errorMessage = errorMessage;
}
}
| RequestTimeoutException |
java | apache__kafka | clients/src/main/java/org/apache/kafka/clients/consumer/CloseOptions.java | {
"start": 998,
"end": 1623
} | class ____ {
/**
* Enum to specify the group membership operation upon leaving group.
*
* <ul>
* <li><b>{@code LEAVE_GROUP}</b>: means the consumer will leave the group.</li>
* <li><b>{@code REMAIN_IN_GROUP}</b>: means the consumer will remain in the group.</li>
* <li><b>{@code DEFAULT}</b>: Applies the default behavior:
* <ul>
* <li>For <b>static members</b>: The consumer will remain in the group.</li>
* <li>For <b>dynamic members</b>: The consumer will leave the group.</li>
* </ul>
* </li>
* </ul>
*/
public | CloseOptions |
java | apache__flink | flink-streaming-java/src/test/java/org/apache/flink/streaming/api/DataStreamTest.java | {
"start": 77344,
"end": 77720
} | class ____ {
private int[] id;
public POJOWithoutHashCode() {}
public POJOWithoutHashCode(int[] id) {
this.id = id;
}
public int[] getId() {
return id;
}
public void setId(int[] id) {
this.id = id;
}
}
/** POJO with hashCode. */
public static | POJOWithoutHashCode |
java | apache__kafka | clients/src/main/java/org/apache/kafka/common/compress/ZstdCompression.java | {
"start": 5085,
"end": 5628
} | class ____ implements Compression.Builder<ZstdCompression> {
private int level = ZSTD.defaultLevel();
public Builder level(int level) {
if (level < ZSTD.minLevel() || ZSTD.maxLevel() < level) {
throw new IllegalArgumentException("zstd doesn't support given compression level: " + level);
}
this.level = level;
return this;
}
@Override
public ZstdCompression build() {
return new ZstdCompression(level);
}
}
}
| Builder |
java | apache__dubbo | dubbo-common/src/test/java/org/apache/dubbo/common/utils/StringConstantFieldValuePredicateTest.java | {
"start": 1195,
"end": 1580
} | class ____ {
public static final String S1 = "1";
public static final Object O1 = "2";
public static final Object O2 = 3;
@Test
void test() {
Predicate<String> predicate = of(getClass());
assertTrue(predicate.test("1"));
assertTrue(predicate.test("2"));
assertFalse(predicate.test("3"));
}
}
| StringConstantFieldValuePredicateTest |
java | apache__flink | flink-core/src/test/java/org/apache/flink/api/java/typeutils/RowTypeInfoTest.java | {
"start": 1373,
"end": 6979
} | class ____ extends TypeInformationTestBase<RowTypeInfo> {
private static final TypeInformation<?>[] typeList =
new TypeInformation<?>[] {
BasicTypeInfo.INT_TYPE_INFO,
new RowTypeInfo(BasicTypeInfo.SHORT_TYPE_INFO, BasicTypeInfo.BIG_DEC_TYPE_INFO),
BasicTypeInfo.STRING_TYPE_INFO
};
@Override
protected RowTypeInfo[] getTestData() {
return new RowTypeInfo[] {
new RowTypeInfo(BasicTypeInfo.INT_TYPE_INFO, BasicTypeInfo.STRING_TYPE_INFO),
new RowTypeInfo(BasicTypeInfo.INT_TYPE_INFO, BasicTypeInfo.BOOLEAN_TYPE_INFO),
new RowTypeInfo(typeList),
new RowTypeInfo(
new TypeInformation[] {
BasicTypeInfo.INT_TYPE_INFO, BasicTypeInfo.INT_TYPE_INFO
},
new String[] {"int", "int2"})
};
}
@Test
void testWrongNumberOfFieldNames() {
assertThatThrownBy(() -> new RowTypeInfo(typeList, new String[] {"int", "string"}))
.isInstanceOf(IllegalArgumentException.class);
// number of field names should be equal to number of types, go fail
}
@Test
void testDuplicateCustomFieldNames() {
assertThatThrownBy(
() -> new RowTypeInfo(typeList, new String[] {"int", "string", "string"}))
.isInstanceOf(IllegalArgumentException.class);
// field names should not be the same, go fail
}
@Test
void testCustomFieldNames() {
String[] fieldNames = new String[] {"int", "row", "string"};
RowTypeInfo typeInfo1 = new RowTypeInfo(typeList, new String[] {"int", "row", "string"});
assertThat(typeInfo1.getFieldNames()).isEqualTo(new String[] {"int", "row", "string"});
assertThat(typeInfo1.getTypeAt("string")).isEqualTo(BasicTypeInfo.STRING_TYPE_INFO);
assertThat(typeInfo1.getTypeAt(2)).isEqualTo(BasicTypeInfo.STRING_TYPE_INFO);
assertThat(typeInfo1.getTypeAt("row.0")).isEqualTo(BasicTypeInfo.SHORT_TYPE_INFO);
assertThat(typeInfo1.getTypeAt("row.f1")).isEqualTo(BasicTypeInfo.BIG_DEC_TYPE_INFO);
// change the names in fieldNames
fieldNames[1] = "composite";
RowTypeInfo typeInfo2 = new RowTypeInfo(typeList, fieldNames);
// make sure the field names are deep copied
assertThat(typeInfo1.getFieldNames()).isEqualTo(new String[] {"int", "row", "string"});
assertThat(typeInfo2.getFieldNames())
.isEqualTo(new String[] {"int", "composite", "string"});
}
@Test
void testGetFlatFields() {
RowTypeInfo typeInfo1 = new RowTypeInfo(typeList, new String[] {"int", "row", "string"});
List<FlatFieldDescriptor> result = new ArrayList<>();
typeInfo1.getFlatFields("row.*", 0, result);
assertThat(result).hasSize(2);
assertThat(result.get(0).toString())
.isEqualTo(new FlatFieldDescriptor(1, BasicTypeInfo.SHORT_TYPE_INFO).toString());
assertThat(result.get(1).toString())
.isEqualTo(new FlatFieldDescriptor(2, BasicTypeInfo.BIG_DEC_TYPE_INFO).toString());
result.clear();
typeInfo1.getFlatFields("string", 0, result);
assertThat(result).hasSize(1);
assertThat(result.get(0).toString())
.isEqualTo(new FlatFieldDescriptor(3, BasicTypeInfo.STRING_TYPE_INFO).toString());
}
@Test
void testGetTypeAt() {
RowTypeInfo typeInfo = new RowTypeInfo(typeList);
assertThat(typeInfo.getFieldNames()).isEqualTo(new String[] {"f0", "f1", "f2"});
assertThat(typeInfo.getTypeAt("f2")).isEqualTo(BasicTypeInfo.STRING_TYPE_INFO);
assertThat(typeInfo.getTypeAt("f1.f0")).isEqualTo(BasicTypeInfo.SHORT_TYPE_INFO);
assertThat(typeInfo.getTypeAt("f1.1")).isEqualTo(BasicTypeInfo.BIG_DEC_TYPE_INFO);
}
@Test
void testNestedRowTypeInfo() {
RowTypeInfo typeInfo = new RowTypeInfo(typeList);
assertThat(typeInfo.getTypeAt("f1").toString()).isEqualTo("Row(f0: Short, f1: BigDecimal)");
assertThat(typeInfo.getTypeAt("f1.f0").toString()).isEqualTo("Short");
}
@Test
void testSchemaEquals() {
final RowTypeInfo row1 =
new RowTypeInfo(
new TypeInformation[] {
BasicTypeInfo.INT_TYPE_INFO, BasicTypeInfo.STRING_TYPE_INFO
},
new String[] {"field1", "field2"});
final RowTypeInfo row2 =
new RowTypeInfo(
new TypeInformation[] {
BasicTypeInfo.INT_TYPE_INFO, BasicTypeInfo.STRING_TYPE_INFO
},
new String[] {"field1", "field2"});
assertThat(row1.schemaEquals(row2)).isTrue();
final RowTypeInfo other1 =
new RowTypeInfo(
new TypeInformation[] {
BasicTypeInfo.INT_TYPE_INFO, BasicTypeInfo.STRING_TYPE_INFO
},
new String[] {"otherField", "field2"});
final RowTypeInfo other2 =
new RowTypeInfo(
new TypeInformation[] {
BasicTypeInfo.LONG_TYPE_INFO, BasicTypeInfo.STRING_TYPE_INFO
},
new String[] {"field1", "field2"});
assertThat(row1.schemaEquals(other1)).isFalse();
assertThat(row1.schemaEquals(other2)).isFalse();
}
}
| RowTypeInfoTest |
java | apache__maven | impl/maven-cli/src/main/java/org/apache/maven/cling/logging/impl/UnsupportedSlf4jBindingConfiguration.java | {
"start": 1072,
"end": 1462
} | class ____ extends BaseSlf4jConfiguration {
/**
* @deprecated the arguments are ignored. Use the no-args constructor.
*/
@Deprecated
public UnsupportedSlf4jBindingConfiguration(String slf4jBinding, Map<URL, Set<Object>> supported) {}
public UnsupportedSlf4jBindingConfiguration() {}
@Override
public void activate() {}
}
| UnsupportedSlf4jBindingConfiguration |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/NullTernaryTest.java | {
"start": 3713,
"end": 4092
} | class ____ {
void conditionalInCondition(Object array, String input) {
int arrayDimensions =
((array != null ? input : null) == null) ? 0 : (array != null ? input : null).length();
}
}
""")
.doTest();
}
@Test
public void expressionSwitch_doesNotCrash() {
testHelper
.addSourceLines(
"Test.java",
"""
| Test |
java | apache__flink | flink-state-backends/flink-statebackend-rocksdb/src/main/java/org/apache/flink/state/rocksdb/RocksDBNativeMetricMonitor.java | {
"start": 5724,
"end": 7039
} | class ____ extends RocksDBNativeView implements Gauge<BigInteger> {
private final RocksDBProperty property;
private final ColumnFamilyHandle handle;
private BigInteger bigInteger;
private RocksDBNativePropertyMetricView(
ColumnFamilyHandle handle, @Nonnull RocksDBProperty property) {
this.handle = handle;
this.property = property;
this.bigInteger = BigInteger.ZERO;
}
public void setValue(long value) {
if (value >= 0L) {
bigInteger = BigInteger.valueOf(value);
} else {
int upper = (int) (value >>> 32);
int lower = (int) value;
bigInteger =
BigInteger.valueOf(Integer.toUnsignedLong(upper))
.shiftLeft(32)
.add(BigInteger.valueOf(Integer.toUnsignedLong(lower)));
}
}
@Override
public BigInteger getValue() {
return bigInteger;
}
@Override
public void update() {
setProperty(this);
}
}
/**
* A gauge which periodically pulls a RocksDB statistics-based native metric for the database.
*/
| RocksDBNativePropertyMetricView |
java | elastic__elasticsearch | x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/patterntext/PatternTextFieldMapper.java | {
"start": 13283,
"end": 14390
} | interface ____ {
BinaryDocValues get(LeafReader leafReader) throws IOException;
}
@Override
protected SyntheticSourceSupport syntheticSourceSupport() {
return new SyntheticSourceSupport.Native(this::getSyntheticFieldLoader);
}
private SourceLoader.SyntheticFieldLoader getSyntheticFieldLoader() {
if (fieldType().disableTemplating()) {
return new StringStoredFieldFieldLoader(fieldType().storedNamed(), fieldType().name(), leafName()) {
@Override
protected void write(XContentBuilder b, Object value) throws IOException {
b.value(((BytesRef) value).utf8ToString());
}
};
}
return new CompositeSyntheticFieldLoader(
leafName(),
fullPath(),
new PatternTextSyntheticFieldLoaderLayer(
fieldType().name(),
leafReader -> PatternTextCompositeValues.from(leafReader, fieldType())
)
);
}
NamedAnalyzer getAnalyzer() {
return analyzer;
}
}
| DocValuesSupplier |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/runtime/state/StateSnapshot.java | {
"start": 2677,
"end": 3188
} | interface ____ {
/**
* Writes the data for the specified key-group to the output. You must call {@link
* #getKeyGroupWriter()} once before first calling this method.
*
* @param dov the output.
* @param keyGroupId the key-group to write.
* @throws IOException on write-related problems.
*/
void writeStateInKeyGroup(@Nonnull DataOutputView dov, @Nonnegative int keyGroupId)
throws IOException;
}
}
| StateKeyGroupWriter |
java | quarkusio__quarkus | extensions/mongodb-client/deployment/src/test/java/io/quarkus/mongodb/customization/DefaultCustomizerTest.java | {
"start": 1214,
"end": 1451
} | class ____ implements MongoClientCustomizer {
@Override
public MongoClientSettings.Builder customize(MongoClientSettings.Builder builder) {
return builder.applicationName("my-app");
}
}
}
| MyCustomizer |
java | mybatis__mybatis-3 | src/test/java/org/apache/ibatis/submitted/nested/NestedForEachTest.java | {
"start": 1162,
"end": 4002
} | class ____ {
protected static SqlSessionFactory sqlSessionFactory;
@BeforeAll
static void setUp() throws Exception {
try (Reader reader = Resources.getResourceAsReader("org/apache/ibatis/submitted/nested/MapperConfig.xml")) {
sqlSessionFactory = new SqlSessionFactoryBuilder().build(reader);
}
BaseDataTest.runScript(sqlSessionFactory.getConfiguration().getEnvironment().getDataSource(),
"org/apache/ibatis/submitted/nested/CreateDB.sql");
}
@Test
void simpleSelect() {
try (SqlSession sqlSession = sqlSessionFactory.openSession()) {
Name name = new Name();
name.setLastName("Flintstone");
Parameter parameter = new Parameter();
parameter.addName(name);
List<Map<String, Object>> answer = sqlSession.selectList("org.apache.ibatis.submitted.nested.Mapper.simpleSelect",
parameter);
assertEquals(3, answer.size());
}
}
@Test
void simpleSelectWithPrimitives() {
try (SqlSession sqlSession = sqlSessionFactory.openSession()) {
Map<String, Object> parameter = new HashMap<>();
int[] array = { 1, 3, 5 };
parameter.put("ids", array);
List<Map<String, Object>> answer = sqlSession
.selectList("org.apache.ibatis.submitted.nested.Mapper.simpleSelectWithPrimitives", parameter);
assertEquals(3, answer.size());
}
}
@Test
void simpleSelectWithMapperAndPrimitives() {
try (SqlSession sqlSession = sqlSessionFactory.openSession()) {
Mapper mapper = sqlSession.getMapper(Mapper.class);
List<Map<String, Object>> answer = mapper.simpleSelectWithMapperAndPrimitives(1, 3, 5);
assertEquals(3, answer.size());
}
}
@Test
void nestedSelect() {
try (SqlSession sqlSession = sqlSessionFactory.openSession()) {
Name name = new Name();
name.setLastName("Flintstone");
name.addFirstName("Fred");
name.addFirstName("Wilma");
Parameter parameter = new Parameter();
parameter.addName(name);
List<Map<String, Object>> answer = sqlSession.selectList("org.apache.ibatis.submitted.nested.Mapper.nestedSelect",
parameter);
assertEquals(2, answer.size());
}
}
@Test
void nestedSelect2() {
try (SqlSession sqlSession = sqlSessionFactory.openSession()) {
Name name = new Name();
name.setLastName("Flintstone");
name.addFirstName("Fred");
name.addFirstName("Wilma");
Parameter parameter = new Parameter();
parameter.addName(name);
name = new Name();
name.setLastName("Rubble");
name.addFirstName("Betty");
parameter.addName(name);
List<Map<String, Object>> answer = sqlSession.selectList("org.apache.ibatis.submitted.nested.Mapper.nestedSelect",
parameter);
assertEquals(3, answer.size());
}
}
}
| NestedForEachTest |
java | micronaut-projects__micronaut-core | http-netty/src/main/java/io/micronaut/http/netty/stream/DelegateHttpRequest.java | {
"start": 941,
"end": 1991
} | class ____ extends DelegateHttpMessage implements HttpRequest {
protected final HttpRequest request;
/**
* @param request The Http request
*/
DelegateHttpRequest(HttpRequest request) {
super(request);
this.request = request;
}
@Override
public HttpRequest setMethod(HttpMethod method) {
request.setMethod(method);
return this;
}
@Override
public HttpRequest setUri(String uri) {
request.setUri(uri);
return this;
}
@Override
@Deprecated
public HttpMethod getMethod() {
return request.method();
}
@Override
public HttpMethod method() {
return request.method();
}
@Override
@Deprecated
public String getUri() {
return request.uri();
}
@Override
public String uri() {
return request.uri();
}
@Override
public HttpRequest setProtocolVersion(HttpVersion version) {
super.setProtocolVersion(version);
return this;
}
}
| DelegateHttpRequest |
java | apache__camel | dsl/camel-componentdsl/src/generated/java/org/apache/camel/builder/component/dsl/IgniteMessagingComponentBuilderFactory.java | {
"start": 1904,
"end": 7133
} | interface ____ extends ComponentBuilder<IgniteMessagingComponent> {
/**
* The resource from where to load the configuration. It can be a: URL,
* String or InputStream type.
*
* The option is a: <code>java.lang.Object</code> type.
*
* Group: common
*
* @param configurationResource the value to set
* @return the dsl builder
*/
default IgniteMessagingComponentBuilder configurationResource(java.lang.Object configurationResource) {
doSetProperty("configurationResource", configurationResource);
return this;
}
/**
* To use an existing Ignite instance.
*
* The option is a: <code>org.apache.ignite.Ignite</code>
* type.
*
* Group: common
*
* @param ignite the value to set
* @return the dsl builder
*/
default IgniteMessagingComponentBuilder ignite(org.apache.ignite.Ignite ignite) {
doSetProperty("ignite", ignite);
return this;
}
/**
* Allows the user to set a programmatic ignite configuration.
*
* The option is a:
* <code>org.apache.ignite.configuration.IgniteConfiguration</code> type.
*
* Group: common
*
* @param igniteConfiguration the value to set
* @return the dsl builder
*/
default IgniteMessagingComponentBuilder igniteConfiguration(org.apache.ignite.configuration.IgniteConfiguration igniteConfiguration) {
doSetProperty("igniteConfiguration", igniteConfiguration);
return this;
}
/**
* Allows for bridging the consumer to the Camel routing Error Handler,
* which mean any exceptions (if possible) occurred while the Camel
* consumer is trying to pickup incoming messages, or the likes, will
* now be processed as a message and handled by the routing Error
* Handler. Important: This is only possible if the 3rd party component
* allows Camel to be alerted if an exception was thrown. Some
* components handle this internally only, and therefore
* bridgeErrorHandler is not possible. In other situations we may
* improve the Camel component to hook into the 3rd party component and
* make this possible for future releases. By default the consumer will
* use the org.apache.camel.spi.ExceptionHandler to deal with
* exceptions, that will be logged at WARN or ERROR level and ignored.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: consumer
*
* @param bridgeErrorHandler the value to set
* @return the dsl builder
*/
default IgniteMessagingComponentBuilder bridgeErrorHandler(boolean bridgeErrorHandler) {
doSetProperty("bridgeErrorHandler", bridgeErrorHandler);
return this;
}
/**
* Whether the producer should be started lazy (on the first message).
* By starting lazy you can use this to allow CamelContext and routes to
* startup in situations where a producer may otherwise fail during
* starting and cause the route to fail being started. By deferring this
* startup to be lazy then the startup failure can be handled during
* routing messages via Camel's routing error handlers. Beware that when
* the first message is processed then creating and starting the
* producer may take a little time and prolong the total processing time
* of the processing.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: producer
*
* @param lazyStartProducer the value to set
* @return the dsl builder
*/
default IgniteMessagingComponentBuilder lazyStartProducer(boolean lazyStartProducer) {
doSetProperty("lazyStartProducer", lazyStartProducer);
return this;
}
/**
* Whether autowiring is enabled. This is used for automatic autowiring
* options (the option must be marked as autowired) by looking up in the
* registry to find if there is a single instance of matching type,
* which then gets configured on the component. This can be used for
* automatic configuring JDBC data sources, JMS connection factories,
* AWS Clients, etc.
*
* The option is a: <code>boolean</code> type.
*
* Default: true
* Group: advanced
*
* @param autowiredEnabled the value to set
* @return the dsl builder
*/
default IgniteMessagingComponentBuilder autowiredEnabled(boolean autowiredEnabled) {
doSetProperty("autowiredEnabled", autowiredEnabled);
return this;
}
}
| IgniteMessagingComponentBuilder |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/action/ResolvedIndexExpression.java | {
"start": 4040,
"end": 4358
} | enum ____ {
NONE,
SUCCESS,
CONCRETE_RESOURCE_NOT_VISIBLE,
CONCRETE_RESOURCE_UNAUTHORIZED,
}
/**
* Represents local (non-remote) resolution results, including expanded indices, and a {@link LocalIndexResolutionResult}.
*/
public static final | LocalIndexResolutionResult |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/dialect/NullOrdering.java | {
"start": 194,
"end": 448
} | enum ____ {
/**
* Null is treated as the smallest value.
*/
SMALLEST,
/**
* Null is treated as the greatest value.
*/
GREATEST,
/**
* Null is always ordered first.
*/
FIRST,
/**
* Null is always ordered last.
*/
LAST
}
| NullOrdering |
java | junit-team__junit5 | jupiter-tests/src/test/java/org/junit/jupiter/engine/extension/ExtensionRegistrationViaParametersAndFieldsTests.java | {
"start": 26829,
"end": 26968
} | class ____ {
@RegisterExtension
Extension extension = new InstanceParameterResolver<>(this);
@Nested
| InitializationPerInstanceTestCase |
java | netty__netty | codec-http2/src/main/java/io/netty/handler/codec/http2/HpackHeaderField.java | {
"start": 1410,
"end": 2443
} | class ____ {
// Section 4.1. Calculating Table Size
// The additional 32 octets account for an estimated
// overhead associated with the structure.
static final int HEADER_ENTRY_OVERHEAD = 32;
static long sizeOf(CharSequence name, CharSequence value) {
return name.length() + value.length() + HEADER_ENTRY_OVERHEAD;
}
final CharSequence name;
final CharSequence value;
// This constructor can only be used if name and value are ISO-8859-1 encoded.
HpackHeaderField(CharSequence name, CharSequence value) {
this.name = checkNotNull(name, "name");
this.value = checkNotNull(value, "value");
}
final int size() {
return name.length() + value.length() + HEADER_ENTRY_OVERHEAD;
}
public final boolean equalsForTest(HpackHeaderField other) {
return equalsVariableTime(name, other.name) && equalsVariableTime(value, other.value);
}
@Override
public String toString() {
return name + ": " + value;
}
}
| HpackHeaderField |
java | apache__camel | dsl/camel-jbang/camel-jbang-plugin-kubernetes/src/main/java/org/apache/camel/dsl/jbang/core/commands/kubernetes/support/Capability.java | {
"start": 1013,
"end": 1621
} | enum ____ {
PlatformHttp("platform-http"),
CircuitBreaker("circuit-breaker"),
Health("health"),
Tracing("tracing");
private final String name;
Capability(String name) {
this.name = name;
}
@JsonValue
public String getValue() {
return this.name;
}
@JsonCreator
public static Capability fromValue(String value) {
for (Capability c : values()) {
if (Objects.equals(c.name, value)) {
return c;
}
}
throw new IllegalArgumentException("Unsupported value: " + value);
}
}
| Capability |
java | apache__camel | components/camel-ftp/src/test/java/org/apache/camel/component/file/remote/integration/FtpProducerFileExistOverrideIT.java | {
"start": 1040,
"end": 2088
} | class ____ extends FtpServerTestSupport {
private String getFtpUrl() {
return "ftp://admin@localhost:{{ftp.server.port}}/exist?password=admin&delay=2000&noop=true&fileExist=Override";
}
@Override
public void doPostSetup() throws Exception {
template.sendBodyAndHeader(getFtpUrl(), "Hello World", Exchange.FILE_NAME, "hello.txt");
}
@Test
public void testOverride() throws Exception {
MockEndpoint mock = getMockEndpoint("mock:result");
mock.expectedBodiesReceived("Bye World");
mock.expectedFileExists(service.ftpFile("exist/hello.txt"), "Bye World");
template.sendBodyAndHeader(getFtpUrl(), "Bye World", Exchange.FILE_NAME, "hello.txt");
MockEndpoint.assertIsSatisfied(context);
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
from(getFtpUrl()).to("mock:result");
}
};
}
}
| FtpProducerFileExistOverrideIT |
java | alibaba__nacos | plugin-default-impl/nacos-default-auth-plugin/src/test/java/com/alibaba/nacos/plugin/auth/impl/persistence/EmbeddedUserPersistServiceImplTest.java | {
"start": 1566,
"end": 3100
} | class ____ {
@Mock
private DatabaseOperate databaseOperate;
private EmbeddedUserPersistServiceImpl embeddedUserPersistService;
@BeforeEach
void setUp() throws Exception {
when(databaseOperate.queryOne(any(String.class), any(Object[].class), eq(Integer.class))).thenReturn(0);
embeddedUserPersistService = new EmbeddedUserPersistServiceImpl(databaseOperate);
}
@Test
void testCreateUser() {
embeddedUserPersistService.createUser("username", "password");
Mockito.verify(databaseOperate).blockUpdate();
}
@Test
void testDeleteUser() {
embeddedUserPersistService.deleteUser("username");
Mockito.verify(databaseOperate).blockUpdate();
}
@Test
void testUpdateUserPassword() {
embeddedUserPersistService.updateUserPassword("username", "password");
Mockito.verify(databaseOperate).blockUpdate();
}
@Test
void testFindUserByUsername() {
User user = embeddedUserPersistService.findUserByUsername("username");
assertNull(user);
}
@Test
void testGetUsers() {
Page<User> users = embeddedUserPersistService.getUsers(1, 10, "nacos");
assertNotNull(users);
}
@Test
void testFindUserLikeUsername() {
List<String> username = embeddedUserPersistService.findUserLikeUsername("username");
assertEquals(0, username.size());
}
}
| EmbeddedUserPersistServiceImplTest |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/tenantid/Account.java | {
"start": 343,
"end": 543
} | class ____ {
@Id @GeneratedValue Long id;
@TenantId String tenantId;
@ManyToOne(optional = false) Client client;
public Account(Client client) {
this.client = client;
}
Account() {}
}
| Account |
java | elastic__elasticsearch | x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/TopFloatLongAggregator.java | {
"start": 4385,
"end": 5448
} | class ____ implements AggregatorState {
private final GroupingState internalState;
private SingleState(BigArrays bigArrays, int limit, boolean ascending) {
this.internalState = new GroupingState(bigArrays, limit, ascending);
}
public void add(float value, long outputValue) {
internalState.add(0, value, outputValue);
}
@Override
public void toIntermediate(Block[] blocks, int offset, DriverContext driverContext) {
try (var intValues = driverContext.blockFactory().newConstantIntVector(0, 1)) {
internalState.toIntermediate(blocks, offset, intValues, driverContext);
}
}
Block toBlock(BlockFactory blockFactory) {
try (var intValues = blockFactory.newConstantIntVector(0, 1)) {
return internalState.toBlock(blockFactory, intValues);
}
}
@Override
public void close() {
Releasables.closeExpectNoException(internalState);
}
}
}
| SingleState |
java | quarkusio__quarkus | extensions/jackson/runtime/src/main/java/io/quarkus/jackson/runtime/MixinsRecorder.java | {
"start": 265,
"end": 1050
} | class ____ {
public Supplier<ObjectMapperCustomizer> customizerSupplier(Map<Class<?>, Class<?>> mixinsMap) {
return new Supplier<>() {
@Override
public ObjectMapperCustomizer get() {
return new ObjectMapperCustomizer() {
@Override
public void customize(ObjectMapper objectMapper) {
for (var entry : mixinsMap.entrySet()) {
objectMapper.addMixIn(entry.getKey(), entry.getValue());
}
}
@Override
public int priority() {
return DEFAULT_PRIORITY + 1;
}
};
}
};
}
}
| MixinsRecorder |
java | quarkusio__quarkus | integration-tests/maven/src/test/java/io/quarkus/maven/it/DevMojoIT.java | {
"start": 46007,
"end": 47315
} | class ____ {\n" +
" @GET\n" +
" @Produces(MediaType.TEXT_PLAIN)\n" +
" public String foo() {\n" +
" return \"bar\";\n" +
" }\n" +
"}\n";
FileUtils.write(source, myNewResource, StandardCharsets.UTF_8);
// Wait until we get "bar"
await()
.pollDelay(100, TimeUnit.MILLISECONDS)
.atMost(TestUtils.getDefaultTimeout(), TimeUnit.MINUTES)
.until(() -> devModeClient.getHttpResponse("/app/foo").contains("bar"));
}
@Test
public void testThatClassFileAreCleanedUp() throws MavenInvocationException, IOException, InterruptedException {
testDir = initProject("projects/classic", "projects/project-class-file-deletion");
File source = new File(testDir, "src/main/java/org/acme/ClassDeletionResource.java");
String classDeletionResource = "package org.acme;\n" +
"\n" +
"import jakarta.ws.rs.GET;\n" +
"import jakarta.ws.rs.Path;\n" +
"import jakarta.ws.rs.Produces;\n" +
"import jakarta.ws.rs.core.MediaType;\n" +
"\n" +
"@Path(\"/deletion\")\n" +
"public | MyNewResource |
java | apache__logging-log4j2 | log4j-core/src/main/java/org/apache/logging/log4j/core/layout/internal/ListChecker.java | {
"start": 906,
"end": 1081
} | interface ____ {
static final NoopChecker NOOP_CHECKER = new NoopChecker();
boolean check(final String key);
/**
* Does nothing.
*/
public | ListChecker |
java | alibaba__nacos | client/src/main/java/com/alibaba/nacos/client/config/filter/impl/ConfigResponse.java | {
"start": 1376,
"end": 3034
} | class ____ implements IConfigResponse {
private final Map<String, Object> param = new HashMap<>();
private final IConfigContext configContext = new ConfigContext();
public String getTenant() {
return (String) param.get(TENANT);
}
public void setTenant(String tenant) {
param.put(TENANT, tenant);
}
public String getDataId() {
return (String) param.get(DATA_ID);
}
public void setDataId(String dataId) {
param.put(DATA_ID, dataId);
}
public String getGroup() {
return (String) param.get(GROUP);
}
public void setGroup(String group) {
param.put(GROUP, group);
}
public String getContent() {
return (String) param.get(CONTENT);
}
public void setContent(String content) {
param.put(CONTENT, content);
}
public String getConfigType() {
return (String) param.get(CONFIG_TYPE);
}
public void setConfigType(String configType) {
param.put(CONFIG_TYPE, configType);
}
public String getEncryptedDataKey() {
return (String) param.get(ENCRYPTED_DATA_KEY);
}
public void setEncryptedDataKey(String encryptedDataKey) {
param.put(ENCRYPTED_DATA_KEY, encryptedDataKey);
}
@Override
public Object getParameter(String key) {
return param.get(key);
}
@Override
public void putParameter(String key, Object value) {
param.put(key, value);
}
@Override
public IConfigContext getConfigContext() {
return configContext;
}
}
| ConfigResponse |
java | spring-projects__spring-boot | core/spring-boot-testcontainers/src/dockerTest/java/org/springframework/boot/testcontainers/LoadTimeWeaverAwareConsumerImportTestcontainersTests.java | {
"start": 1686,
"end": 2023
} | class ____ implements LoadTimeWeaverAwareConsumerContainers {
@Autowired
private LoadTimeWeaverAwareConsumer consumer;
@Test
void loadTimeWeaverAwareBeanCanUseJdbcUrlFromContainerBasedConnectionDetails() {
assertThat(this.consumer.jdbcUrl).isNotNull();
}
@Configuration
static | LoadTimeWeaverAwareConsumerImportTestcontainersTests |
java | mockito__mockito | mockito-core/src/test/java/org/mockitousage/stubbing/SmartNullsStubbingTest.java | {
"start": 1597,
"end": 1644
} | interface ____ {
void boo();
}
| Bar |
java | apache__camel | components/camel-spring-parent/camel-spring-xml/src/test/java/org/apache/camel/spring/StartupShutdownSpringCamelContextOrderTest.java | {
"start": 1263,
"end": 1669
} | class ____ {
@Bean
CamelContext camelContext() {
return new SpringCamelContext();
}
}
@Override
ConfigurableApplicationContext createContext() {
final ConfigurableApplicationContext context = new AnnotationConfigApplicationContext(
CamelContextConfiguration.class, Beans.class);
return context;
}
}
| CamelContextConfiguration |
java | spring-projects__spring-security | oauth2/oauth2-resource-server/src/test/java/org/springframework/security/oauth2/server/resource/web/access/BearerTokenAccessDeniedHandlerTests.java | {
"start": 4251,
"end": 4390
} | class ____ extends AbstractOAuth2Token {
TestingOAuth2Token(String tokenValue) {
super(tokenValue);
}
}
}
}
| TestingOAuth2Token |
java | resilience4j__resilience4j | resilience4j-retry/src/main/java/io/github/resilience4j/retry/IntervalFunction.java | {
"start": 46,
"end": 1379
} | class ____ {
private IntervalFunctionCompanion() {
}
@SuppressWarnings("squid:S2245") // this is not security-sensitive code
static double randomize(final double current, final double randomizationFactor) {
final double delta = randomizationFactor * current;
final double min = current - delta;
final double max = current + delta;
return (min + (Math.random() * (max - min + 1)));
}
static void checkInterval(long interval) {
if (interval < 10) {
throw new IllegalArgumentException(
"Illegal argument interval: " + interval + " millis");
}
}
static void checkMultiplier(double multiplier) {
if (multiplier < 1.0) {
throw new IllegalArgumentException("Illegal argument multiplier: " + multiplier);
}
}
static void checkRandomizationFactor(double randomizationFactor) {
if (randomizationFactor < 0.0 || randomizationFactor >= 1.0) {
throw new IllegalArgumentException(
"Illegal argument randomizationFactor: " + randomizationFactor);
}
}
static void checkAttempt(long attempt) {
if (attempt < 1) {
throw new IllegalArgumentException("Illegal argument attempt: " + attempt);
}
}
} | IntervalFunctionCompanion |
java | apache__flink | flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/factories/TestValuesRuntimeFunctions.java | {
"start": 16429,
"end": 19181
} | class ____ extends RichSinkFunction<RowData>
implements CheckpointedFunction, LineageVertexProvider {
private static final long serialVersionUID = 1L;
protected final String tableName;
protected final DataType consumedDataType;
protected final DataStructureConverter converter;
protected transient ListState<Row> rawResultState;
protected transient List<Row> localRawResult;
protected AbstractExactlyOnceSink(
String tableName, DataType consumedDataType, DataStructureConverter converter) {
this.tableName = tableName;
this.consumedDataType = consumedDataType;
this.converter = converter;
}
@Override
public void initializeState(FunctionInitializationContext context) throws Exception {
this.rawResultState =
context.getOperatorStateStore()
.getListState(
new ListStateDescriptor<>(
"sink-results",
ExternalSerializer.of(consumedDataType)));
this.localRawResult = new ArrayList<>();
if (context.isRestored()) {
for (Row value : rawResultState.get()) {
localRawResult.add(value);
}
}
int taskId = getRuntimeContext().getTaskInfo().getIndexOfThisSubtask();
synchronized (LOCK) {
globalRawResult
.computeIfAbsent(tableName, k -> new HashMap<>())
.put(taskId, localRawResult);
}
}
@Override
public void snapshotState(FunctionSnapshotContext context) throws Exception {
synchronized (LOCK) {
rawResultState.update(localRawResult);
}
}
@Override
public LineageVertex getLineageVertex() {
return createLineageVertex(tableName, getNamespace());
}
abstract String getNamespace();
protected void addLocalRawResult(Row row) {
localRawResult.add(row);
Optional.ofNullable(localRawResultsObservers.get(tableName))
.orElse(Collections.emptyList())
.forEach(
c ->
c.accept(
getRuntimeContext()
.getTaskInfo()
.getIndexOfThisSubtask(),
localRawResult));
}
}
static | AbstractExactlyOnceSink |
java | google__dagger | javatests/dagger/hilt/android/ViewModelScopedTest.java | {
"start": 2892,
"end": 3476
} | class ____ extends Hilt_ViewModelScopedTest_TestActivity {
@Override
protected void onCreate(@Nullable Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
if (savedInstanceState == null) {
Fragment f =
getSupportFragmentManager()
.getFragmentFactory()
.instantiate(TestFragment.class.getClassLoader(), TestFragment.class.getName());
getSupportFragmentManager().beginTransaction().add(0, f, "tag").commitNow();
}
}
}
@AndroidEntryPoint(Fragment.class)
public static | TestActivity |
java | apache__kafka | streams/upgrade-system-tests-35/src/test/java/org/apache/kafka/streams/tests/StreamsUpgradeTest.java | {
"start": 1653,
"end": 5008
} | class ____ {
@SuppressWarnings("unchecked")
public static void main(final String[] args) throws Exception {
if (args.length < 1) {
System.err.println("StreamsUpgradeTest requires one argument (properties-file) but provided none");
}
final String propFileName = args[0];
final Properties streamsProperties = Utils.loadProps(propFileName);
System.out.println("StreamsTest instance started (StreamsUpgradeTest v3.5)");
System.out.println("props=" + streamsProperties);
final StreamsBuilder builder = new StreamsBuilder();
final KTable<String, Integer> dataTable = builder.table(
"data", Consumed.with(stringSerde, intSerde));
final KStream<String, Integer> dataStream = dataTable.toStream();
dataStream.process(printProcessorSupplier("data"));
dataStream.to("echo");
final boolean runFkJoin = Boolean.parseBoolean(streamsProperties.getProperty(
"test.run_fk_join",
"false"));
if (runFkJoin) {
try {
final KTable<Integer, String> fkTable = builder.table(
"fk", Consumed.with(intSerde, stringSerde));
buildFKTable(dataStream, fkTable);
} catch (final Exception e) {
System.err.println("Caught " + e.getMessage());
}
}
final Properties config = new Properties();
config.setProperty(
StreamsConfig.APPLICATION_ID_CONFIG,
"StreamsUpgradeTest");
config.put(StreamsConfig.COMMIT_INTERVAL_MS_CONFIG, 1000);
config.putAll(streamsProperties);
final KafkaStreams streams = new KafkaStreams(builder.build(), config);
streams.start();
Runtime.getRuntime().addShutdownHook(new Thread(() -> {
streams.close();
System.out.println("UPGRADE-TEST-CLIENT-CLOSED");
System.out.flush();
}));
}
private static void buildFKTable(final KStream<String, Integer> primaryTable,
final KTable<Integer, String> otherTable) {
final KStream<String, String> kStream = primaryTable.toTable()
.join(otherTable, v -> v, (k0, v0) -> v0)
.toStream();
kStream.process(printProcessorSupplier("fk"));
kStream.to("fk-result", Produced.with(stringSerde, stringSerde));
}
private static <KIn, VIn, KOut, VOut> ProcessorSupplier<KIn, VIn, KOut, VOut> printProcessorSupplier(final String topic) {
return () -> new ContextualProcessor<KIn, VIn, KOut, VOut>() {
private int numRecordsProcessed = 0;
@Override
public void init(final ProcessorContext<KOut, VOut> context) {
System.out.println("[3.5] initializing processor: topic=" + topic + "taskId=" + context.taskId());
numRecordsProcessed = 0;
}
@Override
public void process(final Record<KIn, VIn> record) {
numRecordsProcessed++;
if (numRecordsProcessed % 100 == 0) {
System.out.println("processed " + numRecordsProcessed + " records from topic=" + topic);
}
}
@Override
public void close() {}
};
}
}
| StreamsUpgradeTest |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSImagePreTransactionalStorageInspector.java | {
"start": 1968,
"end": 10753
} | class ____ extends FSImageStorageInspector {
private static final Logger LOG =
LoggerFactory.getLogger(FSImagePreTransactionalStorageInspector.class);
/* Flag if there is at least one storage dir that doesn't contain the newest
* fstime */
private boolean hasOutOfDateStorageDirs = false;
/* Flag set false if there are any "previous" directories found */
private boolean isUpgradeFinalized = true;
private boolean needToSaveAfterRecovery = false;
// Track the name and edits dir with the latest times
private long latestNameCheckpointTime = Long.MIN_VALUE;
private long latestEditsCheckpointTime = Long.MIN_VALUE;
private StorageDirectory latestNameSD = null;
private StorageDirectory latestEditsSD = null;
/** Set to determine if all of storageDirectories share the same checkpoint */
final Set<Long> checkpointTimes = new HashSet<Long>();
private final List<String> imageDirs = new ArrayList<String>();
private final List<String> editsDirs = new ArrayList<String>();
@Override
void inspectDirectory(StorageDirectory sd) throws IOException {
// Was the file just formatted?
if (!sd.getVersionFile().exists()) {
hasOutOfDateStorageDirs = true;
return;
}
boolean imageExists = false;
boolean editsExists = false;
// Determine if sd is image, edits or both
if (sd.getStorageDirType().isOfType(NameNodeDirType.IMAGE)) {
imageExists = NNStorage.getStorageFile(sd, NameNodeFile.IMAGE).exists();
imageDirs.add(sd.getRoot().getCanonicalPath());
}
if (sd.getStorageDirType().isOfType(NameNodeDirType.EDITS)) {
editsExists = NNStorage.getStorageFile(sd, NameNodeFile.EDITS).exists();
editsDirs.add(sd.getRoot().getCanonicalPath());
}
long checkpointTime = readCheckpointTime(sd);
checkpointTimes.add(checkpointTime);
if (sd.getStorageDirType().isOfType(NameNodeDirType.IMAGE) &&
(latestNameCheckpointTime < checkpointTime) && imageExists) {
latestNameCheckpointTime = checkpointTime;
latestNameSD = sd;
}
if (sd.getStorageDirType().isOfType(NameNodeDirType.EDITS) &&
(latestEditsCheckpointTime < checkpointTime) && editsExists) {
latestEditsCheckpointTime = checkpointTime;
latestEditsSD = sd;
}
// check that we have a valid, non-default checkpointTime
if (checkpointTime <= 0L)
hasOutOfDateStorageDirs = true;
// set finalized flag
isUpgradeFinalized = isUpgradeFinalized && !sd.getPreviousDir().exists();
}
/**
* Determine the checkpoint time of the specified StorageDirectory
*
* @param sd StorageDirectory to check
* @return If file exists and can be read, last checkpoint time. If not, 0L.
* @throws IOException On errors processing file pointed to by sd
*/
static long readCheckpointTime(StorageDirectory sd) throws IOException {
File timeFile = NNStorage.getStorageFile(sd, NameNodeFile.TIME);
long timeStamp = 0L;
if (timeFile.exists() && FileUtil.canRead(timeFile)) {
DataInputStream in = new DataInputStream(
Files.newInputStream(timeFile.toPath()));
try {
timeStamp = in.readLong();
in.close();
in = null;
} finally {
IOUtils.cleanupWithLogger(LOG, in);
}
}
return timeStamp;
}
@Override
boolean isUpgradeFinalized() {
return isUpgradeFinalized;
}
@Override
List<FSImageFile> getLatestImages() throws IOException {
// We should have at least one image and one edits dirs
if (latestNameSD == null)
throw new IOException("Image file is not found in " + imageDirs);
if (latestEditsSD == null)
throw new IOException("Edits file is not found in " + editsDirs);
// Make sure we are loading image and edits from same checkpoint
if (latestNameCheckpointTime > latestEditsCheckpointTime
&& latestNameSD != latestEditsSD
&& latestNameSD.getStorageDirType() == NameNodeDirType.IMAGE
&& latestEditsSD.getStorageDirType() == NameNodeDirType.EDITS) {
// This is a rare failure when NN has image-only and edits-only
// storage directories, and fails right after saving images,
// in some of the storage directories, but before purging edits.
// See -NOTE- in saveNamespace().
LOG.error("This is a rare failure scenario!!!");
LOG.error("Image checkpoint time " + latestNameCheckpointTime +
" > edits checkpoint time " + latestEditsCheckpointTime);
LOG.error("Name-node will treat the image as the latest state of " +
"the namespace. Old edits will be discarded.");
} else if (latestNameCheckpointTime != latestEditsCheckpointTime) {
throw new IOException("Inconsistent storage detected, " +
"image and edits checkpoint times do not match. " +
"image checkpoint time = " + latestNameCheckpointTime +
"edits checkpoint time = " + latestEditsCheckpointTime);
}
needToSaveAfterRecovery = doRecovery();
FSImageFile file = new FSImageFile(latestNameSD,
NNStorage.getStorageFile(latestNameSD, NameNodeFile.IMAGE),
HdfsServerConstants.INVALID_TXID);
LinkedList<FSImageFile> ret = new LinkedList<FSImageFile>();
ret.add(file);
return ret;
}
@Override
boolean needToSave() {
return hasOutOfDateStorageDirs ||
checkpointTimes.size() != 1 ||
latestNameCheckpointTime > latestEditsCheckpointTime ||
needToSaveAfterRecovery;
}
boolean doRecovery() throws IOException {
LOG.debug(
"Performing recovery in "+ latestNameSD + " and " + latestEditsSD);
boolean needToSave = false;
File curFile =
NNStorage.getStorageFile(latestNameSD, NameNodeFile.IMAGE);
File ckptFile =
NNStorage.getStorageFile(latestNameSD, NameNodeFile.IMAGE_NEW);
//
// If we were in the midst of a checkpoint
//
if (ckptFile.exists()) {
needToSave = true;
if (NNStorage.getStorageFile(latestEditsSD, NameNodeFile.EDITS_NEW)
.exists()) {
//
// checkpointing migth have uploaded a new
// merged image, but we discard it here because we are
// not sure whether the entire merged image was uploaded
// before the namenode crashed.
//
if (!ckptFile.delete()) {
throw new IOException("Unable to delete " + ckptFile);
}
} else {
//
// checkpointing was in progress when the namenode
// shutdown. The fsimage.ckpt was created and the edits.new
// file was moved to edits. We complete that checkpoint by
// moving fsimage.new to fsimage. There is no need to
// update the fstime file here. renameTo fails on Windows
// if the destination file already exists.
//
if (!ckptFile.renameTo(curFile)) {
if (!curFile.delete())
LOG.warn("Unable to delete dir " + curFile + " before rename");
if (!ckptFile.renameTo(curFile)) {
throw new IOException("Unable to rename " + ckptFile +
" to " + curFile);
}
}
}
}
return needToSave;
}
/**
* @return a list with the paths to EDITS and EDITS_NEW (if it exists)
* in a given storage directory.
*/
static List<File> getEditsInStorageDir(StorageDirectory sd) {
ArrayList<File> files = new ArrayList<File>();
File edits = NNStorage.getStorageFile(sd, NameNodeFile.EDITS);
assert edits.exists() : "Expected edits file at " + edits;
files.add(edits);
File editsNew = NNStorage.getStorageFile(sd, NameNodeFile.EDITS_NEW);
if (editsNew.exists()) {
files.add(editsNew);
}
return files;
}
private List<File> getLatestEditsFiles() {
if (latestNameCheckpointTime > latestEditsCheckpointTime) {
// the image is already current, discard edits
LOG.debug(
"Name checkpoint time is newer than edits, not loading edits.");
return Collections.emptyList();
}
return getEditsInStorageDir(latestEditsSD);
}
@Override
long getMaxSeenTxId() {
return 0L;
}
/**
 * Opens an input stream for each edits file found in the given storage,
 * using a fresh inspector to locate the latest files.
 *
 * @param storage the NameNode storage to scan
 * @return one {@link EditLogInputStream} per discovered edits file
 * @throws IOException if the storage directories cannot be inspected
 */
static Iterable<EditLogInputStream> getEditLogStreams(NNStorage storage)
    throws IOException {
  FSImagePreTransactionalStorageInspector inspector =
      new FSImagePreTransactionalStorageInspector();
  storage.inspectStorageDirs(inspector);
  List<EditLogInputStream> streams =
      new ArrayList<EditLogInputStream>();
  for (File editsFile : inspector.getLatestEditsFiles()) {
    streams.add(new EditLogFileInputStream(editsFile));
  }
  return streams;
}
}
| FSImagePreTransactionalStorageInspector |
java | apache__hadoop | hadoop-tools/hadoop-benchmark/src/main/java/org/apache/hadoop/benchmark/VectoredReadBenchmark.java | {
"start": 5031,
"end": 7806
} | class ____ extends FileRangeImpl implements
CompletionHandler<Integer, FileRangeCallback> {
// Channel the asynchronous reads are issued against.
private final AsynchronousFileChannel channel;
// Destination buffer for this range's bytes.
private final ByteBuffer buffer;
// Bytes read into the buffer so far; grows as partial reads complete.
private int completed = 0;
// Shared tracker that is told when this range finishes or fails.
private final Joiner joiner;
/**
 * Creates a callback that reads [offset, offset + length) into
 * {@code buffer} and reports completion or failure to {@code joiner}.
 */
FileRangeCallback(AsynchronousFileChannel channel, long offset,
int length, Joiner joiner, ByteBuffer buffer) {
super(offset, length, null);
this.channel = channel;
this.joiner = joiner;
this.buffer = buffer;
}
/**
 * Completion callback for one asynchronous read. Handles short reads by
 * re-issuing a read for the remainder of the range; once the full length
 * has arrived, flips the buffer and notifies the joiner.
 *
 * @param result number of bytes read, or -1 at end of file
 * @param attachment this range (the read is submitted with this == attachment)
 */
@Override
public void completed(Integer result, FileRangeCallback attachment) {
  final int bytes = result;
  if (bytes == -1) {
    // Report the failure and stop here. Without the return, execution
    // fell through, added -1 to the completed count, and issued a bogus
    // follow-up read past the end of the file.
    failed(new EOFException("Read past end of file"), this);
    return;
  }
  completed += bytes;
  if (completed < this.getLength()) {
    // Short read: continue reading the rest of this range.
    channel.read(buffer, this.getOffset() + completed, this, this);
  } else {
    buffer.flip();
    joiner.finish();
  }
}
/**
 * Forwards a read failure to the joiner so the thread blocked in
 * {@code joiner.join()} can observe it.
 */
@Override
public void failed(Throwable exc, FileRangeCallback attachment) {
joiner.failed(exc, this);
}
}
/**
 * Benchmarks 100 asynchronous range reads issued directly against an
 * {@link AsynchronousFileChannel}, waiting for all of them via the joiner.
 *
 * @param bufferChoice supplies the buffer allocator (heap vs direct)
 * @param blackhole JMH sink preventing dead-code elimination
 * @throws Exception on any read or join failure
 */
@Benchmark
public void asyncFileChanArray(BufferChoice bufferChoice,
    Blackhole blackhole) throws Exception {
  java.nio.file.Path path =
      FileSystems.getDefault().getPath(DATA_PATH.toString());
  List<FileRangeImpl> ranges = new ArrayList<>();
  // try-with-resources: the original leaked the channel when a read
  // submission or the join threw before the explicit close().
  try (AsynchronousFileChannel channel =
      AsynchronousFileChannel.open(path, StandardOpenOption.READ)) {
    Joiner joiner = new Joiner(100);
    for (int m = 0; m < 100; ++m) {
      ByteBuffer buffer = bufferChoice.allocate.apply(READ_SIZE);
      FileRangeCallback range = new FileRangeCallback(channel, m * SEEK_SIZE,
          READ_SIZE, joiner, buffer);
      ranges.add(range);
      channel.read(buffer, range.getOffset(), range, range);
    }
    joiner.join();
  }
  blackhole.consume(ranges);
}
/**
 * Benchmarks the same 100 range reads done synchronously with positioned
 * {@code readFully} calls on a single input stream.
 *
 * @param fsChoice supplies the filesystem implementation under test
 * @param blackhole JMH sink preventing dead-code elimination
 * @throws Exception on any read failure
 */
@Benchmark
public void syncRead(FileSystemChoice fsChoice,
    Blackhole blackhole) throws Exception {
  // try-with-resources: the original leaked the stream when readFully threw.
  try (FSDataInputStream stream = fsChoice.fs.open(DATA_PATH)) {
    List<byte[]> result = new ArrayList<>();
    for (int m = 0; m < 100; ++m) {
      byte[] buffer = new byte[READ_SIZE];
      stream.readFully(m * SEEK_SIZE, buffer);
      result.add(buffer);
    }
    blackhole.consume(result);
  }
}
/**
 * Run the benchmarks.
 *
 * @param args args[0] is the pathname of a 100MB data file
 * @throws Exception any ex.
 */
public static void main(String[] args) throws Exception {
  // The original indexed args[0] unconditionally and died with an
  // ArrayIndexOutOfBoundsException when invoked without arguments.
  if (args.length < 1) {
    System.err.println("Usage: VectoredReadBenchmark <data-file>");
    System.exit(1);
  }
  OptionsBuilder opts = new OptionsBuilder();
  opts.include("VectoredReadBenchmark");
  // Pass the data file to the forked benchmark JVM as a system property.
  opts.jvmArgs("-server", "-Xms256m", "-Xmx2g",
      "-D" + DATA_PATH_PROPERTY + "=" + args[0]);
  opts.forks(1);
  new Runner(opts.build()).run();
}
}
| FileRangeCallback |
java | elastic__elasticsearch | x-pack/plugin/mapper-exponential-histogram/src/test/java/org/elasticsearch/xpack/exponentialhistogram/aggregations/metrics/ExponentialHistogramAvgAggregatorTests.java | {
"start": 1816,
"end": 6195
} | class ____ extends ExponentialHistogramAggregatorTestCase {
private static final String FIELD_NAME = "my_histogram";
/**
 * Indexes a random batch of histograms and checks the aggregated average
 * matches sum-of-sums divided by sum-of-counts.
 */
public void testMatchesNumericDocValues() throws IOException {
    final List<ExponentialHistogram> histos = createRandomHistograms(randomIntBetween(1, 1000));
    final double sumOfAll = histos.stream().mapToDouble(ExponentialHistogram::sum).sum();
    final long countOfAll = histos.stream().mapToLong(ExponentialHistogram::valueCount).sum();
    final double meanOfAll = sumOfAll / countOfAll;
    testCase(new MatchAllDocsQuery(), iw -> {
        for (ExponentialHistogram histo : histos) {
            addHistogramDoc(iw, FIELD_NAME, histo);
        }
    }, avg -> {
        if (countOfAll == 0) {
            // All histograms were empty: the aggregation reports no value.
            assertThat(AggregationInspectionHelper.hasValue(avg), equalTo(false));
        } else {
            assertThat(avg.value(), closeTo(meanOfAll, 0.0001d));
            assertThat(AggregationInspectionHelper.hasValue(avg), equalTo(true));
        }
    });
}
/**
 * With nothing indexed at all, the average is NaN and the aggregation
 * reports no value.
 */
public void testNoDocs() throws IOException {
    testCase(new MatchAllDocsQuery(), iw -> {}, avg -> {
        assertThat(avg.value(), equalTo(Double.NaN));
        assertThat(AggregationInspectionHelper.hasValue(avg), equalTo(false));
    });
}
/**
 * Indexes histograms under a different field name so the aggregation sees
 * no values; the result must be NaN / "no value".
 */
public void testNoMatchingField() throws IOException {
    List<ExponentialHistogram> histograms = createRandomHistograms(10);
    testCase(new MatchAllDocsQuery(), iw -> {
        for (ExponentialHistogram histo : histograms) {
            addHistogramDoc(iw, "wrong_field", histo);
        }
    }, avg -> {
        assertThat(avg.value(), equalTo(Double.NaN));
        assertThat(AggregationInspectionHelper.hasValue(avg), equalTo(false));
    });
}
/**
 * Tags each random histogram with a yes/no flag, indexes the flag as a
 * "match" keyword field, and verifies that a TermQuery on "yes" restricts
 * the average to only the flagged histograms.
 */
public void testQueryFiltering() throws IOException {
List<Map.Entry<ExponentialHistogram, Boolean>> histogramsWithFilter = new ArrayList<>();
// Re-roll the random flags until at least one histogram is selected,
// so the expected average below is well-defined in the common case.
do {
histogramsWithFilter.clear();
createRandomHistograms(10).stream().map(histo -> Map.entry(histo, randomBoolean())).forEach(histogramsWithFilter::add);
} while (histogramsWithFilter.stream().noneMatch(Map.Entry::getValue)); // ensure at least one matches
// Expected statistics over only the flagged ("yes") histograms.
double filteredSum = histogramsWithFilter.stream().filter(Map.Entry::getValue).mapToDouble(entry -> entry.getKey().sum()).sum();
long filteredCnt = histogramsWithFilter.stream().filter(Map.Entry::getValue).mapToLong(entry -> entry.getKey().valueCount()).sum();
double filteredAvg = filteredSum / filteredCnt;
testCase(
new TermQuery(new Term("match", "yes")),
iw -> histogramsWithFilter.forEach(
entry -> addHistogramDoc(
iw,
FIELD_NAME,
entry.getKey(),
new StringField("match", entry.getValue() ? "yes" : "no", Field.Store.NO)
)
),
avg -> {
// filteredCnt can still be 0 if every selected histogram was empty.
if (filteredCnt > 0) {
assertThat(avg.value(), closeTo(filteredAvg, 0.0001d));
assertThat(AggregationInspectionHelper.hasValue(avg), equalTo(true));
} else {
assertThat(AggregationInspectionHelper.hasValue(avg), equalTo(false));
}
}
);
}
/**
 * Runs the avg aggregation over a histogram-typed field: builds the field
 * type, indexes via {@code buildIndex}, and hands the result to
 * {@code verify}.
 */
private void testCase(Query query, CheckedConsumer<RandomIndexWriter, IOException> buildIndex, Consumer<InternalAvg> verify)
    throws IOException {
    ExponentialHistogramFieldMapper.ExponentialHistogramFieldType fieldType =
        new ExponentialHistogramFieldMapper.ExponentialHistogramFieldType(FIELD_NAME, Collections.emptyMap(), null);
    AggregationBuilder aggBuilder = createAggBuilderForTypeTest(fieldType, FIELD_NAME);
    AggTestConfig config = new AggTestConfig(aggBuilder, fieldType).withQuery(query);
    testCase(buildIndex, verify, config);
}
/**
 * Builds a plain avg aggregation over the given field; the field type is
 * supplied separately by the test harness.
 */
@Override
protected AggregationBuilder createAggBuilderForTypeTest(MappedFieldType fieldType, String fieldName) {
    AvgAggregationBuilder builder = new AvgAggregationBuilder("avg_agg");
    builder.field(fieldName);
    return builder;
}
@Override
protected List<ValuesSourceType> getSupportedValuesSourceTypes() {
// The avg aggregation under test accepts the numeric-like core source
// types plus the exponential histogram source type.
return List.of(
CoreValuesSourceType.NUMERIC,
CoreValuesSourceType.DATE,
CoreValuesSourceType.BOOLEAN,
ExponentialHistogramValuesSourceType.EXPONENTIAL_HISTOGRAM
);
}
}
| ExponentialHistogramAvgAggregatorTests |
java | redisson__redisson | redisson/src/test/java/org/redisson/jcache/JCacheTest.java | {
"start": 23679,
"end": 24676
} | class ____ implements CacheEntryUpdatedListener<String, String>, Serializable {
// Expected key of the updated cache entry.
private Object key;
// Expected previous value of the entry.
private Object oldValue;
// Expected new value of the entry.
private Object value;
// Counted down once the expected update event has been observed.
private CountDownLatch latch;
/**
 * Listener that asserts an update event matches the expected
 * key/old-value/new-value triple and then releases the latch.
 */
public UpdatedListener(CountDownLatch latch, Object key, Object oldValue, Object value) {
super();
this.latch = latch;
this.key = key;
this.oldValue = oldValue;
this.value = value;
}
/**
 * Checks that the first delivered event carries the expected key, old
 * value and new value, then signals the waiting test thread.
 */
@Override
public void onUpdated(Iterable<CacheEntryEvent<? extends String, ? extends String>> events)
    throws CacheEntryListenerException {
    // Only the first event is inspected; the test triggers a single update.
    CacheEntryEvent<? extends String, ? extends String> event = events.iterator().next();
    assertThat(event.getKey()).isEqualTo(key);
    assertThat(event.getOldValue()).isEqualTo(oldValue);
    assertThat(event.getValue()).isEqualTo(value);
    latch.countDown();
}
}
public static | UpdatedListener |
java | spring-projects__spring-framework | spring-context/src/main/java/org/springframework/context/annotation/ConfigurationClassEnhancer.java | {
"start": 11031,
"end": 11143
} | class ____ (in order for ASM to pick it up when doing common superclass resolution).
*/
private static | generation |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.