language
stringclasses 1
value | repo
stringclasses 60
values | path
stringlengths 22
294
| class_span
dict | source
stringlengths 13
1.16M
| target
stringlengths 1
113
|
|---|---|---|---|---|---|
java
|
elastic__elasticsearch
|
distribution/tools/keystore-cli/src/main/java/org/elasticsearch/cli/keystore/AddFileKeyStoreCommand.java
|
{
"start": 1060,
"end": 3278
}
|
class ____ extends BaseKeyStoreCommand {
private final OptionSpec<String> arguments;
AddFileKeyStoreCommand() {
super("Add a file setting to the keystore", false);
this.forceOption = parser.acceptsAll(
Arrays.asList("f", "force"),
"Overwrite existing setting without prompting, creating keystore if necessary"
);
// jopt simple has issue with multiple non options, so we just get one set of them here
// and convert to File when necessary
// see https://github.com/jopt-simple/jopt-simple/issues/103
this.arguments = parser.nonOptions("(setting path)+");
}
@Override
protected void executeCommand(Terminal terminal, OptionSet options, Environment env) throws Exception {
final List<String> argumentValues = arguments.values(options);
if (argumentValues.size() == 0) {
throw new UserException(ExitCodes.USAGE, "Missing setting name");
}
if (argumentValues.size() % 2 != 0) {
throw new UserException(ExitCodes.USAGE, "settings and filenames must come in pairs");
}
final KeyStoreWrapper keyStore = getKeyStore();
for (int i = 0; i < argumentValues.size(); i += 2) {
final String setting = argumentValues.get(i);
if (keyStore.getSettingNames().contains(setting) && options.has(forceOption) == false) {
if (terminal.promptYesNo("Setting " + setting + " already exists. Overwrite?", false) == false) {
terminal.println("Exiting without modifying keystore.");
return;
}
}
final Path file = getPath(argumentValues.get(i + 1));
if (Files.exists(file) == false) {
throw new UserException(ExitCodes.IO_ERROR, "File [" + file.toString() + "] does not exist");
}
keyStore.setFile(setting, Files.readAllBytes(file));
}
keyStore.save(env.configDir(), getKeyStorePassword().getChars());
}
@SuppressForbidden(reason = "file arg for cli")
private static Path getPath(String file) {
return PathUtils.get(file);
}
}
|
AddFileKeyStoreCommand
|
java
|
micronaut-projects__micronaut-core
|
test-suite/src/test/java/io/micronaut/docs/server/filters/filtermethods/TraceService.java
|
{
"start": 859,
"end": 1157
}
|
class ____ {
private static final Logger LOG = LoggerFactory.getLogger(TraceService.class);
public void trace(HttpRequest<?> request) {
LOG.debug("Tracing request: {}", request.getUri());
// trace logic here, potentially performing I/O <1>
}
}
// end::class[]
|
TraceService
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/merge/SimpleMergeTest.java
|
{
"start": 679,
"end": 1177
}
|
class ____ {
@Test
public void testMergeNewEntity(SessionFactoryScope scope) {
scope.inTransaction(
session -> {
MyEntity newEntity = new MyEntity();
assertThat( newEntity.getId() ).isNull();
MyEntity mergedEntity = session.merge( newEntity );
assertThat( mergedEntity ).isNotSameAs( newEntity );
assertThat( mergedEntity.getId() ).isNotNull();
assertThat( newEntity.getId() ).isNull();
}
);
}
@Entity(name = "MyEntity")
public static
|
SimpleMergeTest
|
java
|
apache__kafka
|
coordinator-common/src/main/java/org/apache/kafka/coordinator/common/runtime/EventAccumulator.java
|
{
"start": 1699,
"end": 1809
}
|
class ____<K, T extends EventAccumulator.Event<K>> implements AutoCloseable {
/**
* The
|
EventAccumulator
|
java
|
spring-projects__spring-security
|
config/src/main/java/org/springframework/security/config/annotation/web/configuration/EnableWebSecurity.java
|
{
"start": 3201,
"end": 3411
}
|
interface ____ {
/**
* Controls debugging support for Spring Security. Default is false.
* @return if true, enables debug support with Spring Security
*/
boolean debug() default false;
}
|
EnableWebSecurity
|
java
|
alibaba__druid
|
core/src/test/java/com/alibaba/druid/benckmark/pool/Case2.java
|
{
"start": 1230,
"end": 7108
}
|
class ____ extends TestCase {
private String jdbcUrl;
private String user;
private String password;
private String driverClass;
private int initialSize = 10;
private int minPoolSize = 10;
private int maxPoolSize = 50;
private int maxActive = 50;
private String validationQuery = "SELECT 1";
private int threadCount = 100;
private int executeCount = 4;
final int LOOP_COUNT = (1000 * 100) / executeCount;
private boolean testOnBorrow = true;
protected void setUp() throws Exception {
jdbcUrl = "jdbc:fake:dragoon_v25masterdb";
user = "dragoon25";
password = "dragoon25";
driverClass = "com.alibaba.druid.mock.MockDriver";
}
public void test_0() throws Exception {
DruidDataSource dataSource = new DruidDataSource();
dataSource.setInitialSize(initialSize);
dataSource.setMaxActive(maxActive);
dataSource.setMinIdle(minPoolSize);
dataSource.setMaxIdle(maxPoolSize);
dataSource.setPoolPreparedStatements(true);
dataSource.setDriverClassName(driverClass);
dataSource.setUrl(jdbcUrl);
dataSource.setPoolPreparedStatements(true);
dataSource.setUsername(user);
dataSource.setPassword(password);
dataSource.setValidationQuery(validationQuery);
dataSource.setTestOnBorrow(testOnBorrow);
for (int i = 0; i < executeCount; ++i) {
p0(dataSource, "druid", threadCount);
}
System.out.println();
}
public void test_1() throws Exception {
final BasicDataSource dataSource = new BasicDataSource();
dataSource.setInitialSize(initialSize);
dataSource.setMaxActive(maxActive);
dataSource.setMinIdle(minPoolSize);
dataSource.setMaxIdle(maxPoolSize);
dataSource.setPoolPreparedStatements(true);
dataSource.setDriverClassName(driverClass);
dataSource.setUrl(jdbcUrl);
dataSource.setPoolPreparedStatements(true);
dataSource.setUsername(user);
dataSource.setPassword(password);
dataSource.setValidationQuery("SELECT 1");
dataSource.setTestOnBorrow(testOnBorrow);
for (int i = 0; i < executeCount; ++i) {
p0(dataSource, "dbcp", threadCount);
}
System.out.println();
}
// 当testOnBorrow为true时,BoneCP的处理策略不一样,所以略过
public void f_test_2() throws Exception {
BoneCPDataSource dataSource = new BoneCPDataSource();
// dataSource.(10);
// dataSource.setMaxActive(50);
dataSource.setMinConnectionsPerPartition(minPoolSize);
dataSource.setMaxConnectionsPerPartition(maxPoolSize);
dataSource.setDriverClass(driverClass);
dataSource.setJdbcUrl(jdbcUrl);
dataSource.setStatementsCacheSize(100);
// dataSource.setMaxOpenPreparedStatements(100);
dataSource.setUsername(user);
dataSource.setPassword(password);
dataSource.setConnectionTestStatement("SELECT 1");
dataSource.setPartitionCount(1);
dataSource.setAcquireIncrement(5);
dataSource.setIdleConnectionTestPeriod(0L);
// dataSource.setDisableConnectionTracking(true);
for (int i = 0; i < executeCount; ++i) {
p0(dataSource, "boneCP", threadCount);
}
System.out.println();
}
private void p0(final DataSource dataSource, String name, int threadCount) throws Exception {
final CountDownLatch startLatch = new CountDownLatch(1);
final CountDownLatch endLatch = new CountDownLatch(threadCount);
final AtomicLong blockedStat = new AtomicLong();
final AtomicLong waitedStat = new AtomicLong();
for (int i = 0; i < threadCount; ++i) {
Thread thread = new Thread() {
public void run() {
try {
startLatch.await();
long threadId = Thread.currentThread().getId();
long startBlockedCount, startWaitedCount;
{
ThreadInfo threadInfo = ManagementFactory.getThreadMXBean().getThreadInfo(threadId);
startBlockedCount = threadInfo.getBlockedCount();
startWaitedCount = threadInfo.getWaitedCount();
}
for (int i = 0; i < LOOP_COUNT; ++i) {
Connection conn = dataSource.getConnection();
conn.close();
}
ThreadInfo threadInfo = ManagementFactory.getThreadMXBean().getThreadInfo(threadId);
long blockedCount = threadInfo.getBlockedCount() - startBlockedCount;
long waitedCount = threadInfo.getWaitedCount() - startWaitedCount;
blockedStat.addAndGet(blockedCount);
waitedStat.addAndGet(waitedCount);
} catch (Exception ex) {
ex.printStackTrace();
}
endLatch.countDown();
}
};
thread.start();
}
long startMillis = System.currentTimeMillis();
long startYGC = TestUtil.getYoungGC();
long startFullGC = TestUtil.getFullGC();
startLatch.countDown();
endLatch.await();
long millis = System.currentTimeMillis() - startMillis;
long ygc = TestUtil.getYoungGC() - startYGC;
long fullGC = TestUtil.getFullGC() - startFullGC;
System.out.println("thread " + threadCount + " " + name + " millis : "
+ NumberFormat.getInstance().format(millis) + ", YGC " + ygc + " FGC " + fullGC
+ " blockedCount " + blockedStat.get() + " waitedCount " + waitedStat.get());
}
}
|
Case2
|
java
|
google__guava
|
android/guava-testlib/src/com/google/common/collect/testing/SortedSetTestSuiteBuilder.java
|
{
"start": 1971,
"end": 5056
}
|
class ____
@Override
protected List<Class<? extends AbstractTester>> getTesters() {
List<Class<? extends AbstractTester>> testers = copyToList(super.getTesters());
testers.add(SortedSetNavigationTester.class);
return testers;
}
@Override
public TestSuite createTestSuite() {
if (!getFeatures().contains(CollectionFeature.KNOWN_ORDER)) {
List<Feature<?>> features = copyToList(getFeatures());
features.add(CollectionFeature.KNOWN_ORDER);
withFeatures(features);
}
return super.createTestSuite();
}
@Override
protected List<TestSuite> createDerivedSuites(
FeatureSpecificTestSuiteBuilder<?, ? extends OneSizeTestContainerGenerator<Collection<E>, E>>
parentBuilder) {
List<TestSuite> derivedSuites = super.createDerivedSuites(parentBuilder);
if (!parentBuilder.getFeatures().contains(CollectionFeature.SUBSET_VIEW)) {
derivedSuites.add(createSubsetSuite(parentBuilder, Bound.NO_BOUND, Bound.EXCLUSIVE));
derivedSuites.add(createSubsetSuite(parentBuilder, Bound.INCLUSIVE, Bound.NO_BOUND));
derivedSuites.add(createSubsetSuite(parentBuilder, Bound.INCLUSIVE, Bound.EXCLUSIVE));
}
return derivedSuites;
}
/**
* Creates a suite whose set has some elements filtered out of view.
*
* <p>Because the set may be ascending or descending, this test must derive the relative order of
* these extreme values rather than relying on their regular sort ordering.
*/
final TestSuite createSubsetSuite(
FeatureSpecificTestSuiteBuilder<?, ? extends OneSizeTestContainerGenerator<Collection<E>, E>>
parentBuilder,
Bound from,
Bound to) {
TestSortedSetGenerator<E> delegate =
(TestSortedSetGenerator<E>) parentBuilder.getSubjectGenerator().getInnerGenerator();
List<Feature<?>> features = new ArrayList<>(parentBuilder.getFeatures());
Set<Method> suppressing = new HashSet<>(parentBuilder.getSuppressedTests());
features.add(CollectionFeature.SUBSET_VIEW);
if (features.remove(CollectionFeature.ALLOWS_NULL_VALUES)) {
// the null value might be out of bounds, so we can't always construct a subset with nulls
features.add(CollectionFeature.ALLOWS_NULL_QUERIES);
// but add null might still be supported if it happens to be within range of the subset
suppressing.add(CollectionAddTester.getAddNullUnsupportedMethod());
suppressing.add(CollectionAddAllTester.getAddAllNullUnsupportedMethod());
}
return newBuilderUsing(delegate, to, from)
.named(parentBuilder.getName() + " subSet " + from + "-" + to)
.withFeatures(features)
.suppressing(suppressing)
.withSetUp(parentBuilder.getSetUp())
.withTearDown(parentBuilder.getTearDown())
.createTestSuite();
}
/** Like using() but overrideable by NavigableSetTestSuiteBuilder. */
SortedSetTestSuiteBuilder<E> newBuilderUsing(
TestSortedSetGenerator<E> delegate, Bound to, Bound from) {
return using(new SortedSetSubsetTestSetGenerator<E>(delegate, to, from));
}
}
|
literals
|
java
|
apache__flink
|
flink-table/flink-table-common/src/main/java/org/apache/flink/table/types/extraction/ExtractionUtils.java
|
{
"start": 20036,
"end": 21411
}
|
class
____ (Class<?>) ((ParameterizedType) type).getRawType();
}
// unsupported: generic arrays, type variables, wildcard types
return null;
}
/** Creates a raw data type. */
@SuppressWarnings({"unchecked", "rawtypes"})
static DataType createRawType(
DataTypeFactory typeFactory,
@Nullable Class<? extends TypeSerializer<?>> rawSerializer,
@Nullable Class<?> conversionClass) {
if (rawSerializer != null) {
return DataTypes.RAW(
(Class) createConversionClass(conversionClass),
instantiateRawSerializer(rawSerializer));
}
return typeFactory.createRawDataType(createConversionClass(conversionClass));
}
static Class<?> createConversionClass(@Nullable Class<?> conversionClass) {
if (conversionClass != null) {
return conversionClass;
}
return Object.class;
}
private static TypeSerializer<?> instantiateRawSerializer(
Class<? extends TypeSerializer<?>> rawSerializer) {
try {
return rawSerializer.newInstance();
} catch (Exception e) {
throw extractionError(
e,
"Cannot instantiate type serializer '%s' for RAW type. "
+ "Make sure the
|
return
|
java
|
apache__flink
|
flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/plan/rules/logical/SubQueryDecorrelator.java
|
{
"start": 59729,
"end": 62410
}
|
class ____ {
private final com.google.common.collect.Multimap<RelNode, CorRef> mapRefRelToCorRef;
private final SortedMap<CorrelationId, RelNode> mapCorToCorRel;
private final Map<RelNode, Set<CorrelationId>> mapSubQueryNodeToCorSet;
// TODO: create immutable copies of all maps
private CorelMap(
com.google.common.collect.Multimap<RelNode, CorRef> mapRefRelToCorRef,
SortedMap<CorrelationId, RelNode> mapCorToCorRel,
Map<RelNode, Set<CorrelationId>> mapSubQueryNodeToCorSet) {
this.mapRefRelToCorRef = mapRefRelToCorRef;
this.mapCorToCorRel = mapCorToCorRel;
this.mapSubQueryNodeToCorSet =
com.google.common.collect.ImmutableMap.copyOf(mapSubQueryNodeToCorSet);
}
@Override
public String toString() {
return "mapRefRelToCorRef="
+ mapRefRelToCorRef
+ "\nmapCorToCorRel="
+ mapCorToCorRel
+ "\nmapSubQueryNodeToCorSet="
+ mapSubQueryNodeToCorSet
+ "\n";
}
@Override
public boolean equals(Object obj) {
return obj == this
|| obj instanceof CorelMap
&& mapRefRelToCorRef.equals(((CorelMap) obj).mapRefRelToCorRef)
&& mapCorToCorRel.equals(((CorelMap) obj).mapCorToCorRel)
&& mapSubQueryNodeToCorSet.equals(
((CorelMap) obj).mapSubQueryNodeToCorSet);
}
@Override
public int hashCode() {
return Objects.hash(mapRefRelToCorRef, mapCorToCorRel, mapSubQueryNodeToCorSet);
}
/** Creates a CorelMap with given contents. */
public static CorelMap of(
com.google.common.collect.SortedSetMultimap<RelNode, CorRef> mapRefRelToCorVar,
SortedMap<CorrelationId, RelNode> mapCorToCorRel,
Map<RelNode, Set<CorrelationId>> mapSubQueryNodeToCorSet) {
return new CorelMap(mapRefRelToCorVar, mapCorToCorRel, mapSubQueryNodeToCorSet);
}
/**
* Returns whether there are any correlating variables in this statement.
*
* @return whether there are any correlating variables
*/
boolean hasCorrelation() {
return !mapCorToCorRel.isEmpty();
}
}
/**
* Frame describing the relational expression after decorrelation and where to find the output
* fields and correlation condition.
*/
private static
|
CorelMap
|
java
|
quarkusio__quarkus
|
independent-projects/arc/tests/src/test/java/io/quarkus/arc/test/defaultbean/DefaultClassBeanTest.java
|
{
"start": 1856,
"end": 2322
}
|
class ____ {
@Inject
GreetingBean bean;
@Inject
PingBean ping;
@Inject
Instance<Author> instance;
String hello() {
return bean.greet();
}
String ping() {
return ping.ping();
}
Instance<Author> instance() {
return instance;
}
}
@DefaultBean // This one is overriden by Producer.greetingBean()
@Singleton
static
|
Hello
|
java
|
elastic__elasticsearch
|
x-pack/qa/security-example-spi-extension/src/test/java/org/elasticsearch/example/realm/CustomRoleMappingRealmTests.java
|
{
"start": 1355,
"end": 3429
}
|
class ____ extends ESTestCase {
public void testCachingOfUserLookup() throws Exception {
final Environment env = super.newEnvironment();
final UserRoleMapper roleMapper = mock(UserRoleMapper.class);
final RealmConfig.RealmIdentifier realmIdentifier = new RealmConfig.RealmIdentifier(CustomRoleMappingRealm.TYPE, "test");
final RealmConfig realmConfig = new RealmConfig(
realmIdentifier,
Settings.builder().put(env.settings()).put(getFullSettingKey(realmIdentifier, RealmSettings.ORDER_SETTING), 0).build(),
env,
new ThreadContext(env.settings())
);
CustomRoleMappingRealm realm = new CustomRoleMappingRealm(realmConfig, roleMapper);
final AtomicInteger roleMappingCounter = new AtomicInteger(0);
mockRoleMapping(roleMapper, () -> {
roleMappingCounter.incrementAndGet();
return Set.of("role1", "role2");
});
PlainActionFuture<User> future = new PlainActionFuture<>();
realm.lookupUser(CustomRoleMappingRealm.USERNAME, future);
final User user1 = future.get();
assertThat(user1.principal(), is(CustomRoleMappingRealm.USERNAME));
assertThat(user1.roles(), arrayContainingInAnyOrder("role1", "role2"));
assertThat(roleMappingCounter.get(), is(1));
future = new PlainActionFuture<>();
realm.lookupUser(CustomRoleMappingRealm.USERNAME, future);
final User user2 = future.get();
assertThat(user2, sameInstance(user1));
assertThat(roleMappingCounter.get(), is(1));
}
@SuppressWarnings("unchecked")
private void mockRoleMapping(UserRoleMapper roleMapper, Supplier<Set<String>> supplier) {
doAnswer(inv -> {
ActionListener<Set<String>> listener = (ActionListener<Set<String>>) inv.getArguments()[1];
listener.onResponse(supplier.get());
return null;
}).when(roleMapper).resolveRoles(any(UserRoleMapper.UserData.class), any(ActionListener.class));
}
}
|
CustomRoleMappingRealmTests
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/bulkid/GlobalTemporaryTableMutationStrategyCompositeIdTest.java
|
{
"start": 1185,
"end": 1309
}
|
class ____ extends AbstractMutationStrategyCompositeIdTest {
public static
|
GlobalTemporaryTableMutationStrategyCompositeIdTest
|
java
|
spring-projects__spring-boot
|
loader/spring-boot-loader/src/test/java/org/springframework/boot/loader/zip/ZipLocalFileHeaderRecordTests.java
|
{
"start": 918,
"end": 3941
}
|
class ____ {
@Test
void loadLoadsData() throws Exception {
DataBlock dataBlock = new ByteArrayDataBlock(new byte[] { //
0x50, 0x4b, 0x03, 0x04, //
0x01, 0x00, //
0x02, 0x00, //
0x03, 0x00, //
0x04, 0x00, //
0x05, 0x00, //
0x06, 0x00, 0x00, 0x00, //
0x07, 0x00, 0x00, 0x00, //
0x08, 0x00, 0x00, 0x00, //
0x09, 0x00, //
0x0A, 0x00 }); //
ZipLocalFileHeaderRecord record = ZipLocalFileHeaderRecord.load(dataBlock, 0);
assertThat(record.versionNeededToExtract()).isEqualTo((short) 1);
assertThat(record.generalPurposeBitFlag()).isEqualTo((short) 2);
assertThat(record.compressionMethod()).isEqualTo((short) 3);
assertThat(record.lastModFileTime()).isEqualTo((short) 4);
assertThat(record.lastModFileDate()).isEqualTo((short) 5);
assertThat(record.crc32()).isEqualTo(6);
assertThat(record.compressedSize()).isEqualTo(7);
assertThat(record.uncompressedSize()).isEqualTo(8);
assertThat(record.fileNameLength()).isEqualTo((short) 9);
assertThat(record.extraFieldLength()).isEqualTo((short) 10);
}
@Test
void loadWhenSignatureDoesNotMatchThrowsException() {
DataBlock dataBlock = new ByteArrayDataBlock(new byte[] { //
0x51, 0x4b, 0x03, 0x04, //
0x01, 0x00, //
0x02, 0x00, //
0x03, 0x00, //
0x04, 0x00, //
0x05, 0x00, //
0x06, 0x00, 0x00, 0x00, //
0x07, 0x00, 0x00, 0x00, //
0x08, 0x00, 0x00, 0x00, //
0x09, 0x00, //
0x0A, 0x00 }); //
assertThatIOException().isThrownBy(() -> ZipLocalFileHeaderRecord.load(dataBlock, 0))
.withMessageContaining("'Local File Header Record' not found");
}
@Test
void sizeReturnsSize() {
ZipLocalFileHeaderRecord record = new ZipLocalFileHeaderRecord((short) 1, (short) 2, (short) 3, (short) 4,
(short) 5, 6, 7, 8, (short) 9, (short) 10);
assertThat(record.size()).isEqualTo(49L);
}
@Test
void withExtraFieldLengthReturnsUpdatedInstance() {
ZipLocalFileHeaderRecord record = new ZipLocalFileHeaderRecord((short) 1, (short) 2, (short) 3, (short) 4,
(short) 5, 6, 7, 8, (short) 9, (short) 10)
.withExtraFieldLength((short) 100);
assertThat(record.extraFieldLength()).isEqualTo((short) 100);
}
@Test
void withFileNameLengthReturnsUpdatedInstance() {
ZipLocalFileHeaderRecord record = new ZipLocalFileHeaderRecord((short) 1, (short) 2, (short) 3, (short) 4,
(short) 5, 6, 7, 8, (short) 9, (short) 10)
.withFileNameLength((short) 100);
assertThat(record.fileNameLength()).isEqualTo((short) 100);
}
@Test
void asByteArrayReturnsByteArray() throws Exception {
byte[] bytes = new byte[] { //
0x50, 0x4b, 0x03, 0x04, //
0x01, 0x00, //
0x02, 0x00, //
0x03, 0x00, //
0x04, 0x00, //
0x05, 0x00, //
0x06, 0x00, 0x00, 0x00, //
0x07, 0x00, 0x00, 0x00, //
0x08, 0x00, 0x00, 0x00, //
0x09, 0x00, //
0x0A, 0x00 }; //
ZipLocalFileHeaderRecord record = ZipLocalFileHeaderRecord.load(new ByteArrayDataBlock(bytes), 0);
assertThat(record.asByteArray()).isEqualTo(bytes);
}
}
|
ZipLocalFileHeaderRecordTests
|
java
|
elastic__elasticsearch
|
modules/reindex/src/test/java/org/elasticsearch/reindex/DeleteByQueryMetricsTests.java
|
{
"start": 856,
"end": 1624
}
|
class ____ extends ESTestCase {
private RecordingMeterRegistry recordingMeterRegistry;
private DeleteByQueryMetrics metrics;
@Before
public void createMetrics() {
recordingMeterRegistry = new RecordingMeterRegistry();
metrics = new DeleteByQueryMetrics(recordingMeterRegistry);
}
public void testRecordTookTime() {
int secondsTaken = randomIntBetween(1, 50);
metrics.recordTookTime(secondsTaken);
List<Measurement> measurements = recordingMeterRegistry.getRecorder()
.getMeasurements(InstrumentType.LONG_HISTOGRAM, DELETE_BY_QUERY_TIME_HISTOGRAM);
assertEquals(measurements.size(), 1);
assertEquals(measurements.get(0).getLong(), secondsTaken);
}
}
|
DeleteByQueryMetricsTests
|
java
|
apache__rocketmq
|
tools/src/main/java/org/apache/rocketmq/tools/command/controller/GetControllerMetaDataSubCommand.java
|
{
"start": 1338,
"end": 3227
}
|
class ____ implements SubCommand {
@Override
public String commandName() {
return "getControllerMetaData";
}
@Override
public String commandDesc() {
return "Get controller cluster's metadata.";
}
@Override
public Options buildCommandlineOptions(Options options) {
Option opt = new Option("a", "controllerAddress", true, "the address of controller");
opt.setRequired(true);
options.addOption(opt);
return options;
}
@Override
public void execute(CommandLine commandLine, Options options, RPCHook rpcHook) throws SubCommandException {
DefaultMQAdminExt defaultMQAdminExt = new DefaultMQAdminExt(rpcHook);
defaultMQAdminExt.setInstanceName(Long.toString(System.currentTimeMillis()));
String controllerAddress = commandLine.getOptionValue('a').trim();
try {
defaultMQAdminExt.start();
final GetMetaDataResponseHeader metaData = defaultMQAdminExt.getControllerMetaData(controllerAddress);
System.out.printf("\n#ControllerGroup\t%s", metaData.getGroup());
System.out.printf("\n#ControllerLeaderId\t%s", metaData.getControllerLeaderId());
System.out.printf("\n#ControllerLeaderAddress\t%s", metaData.getControllerLeaderAddress());
final String peers = metaData.getPeers();
if (StringUtils.isNotEmpty(peers)) {
final String[] peerList = peers.split(";");
for (String peer : peerList) {
System.out.printf("\n#Peer:\t%s", peer);
}
}
System.out.printf("\n");
} catch (Exception e) {
throw new SubCommandException(this.getClass().getSimpleName() + " command failed", e);
} finally {
defaultMQAdminExt.shutdown();
}
}
}
|
GetControllerMetaDataSubCommand
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/jpa/userguide/util/Article.java
|
{
"start": 411,
"end": 856
}
|
class ____ implements Serializable {
@Id
@GeneratedValue
private Integer id;
@ManyToOne(fetch = FetchType.LAZY)
private Author author;
public Article() {
}
public Article(Author author) {
this.author = author;
}
public Integer getId() {
return id;
}
public void setId(Integer id) {
this.id = id;
}
public Author getAuthor() {
return author;
}
public void setAuthor(Author author) {
this.author = author;
}
}
|
Article
|
java
|
spring-projects__spring-framework
|
spring-context/src/testFixtures/java/org/springframework/context/testfixture/context/annotation/InjectionPointConfiguration.java
|
{
"start": 1233,
"end": 1451
}
|
class ____ {
@Bean
@Scope(ConfigurableBeanFactory.SCOPE_PROTOTYPE)
public Class<?> callingClass(InjectionPoint injectionPoint) {
return injectionPoint.getMember().getDeclaringClass();
}
}
}
|
BeansConfiguration
|
java
|
google__guava
|
guava/src/com/google/common/io/CharSink.java
|
{
"start": 2355,
"end": 6719
}
|
class ____ {
/** Constructor for use by subclasses. */
protected CharSink() {}
/**
* Opens a new {@link Writer} for writing to this sink. This method returns a new, independent
* writer each time it is called.
*
* <p>The caller is responsible for ensuring that the returned writer is closed.
*
* @throws IOException if an I/O error occurs while opening the writer
*/
public abstract Writer openStream() throws IOException;
/**
* Opens a new buffered {@link Writer} for writing to this sink. The returned stream is not
* required to be a {@link BufferedWriter} in order to allow implementations to simply delegate to
* {@link #openStream()} when the stream returned by that method does not benefit from additional
* buffering. This method returns a new, independent writer each time it is called.
*
* <p>The caller is responsible for ensuring that the returned writer is closed.
*
* @throws IOException if an I/O error occurs while opening the writer
* @since 15.0 (in 14.0 with return type {@link BufferedWriter})
*/
public Writer openBufferedStream() throws IOException {
Writer writer = openStream();
return (writer instanceof BufferedWriter)
? (BufferedWriter) writer
: new BufferedWriter(writer);
}
/**
* Writes the given character sequence to this sink.
*
* @throws IOException if an I/O error while writing to this sink
*/
public void write(CharSequence charSequence) throws IOException {
checkNotNull(charSequence);
try (Writer out = openStream()) {
out.append(charSequence);
}
}
/**
* Writes the given lines of text to this sink with each line (including the last) terminated with
* the operating system's default line separator. This method is equivalent to {@code
* writeLines(lines, System.getProperty("line.separator"))}.
*
* @throws IOException if an I/O error occurs while writing to this sink
*/
public void writeLines(Iterable<? extends CharSequence> lines) throws IOException {
writeLines(lines, System.getProperty("line.separator"));
}
/**
* Writes the given lines of text to this sink with each line (including the last) terminated with
* the given line separator.
*
* @throws IOException if an I/O error occurs while writing to this sink
*/
public void writeLines(Iterable<? extends CharSequence> lines, String lineSeparator)
throws IOException {
writeLines(lines.iterator(), lineSeparator);
}
/**
* Writes the given lines of text to this sink with each line (including the last) terminated with
* the operating system's default line separator. This method is equivalent to {@code
* writeLines(lines, System.getProperty("line.separator"))}.
*
* @throws IOException if an I/O error occurs while writing to this sink
* @since 22.0 (but only since 33.4.0 in the Android flavor)
*/
public void writeLines(Stream<? extends CharSequence> lines) throws IOException {
writeLines(lines, LINE_SEPARATOR.value());
}
/**
* Writes the given lines of text to this sink with each line (including the last) terminated with
* the given line separator.
*
* @throws IOException if an I/O error occurs while writing to this sink
* @since 22.0 (but only since 33.4.0 in the Android flavor)
*/
public void writeLines(Stream<? extends CharSequence> lines, String lineSeparator)
throws IOException {
writeLines(lines.iterator(), lineSeparator);
}
private void writeLines(Iterator<? extends CharSequence> lines, String lineSeparator)
throws IOException {
checkNotNull(lineSeparator);
try (Writer out = openBufferedStream()) {
while (lines.hasNext()) {
out.append(lines.next()).append(lineSeparator);
}
}
}
/**
* Writes all the text from the given {@link Readable} (such as a {@link Reader}) to this sink.
* Does not close {@code readable} if it is {@code Closeable}.
*
* @return the number of characters written
* @throws IOException if an I/O error occurs while reading from {@code readable} or writing to
* this sink
*/
@CanIgnoreReturnValue
public long writeFrom(Readable readable) throws IOException {
checkNotNull(readable);
try (Writer out = openStream()) {
return CharStreams.copy(readable, out);
}
}
}
|
CharSink
|
java
|
elastic__elasticsearch
|
x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/chunking/WordBoundaryChunkingSettings.java
|
{
"start": 1036,
"end": 6103
}
|
class ____ implements ChunkingSettings {
public static final String NAME = "WordBoundaryChunkingSettings";
private static final ChunkingStrategy STRATEGY = ChunkingStrategy.WORD;
private static final int MAX_CHUNK_SIZE_LOWER_LIMIT = 10;
private static final Set<String> VALID_KEYS = Set.of(
ChunkingSettingsOptions.STRATEGY.toString(),
ChunkingSettingsOptions.MAX_CHUNK_SIZE.toString(),
ChunkingSettingsOptions.OVERLAP.toString()
);
protected final int maxChunkSize;
protected final int overlap;
public WordBoundaryChunkingSettings(Integer maxChunkSize, Integer overlap) {
this.maxChunkSize = maxChunkSize;
this.overlap = overlap;
}
public WordBoundaryChunkingSettings(StreamInput in) throws IOException {
maxChunkSize = in.readInt();
overlap = in.readInt();
}
@Override
public void validate() {
ValidationException validationException = new ValidationException();
if (maxChunkSize < MAX_CHUNK_SIZE_LOWER_LIMIT) {
validationException.addValidationError(
ChunkingSettingsOptions.MAX_CHUNK_SIZE + "[" + maxChunkSize + "] must be above " + MAX_CHUNK_SIZE_LOWER_LIMIT
);
}
if (overlap > maxChunkSize / 2) {
validationException.addValidationError(
ChunkingSettingsOptions.OVERLAP + "[" + overlap + "] must be less than or equal to half of max chunk size"
);
}
if (validationException.validationErrors().isEmpty() == false) {
throw validationException;
}
}
@Override
public Map<String, Object> asMap() {
return Map.of(
ChunkingSettingsOptions.STRATEGY.toString(),
STRATEGY.toString().toLowerCase(Locale.ROOT),
ChunkingSettingsOptions.MAX_CHUNK_SIZE.toString(),
maxChunkSize,
ChunkingSettingsOptions.OVERLAP.toString(),
overlap
);
}
@Override
public Integer maxChunkSize() {
return maxChunkSize;
}
public int overlap() {
return overlap;
}
public static WordBoundaryChunkingSettings fromMap(Map<String, Object> map) {
ValidationException validationException = new ValidationException();
var invalidSettings = map.keySet().stream().filter(key -> VALID_KEYS.contains(key) == false).toArray();
if (invalidSettings.length > 0) {
validationException.addValidationError(
Strings.format("Word based chunking settings can not have the following settings: %s", Arrays.toString(invalidSettings))
);
}
Integer maxChunkSize = InferenceUtils.extractRequiredPositiveIntegerGreaterThanOrEqualToMin(
map,
ChunkingSettingsOptions.MAX_CHUNK_SIZE.toString(),
MAX_CHUNK_SIZE_LOWER_LIMIT,
ModelConfigurations.CHUNKING_SETTINGS,
validationException
);
Integer overlap = null;
if (maxChunkSize != null) {
overlap = InferenceUtils.extractRequiredPositiveIntegerLessThanOrEqualToMax(
map,
ChunkingSettingsOptions.OVERLAP.toString(),
maxChunkSize / 2,
ModelConfigurations.CHUNKING_SETTINGS,
validationException
);
}
if (validationException.validationErrors().isEmpty() == false) {
throw validationException;
}
return new WordBoundaryChunkingSettings(maxChunkSize, overlap);
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
{
builder.field(ChunkingSettingsOptions.STRATEGY.toString(), STRATEGY);
builder.field(ChunkingSettingsOptions.MAX_CHUNK_SIZE.toString(), maxChunkSize);
builder.field(ChunkingSettingsOptions.OVERLAP.toString(), overlap);
}
builder.endObject();
return builder;
}
@Override
public String getWriteableName() {
    // Registered name used by the NamedWriteable infrastructure.
    return NAME;
}

@Override
public TransportVersion getMinimalSupportedVersion() {
    // These settings were introduced in 8.16.0.
    return TransportVersions.V_8_16_0;
}

@Override
public void writeTo(StreamOutput out) throws IOException {
    // Wire order: maxChunkSize first, then overlap. Presumably matched by a
    // StreamInput constructor — TODO confirm, the reader is not visible here.
    out.writeInt(maxChunkSize);
    out.writeInt(overlap);
}

@Override
public ChunkingStrategy getChunkingStrategy() {
    return STRATEGY;
}
/**
 * Two settings instances are equal when both maxChunkSize and overlap match.
 * hashCode is kept consistent with this definition.
 */
@Override
public boolean equals(Object o) {
    if (o == this) {
        return true;
    }
    if (o == null || o.getClass() != getClass()) {
        return false;
    }
    WordBoundaryChunkingSettings other = (WordBoundaryChunkingSettings) o;
    return Objects.equals(maxChunkSize, other.maxChunkSize) && Objects.equals(overlap, other.overlap);
}

@Override
public int hashCode() {
    return Objects.hash(maxChunkSize, overlap);
}

@Override
public String toString() {
    return Strings.toString(this);
}
}
|
WordBoundaryChunkingSettings
|
java
|
google__error-prone
|
core/src/main/java/com/google/errorprone/bugpatterns/RobolectricShadowDirectlyOn.java
|
{
"start": 1996,
"end": 3836
}
|
/**
 * Error Prone check that rewrites {@code Shadow.directlyOn(obj, SomeClass.class).method(args)}
 * call chains into the reflective {@code ClassParameter}-based form.
 * NOTE(review): the exact target overload is inferred from the fix construction below — confirm
 * against the Robolectric ReflectionHelpers API.
 */
class ____ extends BugChecker implements MethodInvocationTreeMatcher {
    // Matches only the two-argument Shadow.directlyOn(T, Class<T>) overload.
    private static final Matcher<ExpressionTree> MATCHER =
        MethodMatchers.staticMethod()
            .onClass("org.robolectric.shadow.api.Shadow")
            .withSignature("<T>directlyOn(T,java.lang.Class<T>)");

    @Override
    public Description matchMethodInvocation(MethodInvocationTree tree, VisitorState state) {
        if (!MATCHER.matches(tree, state)) {
            return NO_MATCH;
        }
        // Walk up two levels: directlyOn(...) -> member select (.method) -> outer invocation.
        TreePath path = state.getPath().getParentPath();
        if (!(path.getLeaf() instanceof MemberSelectTree)) {
            return NO_MATCH;
        }
        path = path.getParentPath();
        Tree parentTree = path.getLeaf();
        if (!(parentTree instanceof MethodInvocationTree parent)) {
            return NO_MATCH;
        }
        // Only fire when directlyOn(...) is the receiver of the outer call.
        if (!tree.equals(getReceiver(parent))) {
            return NO_MATCH;
        }
        SuggestedFix.Builder fix = SuggestedFix.builder();
        MethodSymbol symbol = getSymbol(parent);
        // Build ", \"methodName\", ClassParameter.from(ParamType.class, arg), ..." by zipping
        // the resolved formal parameter types (erased) with the actual argument source text.
        String argReplacement =
            Streams.concat(
                    Stream.of(state.getConstantExpression(symbol.getSimpleName().toString())),
                    Streams.zip(
                        symbol.getParameters().stream(),
                        parent.getArguments().stream(),
                        (p, a) ->
                            String.format(
                                "ClassParameter.from(%s.class, %s)",
                                qualifyType(state, fix, state.getTypes().erasure(p.asType())),
                                state.getSourceForNode(a))))
                .collect(joining(", ", ", ", ""));
        // Delete the trailing ".method(args)" and splice the new arguments into directlyOn(...).
        fix.replace(state.getEndPosition(tree), state.getEndPosition(parent), "")
            .postfixWith(getLast(tree.getArguments()), argReplacement)
            .addImport("org.robolectric.util.ReflectionHelpers.ClassParameter");
        return describeMatch(tree, fix.build());
    }
}
|
RobolectricShadowDirectlyOn
|
java
|
alibaba__nacos
|
config/src/main/java/com/alibaba/nacos/config/server/model/ConfigKey.java
|
{
"start": 770,
"end": 2000
}
|
class ____ implements Serializable {
private static final long serialVersionUID = -1748953484511867580L;
private String appName;
private String dataId;
private String group;
public ConfigKey() {
}
public String getAppName() {
return appName;
}
public void setAppName(String appName) {
this.appName = appName;
}
public String getDataId() {
return dataId;
}
public void setDataId(String dataId) {
this.dataId = dataId;
}
public String getGroup() {
return group;
}
public void setGroup(String group) {
this.group = group;
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
ConfigKey configKey = (ConfigKey) o;
return Objects.equals(appName, configKey.appName) && Objects.equals(dataId, configKey.dataId) && Objects.equals(
group, configKey.group);
}
@Override
public int hashCode() {
return Objects.hash(appName, dataId, group);
}
}
|
ConfigKey
|
java
|
netty__netty
|
codec-http/src/main/java/io/netty/handler/codec/http/multipart/HttpPostRequestEncoder.java
|
{
"start": 2868,
"end": 44692
}
|
/**
 * Controls how the POST body is percent-encoded / laid out. The surrounding class
 * references this type as {@code EncoderMode} (e.g. the constructor defaults to
 * {@code EncoderMode.RFC1738}), so the enum must carry that name.
 */
enum EncoderMode {
    /**
     * Legacy mode which should work for most. It is known to not work with OAUTH. For OAUTH use
     * {@link EncoderMode#RFC3986}. The W3C form recommendations this for submitting post form data.
     */
    RFC1738,

    /**
     * Mode which is more new and is used for OAUTH
     */
    RFC3986,

    /**
     * The HTML5 spec disallows mixed mode in multipart/form-data
     * requests. More concretely this means that more files submitted
     * under the same name will not be encoded using mixed mode, but
     * will be treated as distinct fields.
     *
     * Reference:
     * https://www.w3.org/TR/html5/forms.html#multipart-form-data
     */
    HTML5
}
// Characters whose URLEncoder (RFC 1738 style) output must be rewritten for RFC 3986 mode,
// and the percent-encoded forms that replace them (see encodeAttribute).
private static final String ASTERISK = "*";
private static final String PLUS = "+";
private static final String TILDE = "~";
private static final String ASTERISK_REPLACEMENT = "%2A";
private static final String PLUS_REPLACEMENT = "%20";
private static final String TILDE_REPLACEMENT = "%7E";

/**
 * Factory used to create InterfaceHttpData
 */
private final HttpDataFactory factory;

/**
 * Request to encode
 */
private final HttpRequest request;

/**
 * Default charset to use
 */
private final Charset charset;

/**
 * Chunked false by default
 */
private boolean isChunked;

/**
 * InterfaceHttpData for Body (without encoding)
 */
private final List<InterfaceHttpData> bodyListDatas;

/**
 * The final Multipart List of InterfaceHttpData including encoding
 */
final List<InterfaceHttpData> multipartHttpDatas;

/**
 * True if this request is a multipart request
 */
private final boolean isMultipart;

/**
 * If multipart, this is the boundary for the global multipart
 */
String multipartDataBoundary;

/**
 * If multipart, there could be internal multiparts (mixed) to the global multipart. Only one level is allowed.
 */
String multipartMixedBoundary;

/**
 * True once the header has been finalized; no more data may be added afterwards
 */
private boolean headerFinalized;

private final EncoderMode encoderMode;
/**
 *
 * @param request
 *            the request to encode
 * @param multipart
 *            True if the FORM is a ENCTYPE="multipart/form-data"
 * @throws NullPointerException
 *             for request
 * @throws ErrorDataEncoderException
 *             if the request is a TRACE
 */
public HttpPostRequestEncoder(HttpRequest request, boolean multipart) throws ErrorDataEncoderException {
    this(new DefaultHttpDataFactory(DefaultHttpDataFactory.MINSIZE), request, multipart,
            HttpConstants.DEFAULT_CHARSET, EncoderMode.RFC1738);
}

/**
 *
 * @param factory
 *            the factory used to create InterfaceHttpData
 * @param request
 *            the request to encode
 * @param multipart
 *            True if the FORM is a ENCTYPE="multipart/form-data"
 * @throws NullPointerException
 *             for request and factory
 * @throws ErrorDataEncoderException
 *             if the request is a TRACE
 */
public HttpPostRequestEncoder(HttpDataFactory factory, HttpRequest request, boolean multipart)
        throws ErrorDataEncoderException {
    this(factory, request, multipart, HttpConstants.DEFAULT_CHARSET, EncoderMode.RFC1738);
}

/**
 *
 * @param factory
 *            the factory used to create InterfaceHttpData
 * @param request
 *            the request to encode
 * @param multipart
 *            True if the FORM is a ENCTYPE="multipart/form-data"
 * @param charset
 *            the charset to use as default
 * @param encoderMode
 *            the mode for the encoder to use. See {@link EncoderMode} for the details.
 * @throws NullPointerException
 *             for request or charset or factory
 * @throws ErrorDataEncoderException
 *             if the request is a TRACE
 */
public HttpPostRequestEncoder(
        HttpDataFactory factory, HttpRequest request, boolean multipart, Charset charset,
        EncoderMode encoderMode)
        throws ErrorDataEncoderException {
    this.request = checkNotNull(request, "request");
    this.charset = checkNotNull(charset, "charset");
    this.factory = checkNotNull(factory, "factory");
    // TRACE requests must not carry a body (hence no encoder may be built for them).
    if (HttpMethod.TRACE.equals(request.method())) {
        throw new ErrorDataEncoderException("Cannot create a Encoder if request is a TRACE");
    }
    // Fill default values
    bodyListDatas = new ArrayList<InterfaceHttpData>();
    // default mode
    isLastChunk = false;
    isLastChunkSent = false;
    isMultipart = multipart;
    multipartHttpDatas = new ArrayList<InterfaceHttpData>();
    this.encoderMode = encoderMode;
    // A multipart body needs its global boundary up front.
    if (isMultipart) {
        initDataMultipart();
    }
}

/**
 * Clean all HttpDatas (on Disk) for the current request.
 */
public void cleanFiles() {
    factory.cleanRequestHttpData(request);
}
/**
 * Does the last non empty chunk already encoded so that next chunk will be empty (last chunk)
 */
private boolean isLastChunk;

/**
 * Last chunk already sent
 */
private boolean isLastChunkSent;

/**
 * The current FileUpload that is currently in encode process
 */
private FileUpload currentFileUpload;

/**
 * While adding a FileUpload, is the multipart currently in Mixed Mode
 */
private boolean duringMixedMode;

/**
 * Global Body size
 */
private long globalBodySize;

/**
 * Global Transfer progress
 */
private long globalProgress;

/**
 * True if this request is a Multipart request
 *
 * @return True if this request is a Multipart request
 */
public boolean isMultipart() {
    return isMultipart;
}

/**
 * Init the delimiter for Global Part (Data).
 */
private void initDataMultipart() {
    multipartDataBoundary = getNewMultipartDelimiter();
}

/**
 * Init the delimiter for Mixed Part (Mixed).
 */
private void initMixedMultipart() {
    multipartMixedBoundary = getNewMultipartDelimiter();
}
/**
 * @return a newly generated delimiter (either for DATA or MIXED)
 */
private static String getNewMultipartDelimiter() {
    // A random hex token; boundary uniqueness is probabilistic, as is customary.
    long token = ThreadLocalRandom.current().nextLong();
    return Long.toHexString(token);
}
/**
 * This getMethod returns a List of all InterfaceHttpData from body part.<br>
 * @return the list of InterfaceHttpData from Body part
 */
public List<InterfaceHttpData> getBodyListAttributes() {
    return bodyListDatas;
}

/**
 * Set the Body HttpDatas list
 *
 * @throws NullPointerException
 *             for datas
 * @throws ErrorDataEncoderException
 *             if the encoding is in error or if the finalize were already done
 */
public void setBodyHttpDatas(List<InterfaceHttpData> datas) throws ErrorDataEncoderException {
    ObjectUtil.checkNotNull(datas, "datas");
    // Reset all accumulated encoding state before re-adding the given datas one by one.
    globalBodySize = 0;
    bodyListDatas.clear();
    currentFileUpload = null;
    duringMixedMode = false;
    multipartHttpDatas.clear();
    for (InterfaceHttpData data : datas) {
        addBodyHttpData(data);
    }
}

/**
 * Add a simple attribute in the body as Name=Value
 *
 * @param name
 *            name of the parameter
 * @param value
 *            the value of the parameter
 * @throws NullPointerException
 *             for name
 * @throws ErrorDataEncoderException
 *             if the encoding is in error or if the finalize were already done
 */
public void addBodyAttribute(String name, String value) throws ErrorDataEncoderException {
    // A null value is encoded as an empty string.
    String svalue = value != null? value : StringUtil.EMPTY_STRING;
    Attribute data = factory.createAttribute(request, checkNotNull(name, "name"), svalue);
    addBodyHttpData(data);
}

/**
 * Add a file as a FileUpload
 *
 * @param name
 *            the name of the parameter
 * @param file
 *            the file to be uploaded (if not Multipart mode, only the filename will be included)
 * @param contentType
 *            the associated contentType for the File
 * @param isText
 *            True if this file should be transmitted in Text format (else binary)
 * @throws NullPointerException
 *             for name and file
 * @throws ErrorDataEncoderException
 *             if the encoding is in error or if the finalize were already done
 */
public void addBodyFileUpload(String name, File file, String contentType, boolean isText)
        throws ErrorDataEncoderException {
    addBodyFileUpload(name, file.getName(), file, contentType, isText);
}
/**
 * Add a file as a FileUpload
 *
 * @param name
 *            the name of the parameter
 * @param file
 *            the file to be uploaded (if not Multipart mode, only the filename will be included)
 * @param filename
 *            the filename to use for this File part, empty String will be ignored by
 *            the encoder
 * @param contentType
 *            the associated contentType for the File
 * @param isText
 *            True if this file should be transmitted in Text format (else binary)
 * @throws NullPointerException
 *             for name and file
 * @throws ErrorDataEncoderException
 *             if the encoding is in error or if the finalize were already done
 */
public void addBodyFileUpload(String name, String filename, File file, String contentType, boolean isText)
        throws ErrorDataEncoderException {
    checkNotNull(name, "name");
    checkNotNull(file, "file");
    if (filename == null) {
        filename = StringUtil.EMPTY_STRING;
    }
    // Fall back to the default text/binary content type when none was supplied.
    String scontentType = contentType;
    if (scontentType == null) {
        scontentType = isText
                ? HttpPostBodyUtil.DEFAULT_TEXT_CONTENT_TYPE
                : HttpPostBodyUtil.DEFAULT_BINARY_CONTENT_TYPE;
    }
    // Binary uploads carry an explicit Content-Transfer-Encoding; text uploads carry none.
    String contentTransferEncoding = isText ? null : HttpPostBodyUtil.TransferEncodingMechanism.BINARY.value();
    FileUpload fileUpload = factory.createFileUpload(request, name, filename, scontentType,
            contentTransferEncoding, null, file.length());
    try {
        fileUpload.setContent(file);
    } catch (IOException e) {
        throw new ErrorDataEncoderException(e);
    }
    addBodyHttpData(fileUpload);
}
/**
 * Add a series of Files associated with one File parameter
 *
 * @param name
 *            the name of the parameter
 * @param file
 *            the array of files
 * @param contentType
 *            the array of content Types associated with each file
 * @param isText
 *            the array of isText attribute (False meaning binary mode) for each file
 * @throws IllegalArgumentException
 *             also throws if array have different sizes
 * @throws ErrorDataEncoderException
 *             if the encoding is in error or if the finalize were already done
 */
public void addBodyFileUploads(String name, File[] file, String[] contentType, boolean[] isText)
        throws ErrorDataEncoderException {
    // Fix: the original check used &&, so a length mismatch in only ONE of the
    // companion arrays slipped through and caused an ArrayIndexOutOfBoundsException
    // in the loop below instead of the documented IllegalArgumentException.
    if (file.length != contentType.length || file.length != isText.length) {
        throw new IllegalArgumentException("Different array length");
    }
    for (int i = 0; i < file.length; i++) {
        addBodyFileUpload(name, file[i], contentType[i], isText[i]);
    }
}
/**
 * Add the InterfaceHttpData to the Body list
 *
 * @throws NullPointerException
 *             for data
 * @throws ErrorDataEncoderException
 *             if the encoding is in error or if the finalize were already done
 */
public void addBodyHttpData(InterfaceHttpData data) throws ErrorDataEncoderException {
    if (headerFinalized) {
        throw new ErrorDataEncoderException("Cannot add value once finalized");
    }
    bodyListDatas.add(checkNotNull(data, "data"));
    if (!isMultipart) {
        // Url-encoded form: every item (attribute or file upload) degrades to a
        // name=value attribute; size bookkeeping counts "name=" and trailing "&".
        if (data instanceof Attribute) {
            Attribute attribute = (Attribute) data;
            try {
                // name=value& with encoded name and attribute
                String key = encodeAttribute(attribute.getName(), charset);
                String value = encodeAttribute(attribute.getValue(), charset);
                Attribute newattribute = factory.createAttribute(request, key, value);
                multipartHttpDatas.add(newattribute);
                globalBodySize += newattribute.getName().length() + 1 + newattribute.length() + 1;
            } catch (IOException e) {
                throw new ErrorDataEncoderException(e);
            }
        } else if (data instanceof FileUpload) {
            // since not Multipart, only name=filename => Attribute
            FileUpload fileUpload = (FileUpload) data;
            // name=filename& with encoded name and filename
            String key = encodeAttribute(fileUpload.getName(), charset);
            String value = encodeAttribute(fileUpload.getFilename(), charset);
            Attribute newattribute = factory.createAttribute(request, key, value);
            multipartHttpDatas.add(newattribute);
            globalBodySize += newattribute.getName().length() + 1 + newattribute.length() + 1;
        }
        return;
    }
    /*
     * Logic:
     * if not Attribute:
     *      add Data to body list
     *      if (duringMixedMode)
     *          add endmixedmultipart delimiter
     *          currentFileUpload = null
     *          duringMixedMode = false;
     *      add multipart delimiter, multipart body header and Data to multipart list
     *      reset currentFileUpload, duringMixedMode
     * if FileUpload: take care of multiple file for one field => mixed mode
     *      if (duringMixedMode)
     *          if (currentFileUpload.name == data.name)
     *              add mixedmultipart delimiter, mixedmultipart body header and Data to multipart list
     *          else
     *              add endmixedmultipart delimiter, multipart body header and Data to multipart list
     *              currentFileUpload = data
     *              duringMixedMode = false;
     *      else
     *          if (currentFileUpload.name == data.name)
     *              change multipart body header of previous file into multipart list to
     *              mixedmultipart start, mixedmultipart body header
     *              add mixedmultipart delimiter, mixedmultipart body header and Data to multipart list
     *              duringMixedMode = true
     *          else
     *              add multipart delimiter, multipart body header and Data to multipart list
     *              currentFileUpload = data
     *              duringMixedMode = false;
     * Do not add last delimiter! Could be:
     * if duringmixedmode: endmixedmultipart + endmultipart
     * else only endmultipart
     */
    if (data instanceof Attribute) {
        // An attribute always terminates any in-progress mixed (multi-file) section.
        if (duringMixedMode) {
            InternalAttribute internal = new InternalAttribute(charset);
            internal.addValue("\r\n--" + multipartMixedBoundary + "--");
            multipartHttpDatas.add(internal);
            multipartMixedBoundary = null;
            currentFileUpload = null;
            duringMixedMode = false;
        }
        InternalAttribute internal = new InternalAttribute(charset);
        if (!multipartHttpDatas.isEmpty()) {
            // previously a data field so CRLF
            internal.addValue("\r\n");
        }
        internal.addValue("--" + multipartDataBoundary + "\r\n");
        // content-disposition: form-data; name="field1"
        Attribute attribute = (Attribute) data;
        internal.addValue(HttpHeaderNames.CONTENT_DISPOSITION + ": " + HttpHeaderValues.FORM_DATA + "; "
                + HttpHeaderValues.NAME + "=\"" + attribute.getName() + "\"\r\n");
        // Add Content-Length: xxx
        internal.addValue(HttpHeaderNames.CONTENT_LENGTH + ": " +
                attribute.length() + "\r\n");
        Charset localcharset = attribute.getCharset();
        if (localcharset != null) {
            // Content-Type: text/plain; charset=charset
            internal.addValue(HttpHeaderNames.CONTENT_TYPE + ": " +
                    HttpPostBodyUtil.DEFAULT_TEXT_CONTENT_TYPE + "; " +
                    HttpHeaderValues.CHARSET + '='
                    + localcharset.name() + "\r\n");
        }
        // CRLF between body header and data
        internal.addValue("\r\n");
        multipartHttpDatas.add(internal);
        multipartHttpDatas.add(data);
        globalBodySize += attribute.length() + internal.size();
    } else if (data instanceof FileUpload) {
        FileUpload fileUpload = (FileUpload) data;
        InternalAttribute internal = new InternalAttribute(charset);
        if (!multipartHttpDatas.isEmpty()) {
            // previously a data field so CRLF
            internal.addValue("\r\n");
        }
        boolean localMixed;
        if (duringMixedMode) {
            if (currentFileUpload != null && currentFileUpload.getName().equals(fileUpload.getName())) {
                // continue a mixed mode
                localMixed = true;
            } else {
                // end a mixed mode
                // add endmixedmultipart delimiter, multipart body header
                // and
                // Data to multipart list
                internal.addValue("--" + multipartMixedBoundary + "--");
                multipartHttpDatas.add(internal);
                multipartMixedBoundary = null;
                // start a new one (could be replaced if mixed start again
                // from here
                internal = new InternalAttribute(charset);
                internal.addValue("\r\n");
                localMixed = false;
                // new currentFileUpload and no more in Mixed mode
                currentFileUpload = fileUpload;
                duringMixedMode = false;
            }
        } else {
            if (encoderMode != EncoderMode.HTML5 && currentFileUpload != null
                    && currentFileUpload.getName().equals(fileUpload.getName())) {
                // create a new mixed mode (from previous file)
                // change multipart body header of previous file into
                // multipart list to
                // mixedmultipart start, mixedmultipart body header
                // change Internal (size()-2 position in multipartHttpDatas)
                // from (line starting with *)
                // --AaB03x
                // * Content-Disposition: form-data; name="files";
                // filename="file1.txt"
                // Content-Type: text/plain
                // to (lines starting with *)
                // --AaB03x
                // * Content-Disposition: form-data; name="files"
                // * Content-Type: multipart/mixed; boundary=BbC04y
                // *
                // * --BbC04y
                // * Content-Disposition: attachment; filename="file1.txt"
                // Content-Type: text/plain
                initMixedMultipart();
                InternalAttribute pastAttribute = (InternalAttribute) multipartHttpDatas.get(multipartHttpDatas
                        .size() - 2);
                // remove past size
                globalBodySize -= pastAttribute.size();
                StringBuilder replacement = new StringBuilder(
                        139 + multipartDataBoundary.length() + multipartMixedBoundary.length() * 2 +
                                fileUpload.getFilename().length() + fileUpload.getName().length())
                        .append("--")
                        .append(multipartDataBoundary)
                        .append("\r\n")
                        .append(HttpHeaderNames.CONTENT_DISPOSITION)
                        .append(": ")
                        .append(HttpHeaderValues.FORM_DATA)
                        .append("; ")
                        .append(HttpHeaderValues.NAME)
                        .append("=\"")
                        .append(fileUpload.getName())
                        .append("\"\r\n")
                        .append(HttpHeaderNames.CONTENT_TYPE)
                        .append(": ")
                        .append(HttpHeaderValues.MULTIPART_MIXED)
                        .append("; ")
                        .append(HttpHeaderValues.BOUNDARY)
                        .append('=')
                        .append(multipartMixedBoundary)
                        .append("\r\n\r\n")
                        .append("--")
                        .append(multipartMixedBoundary)
                        .append("\r\n")
                        .append(HttpHeaderNames.CONTENT_DISPOSITION)
                        .append(": ")
                        .append(HttpHeaderValues.ATTACHMENT);
                // NOTE(review): the emptiness check uses fileUpload's filename but the appended
                // value is currentFileUpload's (the PREVIOUS upload, whose header is being
                // rewritten). The two may differ — confirm against upstream netty whether this
                // mismatch is intended.
                if (!fileUpload.getFilename().isEmpty()) {
                    replacement.append("; ")
                            .append(HttpHeaderValues.FILENAME)
                            .append("=\"")
                            .append(currentFileUpload.getFilename())
                            .append('"');
                }
                replacement.append("\r\n");
                pastAttribute.setValue(replacement.toString(), 1);
                pastAttribute.setValue("", 2);
                // update past size
                globalBodySize += pastAttribute.size();
                // now continue
                // add mixedmultipart delimiter, mixedmultipart body header
                // and
                // Data to multipart list
                localMixed = true;
                duringMixedMode = true;
            } else {
                // a simple new multipart
                // add multipart delimiter, multipart body header and Data
                // to multipart list
                localMixed = false;
                currentFileUpload = fileUpload;
                duringMixedMode = false;
            }
        }
        if (localMixed) {
            // add mixedmultipart delimiter, mixedmultipart body header and
            // Data to multipart list
            internal.addValue("--" + multipartMixedBoundary + "\r\n");
            if (fileUpload.getFilename().isEmpty()) {
                // Content-Disposition: attachment
                internal.addValue(HttpHeaderNames.CONTENT_DISPOSITION + ": "
                        + HttpHeaderValues.ATTACHMENT + "\r\n");
            } else {
                // Content-Disposition: attachment; filename="file1.txt"
                internal.addValue(HttpHeaderNames.CONTENT_DISPOSITION + ": "
                        + HttpHeaderValues.ATTACHMENT + "; "
                        + HttpHeaderValues.FILENAME + "=\"" + fileUpload.getFilename() + "\"\r\n");
            }
        } else {
            internal.addValue("--" + multipartDataBoundary + "\r\n");
            if (fileUpload.getFilename().isEmpty()) {
                // Content-Disposition: form-data; name="files";
                internal.addValue(HttpHeaderNames.CONTENT_DISPOSITION + ": " + HttpHeaderValues.FORM_DATA + "; "
                        + HttpHeaderValues.NAME + "=\"" + fileUpload.getName() + "\"\r\n");
            } else {
                // Content-Disposition: form-data; name="files";
                // filename="file1.txt"
                internal.addValue(HttpHeaderNames.CONTENT_DISPOSITION + ": " + HttpHeaderValues.FORM_DATA + "; "
                        + HttpHeaderValues.NAME + "=\"" + fileUpload.getName() + "\"; "
                        + HttpHeaderValues.FILENAME + "=\"" + fileUpload.getFilename() + "\"\r\n");
            }
        }
        // Add Content-Length: xxx
        internal.addValue(HttpHeaderNames.CONTENT_LENGTH + ": " +
                fileUpload.length() + "\r\n");
        // Content-Type: image/gif
        // Content-Type: text/plain; charset=ISO-8859-1
        // Content-Transfer-Encoding: binary
        internal.addValue(HttpHeaderNames.CONTENT_TYPE + ": " + fileUpload.getContentType());
        String contentTransferEncoding = fileUpload.getContentTransferEncoding();
        if (contentTransferEncoding != null
                && contentTransferEncoding.equals(HttpPostBodyUtil.TransferEncodingMechanism.BINARY.value())) {
            internal.addValue("\r\n" + HttpHeaderNames.CONTENT_TRANSFER_ENCODING + ": "
                    + HttpPostBodyUtil.TransferEncodingMechanism.BINARY.value() + "\r\n\r\n");
        } else if (fileUpload.getCharset() != null) {
            internal.addValue("; " + HttpHeaderValues.CHARSET + '=' + fileUpload.getCharset().name() + "\r\n\r\n");
        } else {
            internal.addValue("\r\n\r\n");
        }
        multipartHttpDatas.add(internal);
        multipartHttpDatas.add(data);
        globalBodySize += fileUpload.length() + internal.size();
    }
}

/**
 * Iterator to be used when encoding will be called chunk after chunk
 */
private ListIterator<InterfaceHttpData> iterator;
/**
 * Finalize the request by preparing the Header in the request and returns the request ready to be sent.<br>
 * Once finalized, no data must be added.<br>
 * If the request does not need chunk (isChunked() == false), this request is the only object to send to the remote
 * server.
 *
 * @return the request object (chunked or not according to size of body)
 * @throws ErrorDataEncoderException
 *             if the encoding is in error or if the finalize were already done
 */
public HttpRequest finalizeRequest() throws ErrorDataEncoderException {
    // Finalize the multipartHttpDatas
    if (!headerFinalized) {
        if (isMultipart) {
            // Append closing boundary (and closing mixed boundary if a mixed section is open).
            InternalAttribute internal = new InternalAttribute(charset);
            if (duringMixedMode) {
                internal.addValue("\r\n--" + multipartMixedBoundary + "--");
            }
            internal.addValue("\r\n--" + multipartDataBoundary + "--\r\n");
            multipartHttpDatas.add(internal);
            multipartMixedBoundary = null;
            currentFileUpload = null;
            duringMixedMode = false;
            globalBodySize += internal.size();
        }
        headerFinalized = true;
    } else {
        throw new ErrorDataEncoderException("Header already encoded");
    }
    HttpHeaders headers = request.headers();
    List<String> contentTypes = headers.getAll(HttpHeaderNames.CONTENT_TYPE);
    List<String> transferEncoding = headers.getAll(HttpHeaderNames.TRANSFER_ENCODING);
    if (contentTypes != null) {
        // Re-add all Content-Type values except form ones, which this encoder owns.
        headers.remove(HttpHeaderNames.CONTENT_TYPE);
        for (String contentType : contentTypes) {
            // "multipart/form-data; boundary=--89421926422648"
            // NOTE(review): toLowerCase() uses the default locale; Locale.ROOT would be
            // safer for header comparison (e.g. Turkish dotless-i) — confirm upstream.
            String lowercased = contentType.toLowerCase();
            if (lowercased.startsWith(HttpHeaderValues.MULTIPART_FORM_DATA.toString()) ||
                    lowercased.startsWith(HttpHeaderValues.APPLICATION_X_WWW_FORM_URLENCODED.toString())) {
                // ignore
            } else {
                headers.add(HttpHeaderNames.CONTENT_TYPE, contentType);
            }
        }
    }
    if (isMultipart) {
        String value = HttpHeaderValues.MULTIPART_FORM_DATA + "; " + HttpHeaderValues.BOUNDARY + '='
                + multipartDataBoundary;
        headers.add(HttpHeaderNames.CONTENT_TYPE, value);
    } else {
        // Not multipart
        headers.add(HttpHeaderNames.CONTENT_TYPE, HttpHeaderValues.APPLICATION_X_WWW_FORM_URLENCODED);
    }
    // Now consider size for chunk or not
    long realSize = globalBodySize;
    if (!isMultipart) {
        realSize -= 1; // last '&' removed
    }
    iterator = multipartHttpDatas.listIterator();
    headers.set(HttpHeaderNames.CONTENT_LENGTH, String.valueOf(realSize));
    if (realSize > HttpPostBodyUtil.chunkSize || isMultipart) {
        isChunked = true;
        if (transferEncoding != null) {
            headers.remove(HttpHeaderNames.TRANSFER_ENCODING);
            for (CharSequence v : transferEncoding) {
                if (HttpHeaderValues.CHUNKED.contentEqualsIgnoreCase(v)) {
                    // ignore
                } else {
                    headers.add(HttpHeaderNames.TRANSFER_ENCODING, v);
                }
            }
        }
        HttpUtil.setTransferEncodingChunked(request, true);
        // wrap to hide the possible content
        return new WrappedHttpRequest(request);
    } else {
        // get the only one body and set it to the request
        HttpContent chunk = nextChunk();
        if (request instanceof FullHttpRequest) {
            FullHttpRequest fullRequest = (FullHttpRequest) request;
            ByteBuf chunkContent = chunk.content();
            if (fullRequest.content() != chunkContent) {
                fullRequest.content().clear().writeBytes(chunkContent);
                chunkContent.release();
            }
            return fullRequest;
        } else {
            return new WrappedFullHttpRequest(request, chunk);
        }
    }
}
/**
 * @return True if the request is by Chunk
 */
public boolean isChunked() {
    // Set by finalizeRequest() when the body exceeds chunkSize or is multipart.
    return isChunked;
}
/**
 * Encode one attribute
 *
 * @return the encoded attribute
 * @throws ErrorDataEncoderException
 *             if the encoding is in error
 */
private String encodeAttribute(String s, Charset charset) throws ErrorDataEncoderException {
    if (s == null) {
        return "";
    }
    try {
        String encoded = URLEncoder.encode(s, charset.name());
        if (encoderMode != EncoderMode.RFC3986) {
            return encoded;
        }
        // URLEncoder produces RFC 1738 style output; patch the characters whose
        // encoding differs under RFC 3986.
        return encoded.replace(ASTERISK, ASTERISK_REPLACEMENT)
                .replace(PLUS, PLUS_REPLACEMENT)
                .replace(TILDE, TILDE_REPLACEMENT);
    } catch (UnsupportedEncodingException e) {
        throw new ErrorDataEncoderException(charset.name(), e);
    }
}
/**
 * The ByteBuf currently used by the encoder
 */
private ByteBuf currentBuffer;

/**
 * The current InterfaceHttpData to encode (used if more chunks are available)
 */
private InterfaceHttpData currentData;

/**
 * If not multipart, does the currentBuffer stands for the Key or for the Value
 */
private boolean isKey = true;

/**
 *
 * @return the next ByteBuf to send as an HttpChunk and modifying currentBuffer accordingly
 */
private ByteBuf fillByteBuf() {
    int length = currentBuffer.readableBytes();
    if (length > HttpPostBodyUtil.chunkSize) {
        // More than one chunk buffered: hand out exactly chunkSize bytes, keep the rest.
        return currentBuffer.readRetainedSlice(HttpPostBodyUtil.chunkSize);
    } else {
        // to continue: hand over the whole buffer and clear the reference.
        ByteBuf slice = currentBuffer;
        currentBuffer = null;
        return slice;
    }
}
/**
 * From the current context (currentBuffer and currentData), returns the next HttpChunk (if possible) trying to get
 * sizeleft bytes more into the currentBuffer. This is the Multipart version.
 *
 * @param sizeleft
 *            the number of bytes to try to get from currentData
 * @return the next HttpChunk or null if not enough bytes were found
 * @throws ErrorDataEncoderException
 *             if the encoding is in error
 */
private HttpContent encodeNextChunkMultipart(int sizeleft) throws ErrorDataEncoderException {
    if (currentData == null) {
        return null;
    }
    ByteBuf buffer;
    if (currentData instanceof InternalAttribute) {
        // Internal (header/boundary) attributes are emitted whole.
        buffer = ((InternalAttribute) currentData).toByteBuf();
        currentData = null;
    } else {
        try {
            buffer = ((HttpData) currentData).getChunk(sizeleft);
        } catch (IOException e) {
            throw new ErrorDataEncoderException(e);
        }
        if (buffer.capacity() == 0) {
            // end for current InterfaceHttpData, need more data
            currentData = null;
            return null;
        }
    }
    if (currentBuffer == null) {
        currentBuffer = buffer;
    } else {
        currentBuffer = wrappedBuffer(currentBuffer, buffer);
    }
    // Only emit once a full chunk has accumulated; otherwise signal the caller to
    // move on to the next data item.
    if (currentBuffer.readableBytes() < HttpPostBodyUtil.chunkSize) {
        currentData = null;
        return null;
    }
    buffer = fillByteBuf();
    return new DefaultHttpContent(buffer);
}
/**
 * From the current context (currentBuffer and currentData), returns the next HttpChunk (if possible) trying to get
 * sizeleft bytes more into the currentBuffer. This is the UrlEncoded version.
 *
 * @param sizeleft
 *            the number of bytes to try to get from currentData
 * @return the next HttpChunk or null if not enough bytes were found
 * @throws ErrorDataEncoderException
 *             if the encoding is in error
 */
private HttpContent encodeNextChunkUrlEncoded(int sizeleft) throws ErrorDataEncoderException {
    if (currentData == null) {
        return null;
    }
    int size = sizeleft;
    ByteBuf buffer;
    // Set name= (isKey tracks whether the key part has been emitted yet)
    if (isKey) {
        String key = currentData.getName();
        buffer = wrappedBuffer(key.getBytes(charset));
        isKey = false;
        if (currentBuffer == null) {
            currentBuffer = wrappedBuffer(buffer, wrappedBuffer("=".getBytes(charset)));
        } else {
            currentBuffer = wrappedBuffer(currentBuffer, buffer, wrappedBuffer("=".getBytes(charset)));
        }
        // continue
        size -= buffer.readableBytes() + 1;
        if (currentBuffer.readableBytes() >= HttpPostBodyUtil.chunkSize) {
            buffer = fillByteBuf();
            return new DefaultHttpContent(buffer);
        }
    }
    // Put value into buffer
    try {
        buffer = ((HttpData) currentData).getChunk(size);
    } catch (IOException e) {
        throw new ErrorDataEncoderException(e);
    }
    // Figure out delimiter: a short read means this data item is exhausted, so a
    // '&' separator is needed iff more items follow.
    ByteBuf delimiter = null;
    if (buffer.readableBytes() < size) {
        isKey = true;
        currentData = null;
        delimiter = iterator.hasNext() ? wrappedBuffer("&".getBytes(charset)) : null;
    }
    // End for current InterfaceHttpData, need potentially more data
    if (buffer.capacity() == 0) {
        isKey = true;
        currentData = null;
        if (currentBuffer == null) {
            if (delimiter == null) {
                return null;
            } else {
                currentBuffer = delimiter;
            }
        } else {
            if (delimiter != null) {
                currentBuffer = wrappedBuffer(currentBuffer, delimiter);
            }
        }
        if (currentBuffer.readableBytes() >= HttpPostBodyUtil.chunkSize) {
            buffer = fillByteBuf();
            return new DefaultHttpContent(buffer);
        }
        return null;
    }
    // Put it all together: name=value&
    if (currentBuffer == null) {
        if (delimiter != null) {
            currentBuffer = wrappedBuffer(buffer, delimiter);
        } else {
            currentBuffer = buffer;
        }
    } else {
        if (delimiter != null) {
            currentBuffer = wrappedBuffer(currentBuffer, buffer, delimiter);
        } else {
            currentBuffer = wrappedBuffer(currentBuffer, buffer);
        }
    }
    if (currentBuffer.readableBytes() >= HttpPostBodyUtil.chunkSize) {
        return new DefaultHttpContent(fillByteBuf());
    }
    return null;
}
@Override
public void close() throws Exception {
    // NO since the user can want to reuse (broadcast for instance)
    // cleanFiles();
}

// Deprecated overload: delegates to the allocator-based variant.
@Deprecated
@Override
public HttpContent readChunk(ChannelHandlerContext ctx) throws Exception {
    return readChunk(ctx.alloc());
}

/**
 * Returns the next available HttpChunk. The caller is responsible to test if this chunk is the last one (isLast()),
 * in order to stop calling this getMethod.
 *
 * @return the next available HttpChunk
 * @throws ErrorDataEncoderException
 *             if the encoding is in error
 */
@Override
public HttpContent readChunk(ByteBufAllocator allocator) throws Exception {
    if (isLastChunkSent) {
        return null;
    } else {
        HttpContent nextChunk = nextChunk();
        // Track transfer progress for progress() reporting.
        globalProgress += nextChunk.content().readableBytes();
        return nextChunk;
    }
}
/**
 * Returns the next available HttpChunk. The caller is responsible to test if this chunk is the last one (isLast()),
 * in order to stop calling this getMethod.
 *
 * @return the next available HttpChunk
 * @throws ErrorDataEncoderException
 *             if the encoding is in error
 */
private HttpContent nextChunk() throws ErrorDataEncoderException {
    if (isLastChunk) {
        isLastChunkSent = true;
        return LastHttpContent.EMPTY_LAST_CONTENT;
    }
    // first test if previous buffer is not empty
    int size = calculateRemainingSize();
    if (size <= 0) {
        // NextChunk from buffer: a full chunk is already buffered.
        ByteBuf buffer = fillByteBuf();
        return new DefaultHttpContent(buffer);
    }
    // size > 0
    if (currentData != null) {
        // continue to read data from the partially-consumed current item
        HttpContent chunk;
        if (isMultipart) {
            chunk = encodeNextChunkMultipart(size);
        } else {
            chunk = encodeNextChunkUrlEncoded(size);
        }
        if (chunk != null) {
            // NextChunk from data
            return chunk;
        }
        size = calculateRemainingSize();
    }
    if (!iterator.hasNext()) {
        return lastChunk();
    }
    // Pull further items until a full chunk accumulates or items run out.
    while (size > 0 && iterator.hasNext()) {
        currentData = iterator.next();
        HttpContent chunk;
        if (isMultipart) {
            chunk = encodeNextChunkMultipart(size);
        } else {
            chunk = encodeNextChunkUrlEncoded(size);
        }
        if (chunk == null) {
            // not enough
            size = calculateRemainingSize();
            continue;
        }
        // NextChunk from data
        return chunk;
    }
    // end since no more data
    return lastChunk();
}
private int calculateRemainingSize() {
int size = HttpPostBodyUtil.chunkSize;
if (currentBuffer != null) {
size -= currentBuffer.readableBytes();
}
return size;
}
private HttpContent lastChunk() {
isLastChunk = true;
if (currentBuffer == null) {
isLastChunkSent = true;
// LastChunk with no more data
return LastHttpContent.EMPTY_LAST_CONTENT;
}
// NextChunk as last non empty from buffer
ByteBuf buffer = currentBuffer;
currentBuffer = null;
return new DefaultHttpContent(buffer);
}
@Override
public boolean isEndOfInput() throws Exception {
return isLastChunkSent;
}
@Override
public long length() {
return isMultipart? globalBodySize : globalBodySize - 1;
}
@Override
public long progress() {
return globalProgress;
}
/**
* Exception when an error occurs while encoding
*/
public static
|
EncoderMode
|
java
|
alibaba__fastjson
|
src/test/java/com/alibaba/fastjson/deserializer/issues3796/TestIssues3796.java
|
{
"start": 275,
"end": 405
}
|
class ____ {
@Test
public void testIssues3796() {
JSON.parseObject("{}", LargeJavaBean.class);
}
}
|
TestIssues3796
|
java
|
apache__flink
|
flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/plan/reuse/ScanReuser.java
|
{
"start": 4868,
"end": 4989
}
|
class ____ not reuse all sources, sources with same digest will be reused by {@link
* SubplanReuser}.
*
* <p>NOTE: This
|
do
|
java
|
junit-team__junit5
|
junit-jupiter-api/src/main/java/org/junit/jupiter/api/extension/AfterAllCallback.java
|
{
"start": 840,
"end": 2444
}
|
class ____.
*
* <h2>Constructor Requirements</h2>
*
* <p>Consult the documentation in {@link Extension} for details on
* constructor requirements.
*
* <h2>Wrapping Behavior</h2>
*
* <p>JUnit Jupiter guarantees <em>wrapping behavior</em> for multiple
* registered extensions that implement lifecycle callbacks such as
* {@link BeforeAllCallback}, {@link AfterAllCallback},
* {@link BeforeClassTemplateInvocationCallback},
* {@link AfterClassTemplateInvocationCallback}, {@link BeforeEachCallback},
* {@link AfterEachCallback}, {@link BeforeTestExecutionCallback}, and
* {@link AfterTestExecutionCallback}.
*
* <p>That means that, given two extensions {@code Extension1} and
* {@code Extension2} with {@code Extension1} registered before
* {@code Extension2}, any "before" callbacks implemented by {@code Extension1}
* are guaranteed to execute before any "before" callbacks implemented by
* {@code Extension2}. Similarly, given the two same two extensions registered
* in the same order, any "after" callbacks implemented by {@code Extension1}
* are guaranteed to execute after any "after" callbacks implemented by
* {@code Extension2}. {@code Extension1} is therefore said to <em>wrap</em>
* {@code Extension2}.
*
* @since 5.0
* @see org.junit.jupiter.api.AfterAll
* @see BeforeAllCallback
* @see BeforeEachCallback
* @see AfterEachCallback
* @see BeforeTestExecutionCallback
* @see AfterTestExecutionCallback
* @see BeforeClassTemplateInvocationCallback
* @see AfterClassTemplateInvocationCallback
*/
@FunctionalInterface
@API(status = STABLE, since = "5.0")
public
|
level
|
java
|
apache__hadoop
|
hadoop-tools/hadoop-gridmix/src/main/java/org/apache/hadoop/mapred/gridmix/CompressionEmulationUtil.java
|
{
"start": 19095,
"end": 22711
}
|
class
____<? extends CompressionCodec> codecClass =
org.apache.hadoop.mapred.FileOutputFormat
.getOutputCompressorClass(jConf,
GzipCodec.class);
// get the codec implementation
CompressionCodec codec = ReflectionUtils.newInstance(codecClass, conf);
// add the appropriate extension
file = file.suffix(codec.getDefaultExtension());
if (isCompressionEmulationEnabled(conf)) {
FSDataOutputStream fileOut = fs.create(file, false);
return new DataOutputStream(codec.createOutputStream(fileOut));
}
}
return fs.create(file, false);
}
/**
* Extracts compression/decompression related configuration parameters from
* the source configuration to the target configuration.
*/
static void configureCompressionEmulation(Configuration source,
Configuration target) {
// enable output compression
target.setBoolean(FileOutputFormat.COMPRESS,
source.getBoolean(FileOutputFormat.COMPRESS, false));
// set the job output compression codec
String jobOutputCompressionCodec =
source.get(FileOutputFormat.COMPRESS_CODEC);
if (jobOutputCompressionCodec != null) {
target.set(FileOutputFormat.COMPRESS_CODEC, jobOutputCompressionCodec);
}
// set the job output compression type
String jobOutputCompressionType =
source.get(FileOutputFormat.COMPRESS_TYPE);
if (jobOutputCompressionType != null) {
target.set(FileOutputFormat.COMPRESS_TYPE, jobOutputCompressionType);
}
// enable map output compression
target.setBoolean(MRJobConfig.MAP_OUTPUT_COMPRESS,
source.getBoolean(MRJobConfig.MAP_OUTPUT_COMPRESS, false));
// set the map output compression codecs
String mapOutputCompressionCodec =
source.get(MRJobConfig.MAP_OUTPUT_COMPRESS_CODEC);
if (mapOutputCompressionCodec != null) {
target.set(MRJobConfig.MAP_OUTPUT_COMPRESS_CODEC,
mapOutputCompressionCodec);
}
// enable input decompression
//TODO replace with mapInputBytes and hdfsBytesRead
Path[] inputs =
org.apache.hadoop.mapred.FileInputFormat
.getInputPaths(new JobConf(source));
boolean needsCompressedInput = false;
CompressionCodecFactory compressionCodecs =
new CompressionCodecFactory(source);
for (Path input : inputs) {
CompressionCodec codec = compressionCodecs.getCodec(input);
if (codec != null) {
needsCompressedInput = true;
}
}
setInputCompressionEmulationEnabled(target, needsCompressedInput);
}
/**
* Get the uncompressed input bytes count from the given possibly compressed
* input bytes count.
* @param possiblyCompressedInputBytes input bytes count. This is compressed
* input size if compression emulation is on.
* @param conf configuration of the Gridmix simulated job
* @return uncompressed input bytes count. Compute this in case if compressed
* input was used
*/
static long getUncompressedInputBytes(long possiblyCompressedInputBytes,
Configuration conf) {
long uncompressedInputBytes = possiblyCompressedInputBytes;
if (CompressionEmulationUtil.isInputCompressionEmulationEnabled(conf)) {
float inputCompressionRatio =
CompressionEmulationUtil.getMapInputCompressionEmulationRatio(conf);
uncompressedInputBytes /= inputCompressionRatio;
}
return uncompressedInputBytes;
}
}
|
Class
|
java
|
apache__camel
|
components/camel-nitrite/src/main/java/org/apache/camel/component/nitrite/operation/repository/UpdateRepositoryOperation.java
|
{
"start": 1418,
"end": 2487
}
|
class ____ extends AbstractPayloadAwareOperation implements RepositoryOperation {
private ObjectFilter filter;
public UpdateRepositoryOperation(ObjectFilter filter) {
this.filter = filter;
}
public UpdateRepositoryOperation(ObjectFilter filter, Object payload) {
super(payload);
this.filter = filter;
}
public UpdateRepositoryOperation(ObjectFilter filter, Expression documentExpression) {
super(documentExpression);
this.filter = filter;
}
@Override
protected void execute(Exchange exchange, NitriteEndpoint endpoint) throws Exception {
ObjectRepository repository = (ObjectRepository) endpoint.getNitriteCollection();
Object payload = getPayload(exchange, endpoint);
if (filter != null) {
exchange.getMessage().setHeader(NitriteConstants.WRITE_RESULT, repository.update(filter, payload));
} else {
exchange.getMessage().setHeader(NitriteConstants.WRITE_RESULT, repository.update(payload));
}
}
}
|
UpdateRepositoryOperation
|
java
|
apache__flink
|
flink-runtime/src/main/java/org/apache/flink/runtime/security/token/hadoop/HadoopFSDelegationTokenReceiver.java
|
{
"start": 984,
"end": 1146
}
|
class ____ extends HadoopDelegationTokenReceiver {
@Override
public String serviceName() {
return "hadoopfs";
}
}
|
HadoopFSDelegationTokenReceiver
|
java
|
apache__flink
|
flink-formats/flink-json/src/main/java/org/apache/flink/formats/json/JsonParserToRowDataConverters.java
|
{
"start": 24264,
"end": 25780
}
|
class ____ extends ProjectedConverter {
private static final long serialVersionUID = 1L;
private final Map<String, ProjectedConverter> fieldConverters = new HashMap<>();
// Keep path here for fallback to get nested field from a converted row field.
private final Map<Integer, String[]> outputPosToPath = new HashMap<>();
@Override
public void convertNotNull(JsonParser jp, GenericRowData outputRow) throws IOException {
if (jp.currentToken() != JsonToken.START_OBJECT) {
throw new IllegalStateException("Illegal Json Data...");
}
int arity = fieldConverters.size();
int cnt = 0;
jp.nextToken();
while (jp.currentToken() != JsonToken.END_OBJECT) {
if (cnt >= arity) {
skipToNextField(jp);
continue;
}
String fieldName = jp.getText();
jp.nextToken();
ProjectedConverter converter = fieldConverters.get(fieldName);
if (converter != null) {
converter.convert(jp, outputRow);
jp.nextToken();
cnt++;
} else {
skipToNextField(jp);
}
}
if (cnt < arity && failOnMissingField) {
throw new JsonParseException("Some field is missing in the Json data.");
}
}
}
}
|
RowNestedConverter
|
java
|
apache__camel
|
components/camel-minio/src/test/java/org/apache/camel/component/minio/integration/MinioListObjectsOperationIT.java
|
{
"start": 1908,
"end": 7043
}
|
class ____ extends MinioIntegrationTestSupport {
private static final String BUCKET_NAME = "mycamel2";
@BindToRegistry("minioClient")
MinioClient client = MinioClient.builder()
.endpoint("http://" + service.host(), service.port(), false)
.credentials(service.accessKey(), service.secretKey())
.build();
@EndpointInject
private ProducerTemplate template;
@EndpointInject("mock:result")
private MockEndpoint result;
MinioListObjectsOperationIT() {
}
@SuppressWarnings("unchecked")
@Test
void sendIn() throws Exception {
client.removeBucket(RemoveBucketArgs.builder().bucket(BUCKET_NAME).build());
client.makeBucket(MakeBucketArgs.builder().bucket(BUCKET_NAME).build());
result.expectedMessageCount(1);
template.send("direct:listBuckets",
exchange -> exchange.getIn().setHeader(MinioConstants.MINIO_OPERATION, MinioOperations.listBuckets));
template.send("direct:addObject", ExchangePattern.InOnly, exchange -> {
exchange.getIn().setHeader(MinioConstants.OBJECT_NAME, "CamelUnitTest2");
exchange.getIn().setBody("This is my bucket content.");
exchange.getIn().removeHeader(MinioConstants.MINIO_OPERATION);
});
Exchange exchange = template.request("direct:listObjects", exchange13 -> {
exchange13.getIn().setHeader(MinioConstants.BUCKET_NAME, BUCKET_NAME);
exchange13.getIn().setHeader(MinioConstants.MINIO_OPERATION, MinioOperations.listObjects);
});
Iterable<Result<Item>> respond = (Iterable<Result<Item>>) exchange.getMessage().getBody();
Iterator<Result<Item>> respondSize = respond.iterator();
Iterator<Result<Item>> respondIterator = respond.iterator();
assertEquals(1, Iterators.size(respondSize));
assertEquals("CamelUnitTest2", respondIterator.next().get().objectName());
template.send("direct:deleteObject", ExchangePattern.InOnly, exchange12 -> {
exchange12.getIn().setHeader(MinioConstants.OBJECT_NAME, "CamelUnitTest2");
exchange12.getIn().setHeader(MinioConstants.BUCKET_NAME, BUCKET_NAME);
exchange12.getIn().setHeader(MinioConstants.MINIO_OPERATION, MinioOperations.deleteObject);
});
template.send("direct:deleteBucket", exchange1 -> {
exchange1.getIn().setHeader(MinioConstants.BUCKET_NAME, BUCKET_NAME);
exchange1.getIn().setHeader(MinioConstants.MINIO_OPERATION, MinioOperations.deleteBucket);
});
MockEndpoint.assertIsSatisfied(context);
}
@SuppressWarnings("unchecked")
@Test
void deleteObjectsTest() throws Exception {
client.removeBucket(RemoveBucketArgs.builder().bucket(BUCKET_NAME).build());
client.makeBucket(MakeBucketArgs.builder().bucket(BUCKET_NAME).build());
final List<DeleteObject> objects = new ArrayList<>(20);
// set up the environment
for (int i = 0; i < 20; i++) {
String currentDeleteObjectName = "CamelUnitTest-" + randomAlphanumeric(5);
objects.add(new DeleteObject(currentDeleteObjectName));
template.send("direct:addObject", ExchangePattern.InOnly, new Processor() {
@Override
public void process(Exchange exchange) throws Exception {
exchange.getIn().setHeader(MinioConstants.BUCKET_NAME, BUCKET_NAME);
exchange.getIn().setHeader(MinioConstants.OBJECT_NAME, currentDeleteObjectName);
exchange.getIn().setBody("This is my bucket content.");
exchange.getIn().removeHeader(MinioConstants.MINIO_OPERATION);
}
});
}
assertEquals(20, countObjectsInBucket(client, BUCKET_NAME));
// delete all objects of the bucket
template.send("direct:deleteObjects", exchange -> {
exchange.getIn().setHeader(MinioConstants.BUCKET_NAME, BUCKET_NAME);
exchange.getIn().setHeader(MinioConstants.MINIO_OPERATION, MinioOperations.deleteObjects);
exchange.getIn().setBody(RemoveObjectsArgs.builder().bucket(BUCKET_NAME).objects(objects));
});
assertEquals(0, countObjectsInBucket(client, BUCKET_NAME));
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
String minioEndpoint = "minio://" + BUCKET_NAME + "?autoCreateBucket=true";
String minioEndpointPojoEnabled = minioEndpoint + "&pojoRequest=true";
from("direct:listBucket").to(minioEndpoint);
from("direct:addObject").to(minioEndpoint);
from("direct:deleteObject").to(minioEndpoint);
from("direct:deleteObjects").to(minioEndpointPojoEnabled);
from("direct:listObjects").to(minioEndpoint);
from("direct:deleteBucket").to(minioEndpoint).to("mock:result");
}
};
}
}
|
MinioListObjectsOperationIT
|
java
|
apache__camel
|
components/camel-bindy/src/main/java/org/apache/camel/dataformat/bindy/format/factories/ByteFormatFactory.java
|
{
"start": 1024,
"end": 1582
}
|
class ____ extends AbstractFormatFactory {
private final ByteFormat byteFormat = new ByteFormat();
{
supportedClasses.add(byte.class);
supportedClasses.add(Byte.class);
}
@Override
public boolean canBuild(FormattingOptions formattingOptions) {
return super.canBuild(formattingOptions)
&& ObjectHelper.isEmpty(formattingOptions.getPattern());
}
@Override
public Format<?> build(FormattingOptions formattingOptions) {
return byteFormat;
}
private static
|
ByteFormatFactory
|
java
|
spring-projects__spring-framework
|
spring-core/src/test/java/org/springframework/core/io/support/ClassPathManifestEntriesTestApplication.java
|
{
"start": 904,
"end": 1199
}
|
class ____ {
public static void main(String[] args) throws IOException {
PathMatchingResourcePatternResolver resolver = new PathMatchingResourcePatternResolver();
System.out.println("!!!!" + List.of(resolver.getResources("classpath*:/**/*.txt")));
}
}
|
ClassPathManifestEntriesTestApplication
|
java
|
spring-projects__spring-boot
|
core/spring-boot-autoconfigure/src/test/java/org/springframework/boot/autoconfigure/task/TaskExecutionAutoConfigurationTests.java
|
{
"start": 28216,
"end": 28538
}
|
class ____ {
private final ThreadPoolTaskExecutorBuilder builder = new ThreadPoolTaskExecutorBuilder();
@Bean
ThreadPoolTaskExecutorBuilder customThreadPoolTaskExecutorBuilder() {
return this.builder;
}
}
@Configuration(proxyBeanMethods = false)
@EnableAsync
static
|
CustomThreadPoolTaskExecutorBuilderConfig
|
java
|
spring-projects__spring-boot
|
module/spring-boot-restclient/src/main/java/org/springframework/boot/restclient/RestTemplateBuilder.java
|
{
"start": 2883,
"end": 13655
}
|
class ____ {
private final HttpClientSettings clientSettings;
private final boolean detectRequestFactory;
private final @Nullable String rootUri;
private final @Nullable Set<HttpMessageConverter<?>> messageConverters;
private final Set<ClientHttpRequestInterceptor> interceptors;
private final @Nullable ClientHttpRequestFactoryBuilder<?> requestFactoryBuilder;
private final @Nullable UriTemplateHandler uriTemplateHandler;
private final @Nullable ResponseErrorHandler errorHandler;
private final @Nullable BasicAuthentication basicAuthentication;
private final Map<String, List<String>> defaultHeaders;
private final Set<RestTemplateCustomizer> customizers;
private final Set<RestTemplateRequestCustomizer<?>> requestCustomizers;
/**
* Create a new {@link RestTemplateBuilder} instance.
* @param customizers any {@link RestTemplateCustomizer RestTemplateCustomizers} that
* should be applied when the {@link RestTemplate} is built
*/
public RestTemplateBuilder(RestTemplateCustomizer... customizers) {
Assert.notNull(customizers, "'customizers' must not be null");
this.clientSettings = HttpClientSettings.defaults();
this.detectRequestFactory = true;
this.rootUri = null;
this.messageConverters = null;
this.interceptors = Collections.emptySet();
this.requestFactoryBuilder = null;
this.uriTemplateHandler = null;
this.errorHandler = null;
this.basicAuthentication = null;
this.defaultHeaders = Collections.emptyMap();
this.customizers = copiedSetOf(customizers);
this.requestCustomizers = Collections.emptySet();
}
private RestTemplateBuilder(HttpClientSettings clientSettings, boolean detectRequestFactory,
@Nullable String rootUri, @Nullable Set<HttpMessageConverter<?>> messageConverters,
Set<ClientHttpRequestInterceptor> interceptors,
@Nullable ClientHttpRequestFactoryBuilder<?> requestFactoryBuilder,
@Nullable UriTemplateHandler uriTemplateHandler, @Nullable ResponseErrorHandler errorHandler,
@Nullable BasicAuthentication basicAuthentication, Map<String, List<String>> defaultHeaders,
Set<RestTemplateCustomizer> customizers, Set<RestTemplateRequestCustomizer<?>> requestCustomizers) {
this.clientSettings = clientSettings;
this.detectRequestFactory = detectRequestFactory;
this.rootUri = rootUri;
this.messageConverters = messageConverters;
this.interceptors = interceptors;
this.requestFactoryBuilder = requestFactoryBuilder;
this.uriTemplateHandler = uriTemplateHandler;
this.errorHandler = errorHandler;
this.basicAuthentication = basicAuthentication;
this.defaultHeaders = defaultHeaders;
this.customizers = customizers;
this.requestCustomizers = requestCustomizers;
}
/**
* Set if the {@link ClientHttpRequestFactory} should be detected based on the
* classpath. Default if {@code true}.
* @param detectRequestFactory if the {@link ClientHttpRequestFactory} should be
* detected
* @return a new builder instance
*/
public RestTemplateBuilder detectRequestFactory(boolean detectRequestFactory) {
return new RestTemplateBuilder(this.clientSettings, detectRequestFactory, this.rootUri, this.messageConverters,
this.interceptors, this.requestFactoryBuilder, this.uriTemplateHandler, this.errorHandler,
this.basicAuthentication, this.defaultHeaders, this.customizers, this.requestCustomizers);
}
/**
* Set a root URL that should be applied to each request that starts with {@code '/'}.
* The root URL will only apply when {@code String} variants of the
* {@link RestTemplate} methods are used for specifying the request URL.
* @param rootUri the root URI or {@code null}
* @return a new builder instance
*/
public RestTemplateBuilder rootUri(@Nullable String rootUri) {
return new RestTemplateBuilder(this.clientSettings, this.detectRequestFactory, rootUri, this.messageConverters,
this.interceptors, this.requestFactoryBuilder, this.uriTemplateHandler, this.errorHandler,
this.basicAuthentication, this.defaultHeaders, this.customizers, this.requestCustomizers);
}
/**
* Set the {@link HttpMessageConverter HttpMessageConverters} that should be used with
* the {@link RestTemplate}. Setting this value will replace any previously configured
* converters and any converters configured on the builder will replace RestTemplate's
* default converters.
* @param messageConverters the converters to set
* @return a new builder instance
* @see #additionalMessageConverters(HttpMessageConverter...)
*/
public RestTemplateBuilder messageConverters(HttpMessageConverter<?>... messageConverters) {
Assert.notNull(messageConverters, "'messageConverters' must not be null");
return messageConverters(Arrays.asList(messageConverters));
}
/**
* Set the {@link HttpMessageConverter HttpMessageConverters} that should be used with
* the {@link RestTemplate}. Setting this value will replace any previously configured
* converters and any converters configured on the builder will replace RestTemplate's
* default converters.
* @param messageConverters the converters to set
* @return a new builder instance
* @see #additionalMessageConverters(HttpMessageConverter...)
*/
public RestTemplateBuilder messageConverters(Iterable<? extends HttpMessageConverter<?>> messageConverters) {
Assert.notNull(messageConverters, "'messageConverters' must not be null");
return new RestTemplateBuilder(this.clientSettings, this.detectRequestFactory, this.rootUri,
copiedSetOf(messageConverters), this.interceptors, this.requestFactoryBuilder, this.uriTemplateHandler,
this.errorHandler, this.basicAuthentication, this.defaultHeaders, this.customizers,
this.requestCustomizers);
}
/**
* Add additional {@link HttpMessageConverter HttpMessageConverters} that should be
* used with the {@link RestTemplate}. Any converters configured on the builder will
* replace RestTemplate's default converters.
* @param messageConverters the converters to add
* @return a new builder instance
* @see #messageConverters(HttpMessageConverter...)
*/
public RestTemplateBuilder additionalMessageConverters(HttpMessageConverter<?>... messageConverters) {
Assert.notNull(messageConverters, "'messageConverters' must not be null");
return additionalMessageConverters(Arrays.asList(messageConverters));
}
/**
* Add additional {@link HttpMessageConverter HttpMessageConverters} that should be
* used with the {@link RestTemplate}. Any converters configured on the builder will
* replace RestTemplate's default converters.
* @param messageConverters the converters to add
* @return a new builder instance
* @see #messageConverters(HttpMessageConverter...)
*/
public RestTemplateBuilder additionalMessageConverters(
Collection<? extends HttpMessageConverter<?>> messageConverters) {
Assert.notNull(messageConverters, "'messageConverters' must not be null");
return new RestTemplateBuilder(this.clientSettings, this.detectRequestFactory, this.rootUri,
append(this.messageConverters, messageConverters), this.interceptors, this.requestFactoryBuilder,
this.uriTemplateHandler, this.errorHandler, this.basicAuthentication, this.defaultHeaders,
this.customizers, this.requestCustomizers);
}
/**
* Set the {@link HttpMessageConverter HttpMessageConverters} that should be used with
* the {@link RestTemplate} to the default set. Calling this method will replace any
* previously defined converters.
* @return a new builder instance
* @see #messageConverters(HttpMessageConverter...)
*/
public RestTemplateBuilder defaultMessageConverters() {
return new RestTemplateBuilder(this.clientSettings, this.detectRequestFactory, this.rootUri,
copiedSetOf(new RestTemplate().getMessageConverters()), this.interceptors, this.requestFactoryBuilder,
this.uriTemplateHandler, this.errorHandler, this.basicAuthentication, this.defaultHeaders,
this.customizers, this.requestCustomizers);
}
/**
* Set the {@link ClientHttpRequestInterceptor ClientHttpRequestInterceptors} that
* should be used with the {@link RestTemplate}. Setting this value will replace any
* previously defined interceptors.
* @param interceptors the interceptors to set
* @return a new builder instance
* @see #additionalInterceptors(ClientHttpRequestInterceptor...)
*/
public RestTemplateBuilder interceptors(ClientHttpRequestInterceptor... interceptors) {
Assert.notNull(interceptors, "'interceptors' must not be null");
return interceptors(Arrays.asList(interceptors));
}
/**
* Set the {@link ClientHttpRequestInterceptor ClientHttpRequestInterceptors} that
* should be used with the {@link RestTemplate}. Setting this value will replace any
* previously defined interceptors.
* @param interceptors the interceptors to set
* @return a new builder instance
* @see #additionalInterceptors(ClientHttpRequestInterceptor...)
*/
public RestTemplateBuilder interceptors(Collection<ClientHttpRequestInterceptor> interceptors) {
Assert.notNull(interceptors, "'interceptors' must not be null");
return new RestTemplateBuilder(this.clientSettings, this.detectRequestFactory, this.rootUri,
this.messageConverters, copiedSetOf(interceptors), this.requestFactoryBuilder, this.uriTemplateHandler,
this.errorHandler, this.basicAuthentication, this.defaultHeaders, this.customizers,
this.requestCustomizers);
}
/**
* Add additional {@link ClientHttpRequestInterceptor ClientHttpRequestInterceptors}
* that should be used with the {@link RestTemplate}.
* @param interceptors the interceptors to add
* @return a new builder instance
* @see #interceptors(ClientHttpRequestInterceptor...)
*/
public RestTemplateBuilder additionalInterceptors(ClientHttpRequestInterceptor... interceptors) {
Assert.notNull(interceptors, "'interceptors' must not be null");
return additionalInterceptors(Arrays.asList(interceptors));
}
/**
* Add additional {@link ClientHttpRequestInterceptor ClientHttpRequestInterceptors}
* that should be used with the {@link RestTemplate}.
* @param interceptors the interceptors to add
* @return a new builder instance
* @see #interceptors(ClientHttpRequestInterceptor...)
*/
public RestTemplateBuilder additionalInterceptors(Collection<? extends ClientHttpRequestInterceptor> interceptors) {
Assert.notNull(interceptors, "'interceptors' must not be null");
return new RestTemplateBuilder(this.clientSettings, this.detectRequestFactory, this.rootUri,
this.messageConverters, append(this.interceptors, interceptors), this.requestFactoryBuilder,
this.uriTemplateHandler, this.errorHandler, this.basicAuthentication, this.defaultHeaders,
this.customizers, this.requestCustomizers);
}
/**
* Set the {@link ClientHttpRequestFactory}
|
RestTemplateBuilder
|
java
|
FasterXML__jackson-databind
|
src/main/java/tools/jackson/databind/ext/javatime/deser/key/YearMonthKeyDeserializer.java
|
{
"start": 467,
"end": 1296
}
|
class ____ extends Jsr310KeyDeserializer {
public static final YearMonthKeyDeserializer INSTANCE = new YearMonthKeyDeserializer();
// parser copied from YearMonth
private static final DateTimeFormatter FORMATTER = new DateTimeFormatterBuilder()
.appendValue(YEAR, 4, 10, SignStyle.EXCEEDS_PAD)
.appendLiteral('-')
.appendValue(MONTH_OF_YEAR, 2)
.toFormatter();
private YearMonthKeyDeserializer() { } // singleton
@Override
protected YearMonth deserialize(String key, DeserializationContext ctxt)
throws JacksonException
{
try {
return YearMonth.parse(key, FORMATTER);
} catch (DateTimeException e) {
return _handleDateTimeException(ctxt, YearMonth.class, e, key);
}
}
}
|
YearMonthKeyDeserializer
|
java
|
google__guice
|
core/src/com/google/inject/internal/aop/UnsafeClassDefiner.java
|
{
"start": 3227,
"end": 3498
}
|
class ____ {
static final ClassDefiner CLASS_LOADER_DEFINE_CLASS =
tryPrivileged(
() -> accessDefineClass(ClassLoader.class), "Cannot access ClassLoader.defineClass");
}
// initialization-on-demand...
private static
|
ClassLoaderDefineClassHolder
|
java
|
apache__camel
|
core/camel-core/src/test/java/org/apache/camel/component/bean/BeanWithAnnotationInheritedTest.java
|
{
"start": 1306,
"end": 4518
}
|
class ____ extends ContextTestSupport {
@Test
public void testWithAnnotationsFromOneInterface() throws Exception {
MockEndpoint mock = getMockEndpoint("mock:result");
mock.expectedBodiesReceived("x1y1");
template.requestBody("direct:in1", "whatever");
mock.assertIsSatisfied();
}
@Test
public void testWithAnnotationsFromTwoInterfaces() throws Exception {
MockEndpoint mock = getMockEndpoint("mock:result");
mock.expectedBodiesReceived("x2y2");
template.requestBody("direct:in2", "whatever");
mock.assertIsSatisfied();
}
@Test
public void testWithAnnotationsFromSuperclassAndInterface() throws Exception {
MockEndpoint mock = getMockEndpoint("mock:result");
mock.expectedBodiesReceived("x3y3");
template.requestBody("direct:in3", "whatever");
mock.assertIsSatisfied();
}
@Test
public void testWithAnnotationsFromImplementationClassAndInterface() throws Exception {
MockEndpoint mock = getMockEndpoint("mock:result");
mock.expectedBodiesReceived("x4y4");
template.requestBody("direct:in4", "whatever");
mock.assertIsSatisfied();
}
@Test
public void testWithAnnotationsFromOneInterfaceInheritedByProxy() throws Exception {
MockEndpoint mock = getMockEndpoint("mock:result");
mock.expectedBodiesReceived("x5y5");
template.requestBody("direct:in5", "whatever");
mock.assertIsSatisfied();
}
@Override
protected Registry createCamelRegistry() throws Exception {
Registry answer = super.createCamelRegistry();
answer.bind("b", new B());
answer.bind("p", Proxy.newProxyInstance(I1.class.getClassLoader(), new Class[] { I1.class }, new InvocationHandler() {
@Override
public Object invoke(Object proxy, Method method, Object[] args) {
if (method.getName().equals("m1")) {
return args[0].toString() + args[1].toString();
} else {
return null;
}
}
}));
return answer;
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
public void configure() {
from("direct:in1").setHeader("foo", constant("x1")).setHeader("bar", constant("y1")).to("bean:b?method=m1")
.to("mock:result");
from("direct:in2").setHeader("foo", constant("x2")).setHeader("bar", constant("y2")).to("bean:b?method=m2")
.to("mock:result");
from("direct:in3").setHeader("foo", constant("x3")).setHeader("bar", constant("y3")).to("bean:b?method=m3")
.to("mock:result");
from("direct:in4").setHeader("foo", constant("x4")).setHeader("bar", constant("y4")).to("bean:b?method=m4")
.to("mock:result");
from("direct:in5").setHeader("foo", constant("x5")).setHeader("bar", constant("y5")).to("bean:p?method=m1")
.to("mock:result");
}
};
}
private
|
BeanWithAnnotationInheritedTest
|
java
|
grpc__grpc-java
|
util/src/main/java/io/grpc/util/MultiChildLoadBalancer.java
|
{
"start": 1904,
"end": 9327
}
|
class ____ extends LoadBalancer {
private static final Logger logger = Logger.getLogger(MultiChildLoadBalancer.class.getName());
private static final int OFFSET_SEED = new Random().nextInt();
// Modify by replacing the list to release memory when no longer used.
private List<ChildLbState> childLbStates = new ArrayList<>(0);
private final Helper helper;
// Set to true if currently in the process of handling resolved addresses.
protected boolean resolvingAddresses;
protected final LoadBalancerProvider pickFirstLbProvider = new PickFirstLoadBalancerProvider();
protected ConnectivityState currentConnectivityState;
protected MultiChildLoadBalancer(Helper helper) {
this.helper = checkNotNull(helper, "helper");
logger.log(Level.FINE, "Created");
}
/**
* Using the state of all children will calculate the current connectivity state,
* update fields, generate a picker and then call
* {@link Helper#updateBalancingState(ConnectivityState, SubchannelPicker)}.
*/
protected abstract void updateOverallBalancingState();
/**
* Override to utilize parsing of the policy configuration or alternative helper/lb generation.
* Override this if keys are not Endpoints or if child policies have configuration. Null map
* values preserve the child without delivering the child an update.
*/
  protected Map<Object, ResolvedAddresses> createChildAddressesMap(
      ResolvedAddresses resolvedAddresses) {
    // LinkedHashMap so that child creation/update order follows the resolver's
    // address order (updateChildrenWithResolvedAddresses preserves this order).
    Map<Object, ResolvedAddresses> childAddresses =
        Maps.newLinkedHashMapWithExpectedSize(resolvedAddresses.getAddresses().size());
    for (EquivalentAddressGroup eag : resolvedAddresses.getAddresses()) {
      // Each child sees a single-EAG view of the update. IS_PETIOLE_POLICY
      // presumably tags the update as coming from an endpoint-level ("petiole")
      // policy — confirm against the attribute's declaration, which is outside
      // this view. The null config leaves configuration to the child's default.
      ResolvedAddresses addresses = resolvedAddresses.toBuilder()
          .setAddresses(Collections.singletonList(eag))
          .setAttributes(Attributes.newBuilder().set(IS_PETIOLE_POLICY, true).build())
          .setLoadBalancingPolicyConfig(null)
          .build();
      childAddresses.put(new Endpoint(eag), addresses);
    }
    return childAddresses;
  }
/**
* Override to create an instance of a subclass.
*/
  protected ChildLbState createChildLbState(Object key) {
    // Default child policy is pick-first; subclasses may return a customized state.
    return new ChildLbState(key, pickFirstLbProvider);
  }
/**
* Override to completely replace the default logic or to do additional activities.
*/
@Override
public Status acceptResolvedAddresses(ResolvedAddresses resolvedAddresses) {
logger.log(Level.FINE, "Received resolution result: {0}", resolvedAddresses);
try {
resolvingAddresses = true;
// process resolvedAddresses to update children
Map<Object, ResolvedAddresses> newChildAddresses = createChildAddressesMap(resolvedAddresses);
// Handle error case
if (newChildAddresses.isEmpty()) {
Status unavailableStatus = Status.UNAVAILABLE.withDescription(
"NameResolver returned no usable address. " + resolvedAddresses);
handleNameResolutionError(unavailableStatus);
return unavailableStatus;
}
return updateChildrenWithResolvedAddresses(newChildAddresses);
} finally {
resolvingAddresses = false;
}
}
/**
* Handle the name resolution error.
*
* <p/>Override if you need special handling.
*/
@Override
public void handleNameResolutionError(Status error) {
if (currentConnectivityState != READY) {
helper.updateBalancingState(
TRANSIENT_FAILURE, new FixedResultPicker(PickResult.withError(error)));
}
}
@Override
public void shutdown() {
logger.log(Level.FINE, "Shutdown");
for (ChildLbState state : childLbStates) {
state.shutdown();
}
childLbStates.clear();
}
  /**
   * Reconciles the current children against {@code newChildAddresses}: reuses the
   * existing ChildLbState for every key that is still present, creates states for
   * new keys, forwards each child its addresses, refreshes the aggregate balancing
   * state, and only then shuts down children whose keys disappeared.
   *
   * @return the last non-OK status returned by a child update, or OK if none failed
   */
  private Status updateChildrenWithResolvedAddresses(
      Map<Object, ResolvedAddresses> newChildAddresses) {
    // Create a map with the old values
    Map<Object, ChildLbState> oldStatesMap =
        Maps.newLinkedHashMapWithExpectedSize(childLbStates.size());
    for (ChildLbState state : childLbStates) {
      oldStatesMap.put(state.getKey(), state);
    }
    // Move ChildLbStates from the map to a new list (preserving the new map's order)
    Status status = Status.OK;
    List<ChildLbState> newChildLbStates = new ArrayList<>(newChildAddresses.size());
    for (Map.Entry<Object, ResolvedAddresses> entry : newChildAddresses.entrySet()) {
      ChildLbState childLbState = oldStatesMap.remove(entry.getKey());
      if (childLbState == null) {
        childLbState = createChildLbState(entry.getKey());
      }
      newChildLbStates.add(childLbState);
    }
    // Use a random start position for child updates to weakly "shuffle" connection creation order.
    // The network will often add noise to the creation order, but this avoids giving earlier
    // children a consistent head start.
    for (ChildLbState childLbState : offsetIterable(newChildLbStates, OFFSET_SEED)) {
      ResolvedAddresses addresses = newChildAddresses.get(childLbState.getKey());
      // A null map value deliberately preserves the child without delivering an update.
      if (addresses != null) {
        // update child LB
        Status newStatus = childLbState.lb.acceptResolvedAddresses(addresses);
        // Remember the most recent failure but keep updating the remaining children.
        if (!newStatus.isOk()) {
          status = newStatus;
        }
      }
    }
    childLbStates = newChildLbStates;
    // Update the picker and our connectivity state
    updateOverallBalancingState();
    // Remaining entries in map are orphaned; they are shut down only after the
    // new balancing state has been published above.
    for (ChildLbState childLbState : oldStatesMap.values()) {
      childLbState.shutdown();
    }
    return status;
  }
@VisibleForTesting
static <T> Iterable<T> offsetIterable(Collection<T> c, int seed) {
int pos;
if (c.isEmpty()) {
pos = 0;
} else {
pos = UnsignedInts.remainder(seed, c.size());
}
return Iterables.concat(
Iterables.skip(c, pos),
Iterables.limit(c, pos));
}
@Nullable
protected static ConnectivityState aggregateState(
@Nullable ConnectivityState overallState, ConnectivityState childState) {
if (overallState == null) {
return childState;
}
if (overallState == READY || childState == READY) {
return READY;
}
if (overallState == CONNECTING || childState == CONNECTING) {
return CONNECTING;
}
if (overallState == IDLE || childState == IDLE) {
return IDLE;
}
return overallState;
}
  /** Returns the channel helper supplied at construction time. */
  protected final Helper getHelper() {
    return helper;
  }
  /** Test hook: returns the live internal list of child states (not a copy). */
  @VisibleForTesting
  public final Collection<ChildLbState> getChildLbStates() {
    return childLbStates;
  }
/**
* Filters out non-ready child load balancers (subchannels).
*/
protected final List<ChildLbState> getReadyChildren() {
List<ChildLbState> activeChildren = new ArrayList<>();
for (ChildLbState child : getChildLbStates()) {
if (child.getCurrentState() == READY) {
activeChildren.add(child);
}
}
return activeChildren;
}
/**
* This represents the state of load balancer children. Each endpoint (represented by an
* EquivalentAddressGroup or EDS string) will have a separate ChildLbState which in turn will
* have a single child LoadBalancer created from the provided factory.
*
* <p>A ChildLbStateHelper is the glue between ChildLbState and the helpers associated with the
* petiole policy above and the PickFirstLoadBalancer's helper below.
*
* <p>If you wish to store additional state information related to each subchannel, then extend
* this class.
*/
public
|
MultiChildLoadBalancer
|
java
|
google__guava
|
android/guava-tests/test/com/google/common/reflect/TypeTokenSubtypeTest.java
|
{
"start": 9483,
"end": 17875
}
|
class ____ extends SubtypeTester {
@TestSubtype(suppressGetSupertype = true, suppressGetSubtype = true)
public Iterable<?> noBounds(List<?> list) {
return isSubtype(list);
}
@TestSubtype(suppressGetSupertype = true, suppressGetSubtype = true)
public Iterable<List<?>> listOfListOfWildcard(List<List<?>> listOfList) {
return isSubtype(listOfList);
}
@TestSubtype(suppressGetSupertype = true, suppressGetSubtype = true)
public Iterable<? extends List<?>> listOfWildcardListOfWildcard(
List<? extends List<?>> listOfList) {
return isSubtype(listOfList);
}
@TestSubtype(suppressGetSupertype = true, suppressGetSubtype = true)
public Enum<? extends Enum<?>> implicitlyBoundedEnumIsSubtypeOfExplicitlyBoundedEnum(
Enum<?> e) {
return isSubtype(e);
}
@TestSubtype(suppressGetSupertype = true, suppressGetSubtype = true)
public Enum<?> implicitlyBoundedEnum(Enum<?> e) {
return isSubtype(e);
}
@TestSubtype(suppressGetSupertype = true, suppressGetSubtype = true)
public Enum<?> explicitlyBoundedEnumIsSubtypeOfImplicitlyBoundedEnum(
Enum<? extends Enum<?>> obj) {
return isSubtype(obj);
}
@TestSubtype(suppressGetSupertype = true, suppressGetSubtype = true)
public Iterable<Enum<?>> listOfEnums(List<Enum<?>> listOfEnums) {
return isSubtype(listOfEnums);
}
@TestSubtype(suppressGetSupertype = true, suppressGetSubtype = true)
public UseList<? extends List<Enum<? extends Enum<?>>>>
wildcardBoundUsesImplicitlyRecursiveBoundedWildcard(UseList<? extends List<Enum<?>>> arg) {
return isSubtype(arg);
}
@TestSubtype(suppressGetSupertype = true, suppressGetSubtype = true)
public UseList<? extends List<Enum<? extends Enum<?>>>>
wildcardBoundHasImplicitBoundAtsInvariantPosition(UseList<? extends List<Enum<?>>> arg) {
return isSubtype(arg);
}
@TestSubtype(suppressGetSupertype = true, suppressGetSubtype = true)
public Iterable<Enum<?>> listOfEnumsWithExplicitBoundIsSubtypeOfIterableOfEnumWithImplicitBound(
List<Enum<? extends Enum<?>>> listOfEnums) {
return isSubtype(listOfEnums);
}
@TestSubtype(suppressGetSupertype = true, suppressGetSubtype = true)
public Iterable<Enum<?>> nestedExplicitEnumBoundIsSubtypeOfImplicitEnumBound(
List<Enum<? extends Enum<? extends Enum<?>>>> listOfEnums) {
return isSubtype(listOfEnums);
}
@TestSubtype(suppressGetSupertype = true, suppressGetSubtype = true)
public Iterable<Enum<? extends Enum<? extends Enum<?>>>>
implicitEnumBoundIsSubtypeOfNestedExplicitEnumBound(List<Enum<?>> listOfEnums) {
return isSubtype(listOfEnums);
}
@TestSubtype(suppressGetSupertype = true, suppressGetSubtype = true)
public Iterable<Enum<? extends Enum<?>>>
listOfEnumsWithImplicitBoundIsSubtypeOfIterableOfEnumWithExplicitBound(
List<Enum<?>> listOfEnums) {
return isSubtype(listOfEnums);
}
@TestSubtype(suppressGetSupertype = true, suppressGetSubtype = true)
public List<Enum<? extends Enum<?>>> listOfSubEnumsIsNotSubtypeOfListOfEnums(
List<MyEnum> listOfEnums) {
return notSubtype(listOfEnums);
}
@TestSubtype(suppressGetSupertype = true, suppressGetSubtype = true)
public List<MyTypeBoundUsesImplicitBound<? extends Enum<?>>> typeVariableBoundOmitsItsOwnBound(
List<MyTypeBoundUsesImplicitBound<?>> arg) {
return isSubtype(arg);
}
@TestSubtype(suppressGetSupertype = true, suppressGetSubtype = true)
public List<MyTypeBoundUsesImplicitBound<? extends MyEnum>>
wildcardUpperBoundIsNotSubtypeOfTypeVariableBound(
List<MyTypeBoundUsesImplicitBound<?>> arg) {
return notSubtype(arg);
}
@TestSubtype(suppressGetSupertype = true, suppressGetSubtype = true)
public List<List<? extends Iterable<UseList<? extends List<?>>>>>
wildcardBoundUsesParameterizedTypeWithImplicitBound(
List<List<? extends Iterable<UseList<?>>>> arg) {
return isSubtype(arg);
}
@TestSubtype(suppressGetSupertype = true, suppressGetSubtype = true)
public List<List<? extends Iterable<Enum<? extends Enum<?>>>>>
wildcardBoundUsesRecursiveParameterizedTypeWithImplicitBound(
List<List<? extends Iterable<Enum<?>>>> arg) {
return isSubtype(arg);
}
@TestSubtype(suppressGetSupertype = true, suppressGetSubtype = true)
public List<List<? extends Iterable<MyTypeBoundUsesImplicitBound<? extends Enum<?>>>>>
wildcardBoundUsesParameterizedTypeDefinedWithImplicitBound(
List<List<? extends Iterable<MyTypeBoundUsesImplicitBound<?>>>> arg) {
return isSubtype(arg);
}
@TestSubtype(suppressGetSupertype = true, suppressGetSubtype = true)
public Iterable<UseIterable<? extends Iterable<?>>>
wildcardOfImplicitBoundedIsSubtypeOfWildcardOfExplicitlyBounded(
List<UseIterable<?>> withImplicitBounds) {
return isSubtype(withImplicitBounds);
}
@TestSubtype(suppressGetSupertype = true, suppressGetSubtype = true)
public Iterable<UseSerializableIterable<? extends Iterable<?>>>
wildcardOfImplicitBoundedIsSubtypeOfWildcardOfExplicitlyPartialBounded(
List<UseSerializableIterable<?>> withImplicitBounds) {
return isSubtype(withImplicitBounds);
}
@TestSubtype(suppressGetSupertype = true, suppressGetSubtype = true)
public Iterable<UseList<? extends Iterable<?>>> useListOfIterableWildcard(
List<UseList<?>> withImplicitBounds) {
return isSubtype(withImplicitBounds);
}
@TestSubtype(suppressGetSupertype = true, suppressGetSubtype = true)
public Iterable<UseIterable<?>> listOfExplicitBoundedIsSubtypeOfListOfImplicitlyBounded(
List<UseIterable<? extends Iterable<?>>> withExplicitBounds) {
return isSubtype(withExplicitBounds);
}
@TestSubtype(suppressGetSupertype = true, suppressGetSubtype = true)
public Iterable<UseIterable<? extends Iterable<?>>>
wildcardOfImplicitBoundedIsNotSubtypeOfNonWildcardOfExplicitlyBounded(
List<? extends UseIterable<?>> withImplicitBounds) {
return notSubtype(withImplicitBounds);
}
@TestSubtype(suppressGetSupertype = true, suppressGetSubtype = true)
public Iterable<UseIterable<? extends List<?>>>
wildcardOfImplicitBoundedIsNotSubtypeOfWildcardWithNarrowerBounds(
List<UseIterable<?>> withImplicitBounds) {
return notSubtype(withImplicitBounds);
}
@TestSubtype(suppressGetSupertype = true, suppressGetSubtype = true)
public <T> Iterable<? extends T> supertypeWithWildcardUpperBound(List<T> list) {
return isSubtype(list);
}
@TestSubtype(suppressGetSupertype = true, suppressGetSubtype = true)
public <T> Iterable<? extends T> supertypeWithWildcardUpperBound_notMatch(List<String> list) {
return notSubtype(list);
}
@TestSubtype(suppressGetSupertype = true, suppressGetSubtype = true)
public <T> Iterable<? super T> supertypeWithWildcardULowerBound(List<T> list) {
return isSubtype(list);
}
@TestSubtype(suppressGetSupertype = true, suppressGetSubtype = true)
public <T> Iterable<? extends T> supertypeWithWildcardULowerBound_notMatch(List<String> list) {
return notSubtype(list);
}
@TestSubtype(suppressGetSupertype = true, suppressGetSubtype = true)
public <T> Iterable<?> wildcardsMatchByUpperBound(List<? extends T> list) {
return isSubtype(list);
}
@TestSubtype(suppressGetSupertype = true, suppressGetSubtype = true)
public <T> Iterable<? extends T> wildCardsDoNotMatchByUpperBound(List<?> list) {
return notSubtype(list);
}
@TestSubtype(suppressGetSupertype = true, suppressGetSubtype = true)
public <T> Iterable<? super String> wildcardsMatchByLowerBound(
List<? super CharSequence> list) {
return isSubtype(list);
}
@TestSubtype(suppressGetSupertype = true, suppressGetSubtype = true)
public <T> Iterable<? super CharSequence> wildCardsDoNotMatchByLowerBound(
List<? super String> list) {
return notSubtype(list);
}
// Can't test getSupertype() or getSubtype() because JDK reflection doesn't consider
// Foo<?> and Foo<? extends Bar> equal for
|
WildcardSubtypingTests
|
java
|
spring-projects__spring-framework
|
spring-messaging/src/test/java/org/springframework/messaging/converter/MessageConverterTests.java
|
{
"start": 1452,
"end": 5654
}
|
class ____ {
private TestMessageConverter converter = new TestMessageConverter();
	@Test
	void supportsTargetClass() {
		// Conversion succeeds for a String target and yields null for an unsupported Integer target.
		Message<String> message = MessageBuilder.withPayload("ABC").build();
		assertThat(this.converter.fromMessage(message, String.class)).isEqualTo("success-from");
		assertThat(this.converter.fromMessage(message, Integer.class)).isNull();
	}
	@Test
	void supportsMimeType() {
		// A text/plain content-type header permits conversion.
		Message<String> message = MessageBuilder.withPayload(
				"ABC").setHeader(MessageHeaders.CONTENT_TYPE, MimeTypeUtils.TEXT_PLAIN).build();
		assertThat(this.converter.fromMessage(message, String.class)).isEqualTo("success-from");
	}
	@Test
	void supportsMimeTypeNotSupported() {
		// application/json is not supported by the test converter, so conversion yields null.
		Message<String> message = MessageBuilder.withPayload(
				"ABC").setHeader(MessageHeaders.CONTENT_TYPE, MimeTypeUtils.APPLICATION_JSON).build();
		assertThat(this.converter.fromMessage(message, String.class)).isNull();
	}
	@Test
	void supportsMimeTypeNotSpecified() {
		// Without a content-type header, conversion still succeeds.
		Message<String> message = MessageBuilder.withPayload("ABC").build();
		assertThat(this.converter.fromMessage(message, String.class)).isEqualTo("success-from");
	}
	@Test
	void supportsMimeTypeNoneConfigured() {
		// A converter with no supported MIME types configured accepts any content type.
		Message<String> message = MessageBuilder.withPayload(
				"ABC").setHeader(MessageHeaders.CONTENT_TYPE, MimeTypeUtils.APPLICATION_JSON).build();
		this.converter = new TestMessageConverter(new MimeType[0]);
		assertThat(this.converter.fromMessage(message, String.class)).isEqualTo("success-from");
	}
	@Test
	void canConvertFromStrictContentTypeMatch() {
		// In strict mode the content-type header must be present and match exactly.
		this.converter = new TestMessageConverter(MimeTypeUtils.TEXT_PLAIN);
		this.converter.setStrictContentTypeMatch(true);
		Message<String> message = MessageBuilder.withPayload("ABC").build();
		assertThat(this.converter.canConvertFrom(message, String.class)).isFalse();
		message = MessageBuilder.withPayload("ABC")
				.setHeader(MessageHeaders.CONTENT_TYPE, MimeTypeUtils.TEXT_PLAIN).build();
		assertThat(this.converter.canConvertFrom(message, String.class)).isTrue();
	}
	@Test
	void setStrictContentTypeMatchWithNoSupportedMimeTypes() {
		// Strict matching is rejected when the converter supports no MIME types at all.
		this.converter = new TestMessageConverter(new MimeType[0]);
		assertThatIllegalArgumentException().isThrownBy(() -> this.converter.setStrictContentTypeMatch(true));
	}
	@Test
	void toMessageWithHeaders() {
		// Caller-supplied headers are carried over; id/timestamp are generated and
		// the converter adds its content type.
		Map<String, Object> map = new HashMap<>();
		map.put("foo", "bar");
		MessageHeaders headers = new MessageHeaders(map);
		Message<?> message = this.converter.toMessage("ABC", headers);
		assertThat(message.getHeaders().getId()).isNotNull();
		assertThat(message.getHeaders().getTimestamp()).isNotNull();
		assertThat(message.getHeaders().get(MessageHeaders.CONTENT_TYPE)).isEqualTo(MimeTypeUtils.TEXT_PLAIN);
		assertThat(message.getHeaders().get("foo")).isEqualTo("bar");
	}
	@Test
	void toMessageWithMutableMessageHeaders() {
		// Mutable (leaveMutable) headers are reused as-is: the same instance comes
		// back and no id/timestamp are generated.
		SimpMessageHeaderAccessor accessor = SimpMessageHeaderAccessor.create(SimpMessageType.MESSAGE);
		accessor.setHeader("foo", "bar");
		accessor.setNativeHeader("fooNative", "barNative");
		accessor.setLeaveMutable(true);
		MessageHeaders headers = accessor.getMessageHeaders();
		Message<?> message = this.converter.toMessage("ABC", headers);
		assertThat(message.getHeaders()).isSameAs(headers);
		assertThat(message.getHeaders().getId()).isNull();
		assertThat(message.getHeaders().getTimestamp()).isNull();
		assertThat(message.getHeaders().get(MessageHeaders.CONTENT_TYPE)).isEqualTo(MimeTypeUtils.TEXT_PLAIN);
	}
	@Test
	void toMessageContentTypeHeader() {
		// The converter sets its content type (text/plain) even when no headers are given.
		Message<?> message = this.converter.toMessage("ABC", null);
		assertThat(message.getHeaders().get(MessageHeaders.CONTENT_TYPE)).isEqualTo(MimeTypeUtils.TEXT_PLAIN);
	}
	@Test // gh-29768
	public void toMessageDefaultContentType() {
		// A resolver-supplied default content type satisfies strict matching on outbound messages.
		DefaultContentTypeResolver resolver = new DefaultContentTypeResolver();
		resolver.setDefaultMimeType(MimeTypeUtils.TEXT_PLAIN);
		TestMessageConverter converter = new TestMessageConverter();
		converter.setContentTypeResolver(resolver);
		converter.setStrictContentTypeMatch(true);
		Message<?> message = converter.toMessage("ABC", null);
		assertThat(message.getHeaders().get(MessageHeaders.CONTENT_TYPE)).isEqualTo(MimeTypeUtils.TEXT_PLAIN);
	}
private static
|
MessageConverterTests
|
java
|
elastic__elasticsearch
|
modules/lang-expression/src/main/java/org/elasticsearch/script/expression/CountMethodValueSource.java
|
{
"start": 953,
"end": 2218
}
|
class ____ extends FieldDataBasedDoubleValuesSource {
CountMethodValueSource(IndexFieldData<?> fieldData) {
super(fieldData);
}
@Override
public DoubleValues getValues(LeafReaderContext ctx, DoubleValues scores) {
LeafNumericFieldData leafData = (LeafNumericFieldData) fieldData.load(ctx);
final SortedNumericDoubleValues values = leafData.getDoubleValues();
return new DoubleValues() {
@Override
public double doubleValue() {
return values.docValueCount();
}
@Override
public boolean advanceExact(int doc) throws IOException {
return values.advanceExact(doc);
}
};
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
CountMethodValueSource that = (CountMethodValueSource) o;
return fieldData.equals(that.fieldData);
}
@Override
public String toString() {
return "count: field(" + fieldData.getFieldName() + ")";
}
@Override
public int hashCode() {
return 31 * getClass().hashCode() + fieldData.hashCode();
}
}
|
CountMethodValueSource
|
java
|
apache__camel
|
core/camel-core-model/src/main/java/org/apache/camel/model/language/JsonPathExpression.java
|
{
"start": 9063,
"end": 9226
}
|
enum ____ {
DEFAULT_PATH_LEAF_TO_NULL,
ALWAYS_RETURN_LIST,
AS_PATH_LIST,
SUPPRESS_EXCEPTIONS,
REQUIRE_PROPERTIES
}
}
|
Option
|
java
|
apache__spark
|
core/src/test/java/org/apache/spark/memory/TestMemoryConsumer.java
|
{
"start": 981,
"end": 1822
}
|
class ____ extends MemoryConsumer {
public TestMemoryConsumer(TaskMemoryManager memoryManager, MemoryMode mode) {
super(memoryManager, 1024L, mode);
}
public TestMemoryConsumer(TaskMemoryManager memoryManager) {
this(memoryManager, MemoryMode.ON_HEAP);
}
@Override
public long spill(long size, MemoryConsumer trigger) throws IOException {
long used = getUsed();
free(used);
return used;
}
public void use(long size) {
long got = taskMemoryManager.acquireExecutionMemory(size, this);
used.getAndAdd(got);
}
public void free(long size) {
used.getAndAdd(-size);
taskMemoryManager.releaseExecutionMemory(size, this);
}
@VisibleForTesting
public void freePage(MemoryBlock page) {
used.getAndAdd(-page.size());
taskMemoryManager.freePage(page, this);
}
}
|
TestMemoryConsumer
|
java
|
apache__hadoop
|
hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/datatypes/NodeName.java
|
{
"start": 2355,
"end": 5234
}
|
class ____ a hardcoded name
if (!getName().equals(name)) {
throw new RuntimeException("State name mismatch! Expected '"
+ getName() + "' but found '" + name + "'.");
}
}
}
  public NodeName(String nodeName) {
    this.nodeName = nodeName;
    // Try to split a "/rack/host" style name; when parsing fails, treat the
    // whole string as a bare host name with no rack.
    ParsedHost pHost = ParsedHost.parse(nodeName);
    if (pHost == null) {
      this.rackName = null;
      this.hostName = nodeName;
    } else {
      //TODO check for null and improve .. possibly call NodeName(r,h)
      this.rackName = pHost.getRackName();
      this.hostName = pHost.getNodeName();
    }
  }
  public NodeName(String rName, String hName) {
    // Normalize empty strings to null so the branches below only deal with null.
    rName = (rName == null || rName.length() == 0) ? null : rName;
    hName = (hName == null || hName.length() == 0) ? null : hName;
    if (hName == null) {
      // Rack-only (or fully empty) name; hostName remains null.
      nodeName = rName;
      rackName = rName;
    } else if (rName == null) {
      // Host-only name, but the string may still encode a rack — try to parse it.
      nodeName = hName;
      ParsedHost pHost = ParsedHost.parse(nodeName);
      if (pHost == null) {
        this.rackName = null;
        this.hostName = hName;
      } else {
        this.rackName = pHost.getRackName();
        this.hostName = pHost.getNodeName();
      }
    } else {
      // Both parts present: compose the canonical "/rack/host" form.
      rackName = rName;
      this.hostName = hName;
      this.nodeName = "/" + rName + "/" + hName;
    }
  }
  /** Returns the host component, or null when only a rack name is known. */
  public String getHostName() {
    return hostName;
  }
  /** Returns the rack component, or null when it could not be determined. */
  public String getRackName() {
    return rackName;
  }
  @Override
  public String getValue() {
    // The raw (un-anonymized) node name as originally supplied or composed.
    return nodeName;
  }
  @Override
  public String getAnonymizedValue(StatePool statePool, Configuration conf) {
    // The root node name is never anonymized.
    if (this.getValue().equals(ROOT.getValue())) {
      return getValue();
    }
    // Lazily compute and cache the anonymized form on first request.
    if (anonymizedNodeName == null) {
      anonymize(statePool);
    }
    return anonymizedNodeName;
  }
  private void anonymize(StatePool pool) {
    StringBuilder buf = new StringBuilder();
    // Fetch (or lazily create) the shared per-class anonymization state so that
    // rack/host words map to stable indices across the whole run.
    NodeNameState state = (NodeNameState) pool.getState(getClass());
    if (state == null) {
      state = new NodeNameState();
      pool.addState(getClass(), state);
    }
    if (rackName != null && hostName != null) {
      // Fully-parsed "/rack/host" name: anonymize the two components separately.
      buf.append('/');
      buf.append(anonymize(rackName, state.getRackNameState()));
      buf.append('/');
      buf.append(anonymize(hostName, state.getHostNameState()));
    } else {
      // Single-component name: reuse the rack word list if this name is already
      // known as a rack (or was parsed as one); otherwise treat it as a host.
      if (state.getRackNameState().contains(nodeName) || rackName != null) {
        buf.append(anonymize(nodeName, state.getRackNameState()));
      } else {
        buf.append(anonymize(nodeName, state.getHostNameState()));
      }
    }
    anonymizedNodeName = buf.toString();
  }
//TODO There is no caching for saving memory.
  private static String anonymize(String data, WordList wordList) {
    if (data == null) {
      return null;
    }
    // Register first-seen words; the anonymized token is the list's name plus the
    // word's stable index, so the same input always maps to the same token.
    if (!wordList.contains(data)) {
      wordList.add(data);
    }
    return wordList.getName() + wordList.indexOf(data);
  }
}
|
has
|
java
|
apache__flink
|
flink-filesystems/flink-hadoop-fs/src/main/java/org/apache/flink/runtime/fs/hdfs/HadoopDataInputStream.java
|
{
"start": 1305,
"end": 7001
}
|
class ____ extends FSDataInputStream implements ByteBufferReadable {
/**
* Minimum amount of bytes to skip forward before we issue a seek instead of discarding read.
*
* <p>The current value is just a magic number. In the long run, this value could become
* configurable, but for now it is a conservative, relatively small value that should bring safe
* improvements for small skips (e.g. in reading meta data), that would hurt the most with
* frequent seeks.
*
* <p>The optimal value depends on the DFS implementation and configuration plus the underlying
* filesystem. For now, this number is chosen "big enough" to provide improvements for smaller
* seeks, and "small enough" to avoid disadvantages over real seeks. While the minimum should be
* the page size, a true optimum per system would be the amounts of bytes the can be consumed
* sequentially within the seektime. Unfortunately, seektime is not constant and devices, OS,
* and DFS potentially also use read buffers and read-ahead.
*/
public static final int MIN_SKIP_BYTES = 1024 * 1024;
/** The internal stream. */
private final org.apache.hadoop.fs.FSDataInputStream fsDataInputStream;
/**
* Creates a new data input stream from the given Hadoop input stream.
*
* @param fsDataInputStream The Hadoop input stream
*/
public HadoopDataInputStream(org.apache.hadoop.fs.FSDataInputStream fsDataInputStream) {
this.fsDataInputStream = checkNotNull(fsDataInputStream);
}
@Override
public void seek(long seekPos) throws IOException {
// We do some optimizations to avoid that some implementations of distributed FS perform
// expensive seeks when they are actually not needed.
long delta = seekPos - getPos();
if (delta > 0L && delta <= MIN_SKIP_BYTES) {
// Instead of a small forward seek, we skip over the gap
skipFully(delta);
} else if (delta != 0L) {
// For larger gaps and backward seeks, we do a real seek
forceSeek(seekPos);
} // Do nothing if delta is zero.
}
@Override
public long getPos() throws IOException {
return fsDataInputStream.getPos();
}
@Override
public int read() throws IOException {
return fsDataInputStream.read();
}
@Override
public void close() throws IOException {
fsDataInputStream.close();
}
@Override
public int read(@Nonnull byte[] buffer, int offset, int length) throws IOException {
return fsDataInputStream.read(buffer, offset, length);
}
@Override
public int available() throws IOException {
return fsDataInputStream.available();
}
@Override
public long skip(long n) throws IOException {
return fsDataInputStream.skip(n);
}
/**
* Gets the wrapped Hadoop input stream.
*
* @return The wrapped Hadoop input stream.
*/
public org.apache.hadoop.fs.FSDataInputStream getHadoopInputStream() {
return fsDataInputStream;
}
/**
* Positions the stream to the given location. In contrast to {@link #seek(long)}, this method
* will always issue a "seek" command to the dfs and may not replace it by {@link #skip(long)}
* for small seeks.
*
* <p>Notice that the underlying DFS implementation can still decide to do skip instead of seek.
*
* @param seekPos the position to seek to.
* @throws IOException
*/
public void forceSeek(long seekPos) throws IOException {
fsDataInputStream.seek(seekPos);
}
/**
* Skips over a given amount of bytes in the stream.
*
* @param bytes the number of bytes to skip.
* @throws IOException
*/
public void skipFully(long bytes) throws IOException {
while (bytes > 0) {
bytes -= fsDataInputStream.skip(bytes);
}
}
@Override
public int read(ByteBuffer byteBuffer) throws IOException {
// TODO: Use org.apache.hadoop.fs.FSDataInputStream#read(ByteBuffer) to improve the
// performance after updating hadoop version to 3.3.0 and above.
if (byteBuffer.hasArray()) {
int len = byteBuffer.remaining();
fsDataInputStream.readFully(byteBuffer.array(), byteBuffer.arrayOffset(), len);
return len;
} else {
// Fallback to read byte then put
int c = read();
if (c == -1) {
return -1;
}
byteBuffer.put((byte) c);
int n = 1, len = byteBuffer.remaining() + 1;
for (; n < len; n++) {
c = read();
if (c == -1) {
break;
}
byteBuffer.put((byte) c);
}
return n;
}
}
@Override
public int read(long position, ByteBuffer byteBuffer) throws IOException {
// TODO: Use org.apache.hadoop.fs.FSDataInputStream#read(long, ByteBuffer) to improve the
// performance after updating hadoop version to 3.3.0 and above.
if (byteBuffer.hasArray()) {
int len = byteBuffer.remaining();
fsDataInputStream.readFully(
position, byteBuffer.array(), byteBuffer.arrayOffset(), len);
return len;
} else {
// Fallback to positionable read bytes then put
byte[] tmp = new byte[byteBuffer.remaining()];
fsDataInputStream.readFully(position, tmp, 0, tmp.length);
byteBuffer.put(tmp);
return tmp.length;
}
}
}
|
HadoopDataInputStream
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/refaster/testdata/input/AnyOfTemplateExample.java
|
{
"start": 770,
"end": 986
}
|
class ____ {
public void foo(double d) {
if (Math.signum(d) == 0.0) {
System.out.println("zero");
}
if (0.0 == Math.signum(d)) {
System.out.println("also zero");
}
}
}
|
AnyOfTemplateExample
|
java
|
apache__spark
|
streaming/src/test/java/test/org/apache/spark/streaming/Java8APISuite.java
|
{
"start": 1987,
"end": 34188
}
|
class ____ extends LocalJavaStreamingContext implements Serializable {
@Test
public void testMap() {
List<List<String>> inputData = Arrays.asList(
Arrays.asList("hello", "world"),
Arrays.asList("goodnight", "moon"));
List<List<Integer>> expected = Arrays.asList(
Arrays.asList(5, 5),
Arrays.asList(9, 4));
JavaDStream<String> stream = JavaTestUtils.attachTestInputStream(ssc, inputData, 1);
JavaDStream<Integer> letterCount = stream.map(String::length);
JavaTestUtils.attachTestOutputStream(letterCount);
List<List<Integer>> result = JavaTestUtils.runStreams(ssc, 2, 2);
assertOrderInvariantEquals(expected, result);
}
@Test
public void testFilter() {
List<List<String>> inputData = Arrays.asList(
Arrays.asList("giants", "dodgers"),
Arrays.asList("yankees", "red sox"));
List<List<String>> expected = Arrays.asList(
Arrays.asList("giants"),
Arrays.asList("yankees"));
JavaDStream<String> stream = JavaTestUtils.attachTestInputStream(ssc, inputData, 1);
JavaDStream<String> filtered = stream.filter(s -> s.contains("a"));
JavaTestUtils.attachTestOutputStream(filtered);
List<List<String>> result = JavaTestUtils.runStreams(ssc, 2, 2);
assertOrderInvariantEquals(expected, result);
}
@Test
public void testMapPartitions() {
List<List<String>> inputData = Arrays.asList(
Arrays.asList("giants", "dodgers"),
Arrays.asList("yankees", "red sox"));
List<List<String>> expected = Arrays.asList(
Arrays.asList("GIANTSDODGERS"),
Arrays.asList("YANKEESRED SOX"));
JavaDStream<String> stream = JavaTestUtils.attachTestInputStream(ssc, inputData, 1);
JavaDStream<String> mapped = stream.mapPartitions(in -> {
String out = "";
while (in.hasNext()) {
out = out + in.next().toUpperCase(Locale.ROOT);
}
return Arrays.asList(out).iterator();
});
JavaTestUtils.attachTestOutputStream(mapped);
List<List<String>> result = JavaTestUtils.runStreams(ssc, 2, 2);
Assertions.assertEquals(expected, result);
}
@Test
public void testReduce() {
List<List<Integer>> inputData = Arrays.asList(
Arrays.asList(1, 2, 3),
Arrays.asList(4, 5, 6),
Arrays.asList(7, 8, 9));
List<List<Integer>> expected = Arrays.asList(
Arrays.asList(6),
Arrays.asList(15),
Arrays.asList(24));
JavaDStream<Integer> stream = JavaTestUtils.attachTestInputStream(ssc, inputData, 1);
JavaDStream<Integer> reduced = stream.reduce((x, y) -> x + y);
JavaTestUtils.attachTestOutputStream(reduced);
List<List<Integer>> result = JavaTestUtils.runStreams(ssc, 3, 3);
Assertions.assertEquals(expected, result);
}
@Test
public void testReduceByWindow() {
List<List<Integer>> inputData = Arrays.asList(
Arrays.asList(1, 2, 3),
Arrays.asList(4, 5, 6),
Arrays.asList(7, 8, 9));
List<List<Integer>> expected = Arrays.asList(
Arrays.asList(6),
Arrays.asList(21),
Arrays.asList(39),
Arrays.asList(24));
JavaDStream<Integer> stream = JavaTestUtils.attachTestInputStream(ssc, inputData, 1);
JavaDStream<Integer> reducedWindowed = stream.reduceByWindow(
(x, y) -> x + y, (x, y) -> x - y, new Duration(2000), new Duration(1000));
JavaTestUtils.attachTestOutputStream(reducedWindowed);
List<List<Integer>> result = JavaTestUtils.runStreams(ssc, 4, 4);
Assertions.assertEquals(expected, result);
}
@Test
public void testTransform() {
List<List<Integer>> inputData = Arrays.asList(
Arrays.asList(1, 2, 3),
Arrays.asList(4, 5, 6),
Arrays.asList(7, 8, 9));
List<List<Integer>> expected = Arrays.asList(
Arrays.asList(3, 4, 5),
Arrays.asList(6, 7, 8),
Arrays.asList(9, 10, 11));
JavaDStream<Integer> stream = JavaTestUtils.attachTestInputStream(ssc, inputData, 1);
JavaDStream<Integer> transformed = stream.transform(in -> in.map(i -> i + 2));
JavaTestUtils.attachTestOutputStream(transformed);
List<List<Integer>> result = JavaTestUtils.runStreams(ssc, 3, 3);
assertOrderInvariantEquals(expected, result);
}
@Test
public void testVariousTransform() {
// tests whether all variations of transform can be called from Java
List<List<Integer>> inputData = Arrays.asList(Arrays.asList(1));
JavaDStream<Integer> stream = JavaTestUtils.attachTestInputStream(ssc, inputData, 1);
List<List<Tuple2<String, Integer>>> pairInputData =
Arrays.asList(Arrays.asList(new Tuple2<>("x", 1)));
JavaPairDStream<String, Integer> pairStream = JavaPairDStream.fromJavaDStream(
JavaTestUtils.attachTestInputStream(ssc, pairInputData, 1));
JavaDStream<Integer> transformed1 = stream.transform(in -> null);
JavaDStream<Integer> transformed2 = stream.transform((x, time) -> null);
JavaPairDStream<String, Integer> transformed3 = stream.transformToPair(x -> null);
JavaPairDStream<String, Integer> transformed4 = stream.transformToPair((x, time) -> null);
JavaDStream<Integer> pairTransformed1 = pairStream.transform(x -> null);
JavaDStream<Integer> pairTransformed2 = pairStream.transform((x, time) -> null);
JavaPairDStream<String, String> pairTransformed3 = pairStream.transformToPair(x -> null);
JavaPairDStream<String, String> pairTransformed4 =
pairStream.transformToPair((x, time) -> null);
}
@Test
public void testTransformWith() {
// Joins two keyed streams batch-by-batch using transformWithToPair on the raw RDDs.
List<List<Tuple2<String, String>>> stringStringKVStream1 = Arrays.asList(
Arrays.asList(
new Tuple2<>("california", "dodgers"),
new Tuple2<>("new york", "yankees")),
Arrays.asList(
new Tuple2<>("california", "sharks"),
new Tuple2<>("new york", "rangers")));
List<List<Tuple2<String, String>>> stringStringKVStream2 = Arrays.asList(
Arrays.asList(
new Tuple2<>("california", "giants"),
new Tuple2<>("new york", "mets")),
Arrays.asList(
new Tuple2<>("california", "ducks"),
new Tuple2<>("new york", "islanders")));
// Expected values are sets because join output order within a batch is not deterministic.
List<Set<Tuple2<String, Tuple2<String, String>>>> expected = Arrays.asList(
Set.of(
new Tuple2<>("california",
new Tuple2<>("dodgers", "giants")),
new Tuple2<>("new york",
new Tuple2<>("yankees", "mets"))),
Set.of(
new Tuple2<>("california",
new Tuple2<>("sharks", "ducks")),
new Tuple2<>("new york",
new Tuple2<>("rangers", "islanders"))));
JavaDStream<Tuple2<String, String>> stream1 = JavaTestUtils.attachTestInputStream(
ssc, stringStringKVStream1, 1);
JavaPairDStream<String, String> pairStream1 = JavaPairDStream.fromJavaDStream(stream1);
JavaDStream<Tuple2<String, String>> stream2 = JavaTestUtils.attachTestInputStream(
ssc, stringStringKVStream2, 1);
JavaPairDStream<String, String> pairStream2 = JavaPairDStream.fromJavaDStream(stream2);
// The third lambda parameter (z) is the batch Time; unused here.
JavaPairDStream<String, Tuple2<String, String>> joined =
pairStream1.transformWithToPair(pairStream2,(x, y, z) -> x.join(y));
JavaTestUtils.attachTestOutputStream(joined);
List<List<Tuple2<String, Tuple2<String, String>>>> result = JavaTestUtils.runStreams(ssc, 2, 2);
// Convert each batch to a set before comparing, for order invariance.
List<Set<Tuple2<String, Tuple2<String, String>>>> unorderedResult = new ArrayList<>();
for (List<Tuple2<String, Tuple2<String, String>>> res : result) {
unorderedResult.add(new HashSet<>(res));
}
Assertions.assertEquals(expected, unorderedResult);
}
@Test
public void testVariousTransformWith() {
// tests whether all variations of transformWith can be called from Java
// NOTE: compile-time API check only — the lambdas return null and nothing runs.
List<List<Integer>> inputData1 = Arrays.asList(Arrays.asList(1));
List<List<String>> inputData2 = Arrays.asList(Arrays.asList("x"));
JavaDStream<Integer> stream1 = JavaTestUtils.attachTestInputStream(ssc, inputData1, 1);
JavaDStream<String> stream2 = JavaTestUtils.attachTestInputStream(ssc, inputData2, 1);
List<List<Tuple2<String, Integer>>> pairInputData1 =
Arrays.asList(Arrays.asList(new Tuple2<>("x", 1)));
List<List<Tuple2<Double, Character>>> pairInputData2 =
Arrays.asList(Arrays.asList(new Tuple2<>(1.0, 'x')));
JavaPairDStream<String, Integer> pairStream1 = JavaPairDStream.fromJavaDStream(
JavaTestUtils.attachTestInputStream(ssc, pairInputData1, 1));
JavaPairDStream<Double, Character> pairStream2 = JavaPairDStream.fromJavaDStream(
JavaTestUtils.attachTestInputStream(ssc, pairInputData2, 1));
// transformWith variants: DStream/PairDStream receiver x DStream/PairDStream other
// x DStream/PairDStream result. (x, y, z) = (this RDD, other RDD, batch Time).
JavaDStream<Double> transformed1 = stream1.transformWith(stream2, (x, y, z) -> null);
JavaDStream<Double> transformed2 = stream1.transformWith(pairStream1,(x, y, z) -> null);
JavaPairDStream<Double, Double> transformed3 =
stream1.transformWithToPair(stream2,(x, y, z) -> null);
JavaPairDStream<Double, Double> transformed4 =
stream1.transformWithToPair(pairStream1,(x, y, z) -> null);
JavaDStream<Double> pairTransformed1 = pairStream1.transformWith(stream2,(x, y, z) -> null);
JavaDStream<Double> pairTransformed2_ =
pairStream1.transformWith(pairStream1,(x, y, z) -> null);
JavaPairDStream<Double, Double> pairTransformed3 =
pairStream1.transformWithToPair(stream2,(x, y, z) -> null);
JavaPairDStream<Double, Double> pairTransformed4 =
pairStream1.transformWithToPair(pairStream2,(x, y, z) -> null);
}
@Test
public void testStreamingContextTransform() {
// Exercises StreamingContext-level transform over a *list* of DStreams, where the
// user function receives all the corresponding RDDs for a batch at once.
List<List<Integer>> stream1input = Arrays.asList(
Arrays.asList(1),
Arrays.asList(2)
);
List<List<Integer>> stream2input = Arrays.asList(
Arrays.asList(3),
Arrays.asList(4)
);
List<List<Tuple2<Integer, String>>> pairStream1input = Arrays.asList(
Arrays.asList(new Tuple2<>(1, "x")),
Arrays.asList(new Tuple2<>(2, "y"))
);
List<List<Tuple2<Integer, Tuple2<Integer, String>>>> expected = Arrays.asList(
Arrays.asList(new Tuple2<>(1, new Tuple2<>(1, "x"))),
Arrays.asList(new Tuple2<>(2, new Tuple2<>(2, "y")))
);
JavaDStream<Integer> stream1 = JavaTestUtils.attachTestInputStream(ssc, stream1input, 1);
JavaDStream<Integer> stream2 = JavaTestUtils.attachTestInputStream(ssc, stream2input, 1);
JavaPairDStream<Integer, String> pairStream1 = JavaPairDStream.fromJavaDStream(
JavaTestUtils.attachTestInputStream(ssc, pairStream1input, 1));
List<JavaDStream<?>> listOfDStreams1 = Arrays.asList(stream1, stream2);
// This is just to test whether this transform to JavaStream compiles
JavaDStream<Long> transformed1 = ssc.transform(
listOfDStreams1, (List<JavaRDD<?>> listOfRDDs, Time time) -> {
Assertions.assertEquals(2, listOfRDDs.size());
return null;
});
List<JavaDStream<?>> listOfDStreams2 =
Arrays.asList(stream1, stream2, pairStream1.toJavaDStream());
JavaPairDStream<Integer, Tuple2<Integer, String>> transformed2 = ssc.transformToPair(
listOfDStreams2, (List<JavaRDD<?>> listOfRDDs, Time time) -> {
Assertions.assertEquals(3, listOfRDDs.size());
// The wildcard-typed RDDs must be cast back to their concrete element types.
JavaRDD<Integer> rdd1 = (JavaRDD<Integer>) listOfRDDs.get(0);
JavaRDD<Integer> rdd2 = (JavaRDD<Integer>) listOfRDDs.get(1);
JavaRDD<Tuple2<Integer, String>> rdd3 = (JavaRDD<Tuple2<Integer, String>>) listOfRDDs.get(2);
JavaPairRDD<Integer, String> prdd3 = JavaPairRDD.fromJavaRDD(rdd3);
PairFunction<Integer, Integer, Integer> mapToTuple =
(Integer i) -> new Tuple2<>(i, i);
// (1,1),(2,2) joined against (1,"x"),(2,"y") yields the expected nested tuples.
return rdd1.union(rdd2).mapToPair(mapToTuple).join(prdd3);
});
JavaTestUtils.attachTestOutputStream(transformed2);
List<List<Tuple2<Integer, Tuple2<Integer, String>>>> result =
JavaTestUtils.runStreams(ssc, 2, 2);
Assertions.assertEquals(expected, result);
}
@Test
public void testFlatMap() {
  // Splits every word into its single characters. The regex "(?!^)" matches
  // between each pair of characters without producing a leading empty string.
  List<List<String>> input = Arrays.asList(
      Arrays.asList("go", "giants"),
      Arrays.asList("boo", "dodgers"),
      Arrays.asList("athletics"));
  List<List<String>> expected = Arrays.asList(
      Arrays.asList("g", "o", "g", "i", "a", "n", "t", "s"),
      Arrays.asList("b", "o", "o", "d", "o", "d", "g", "e", "r", "s"),
      Arrays.asList("a", "t", "h", "l", "e", "t", "i", "c", "s"));
  JavaDStream<String> words = JavaTestUtils.attachTestInputStream(ssc, input, 1);
  JavaDStream<String> letters =
      words.flatMap(word -> Arrays.asList(word.split("(?!^)")).iterator());
  JavaTestUtils.attachTestOutputStream(letters);
  List<List<String>> result = JavaTestUtils.runStreams(ssc, 3, 3);
  assertOrderInvariantEquals(expected, result);
}
@Test
public void testPairFlatMap() {
// Expands every word into (word length, single character) pairs.
List<List<String>> inputData = Arrays.asList(
Arrays.asList("giants"),
Arrays.asList("dodgers"),
Arrays.asList("athletics"));
List<List<Tuple2<Integer, String>>> expected = Arrays.asList(
Arrays.asList(
new Tuple2<>(6, "g"),
new Tuple2<>(6, "i"),
new Tuple2<>(6, "a"),
new Tuple2<>(6, "n"),
new Tuple2<>(6, "t"),
new Tuple2<>(6, "s")),
Arrays.asList(
new Tuple2<>(7, "d"),
new Tuple2<>(7, "o"),
new Tuple2<>(7, "d"),
new Tuple2<>(7, "g"),
new Tuple2<>(7, "e"),
new Tuple2<>(7, "r"),
new Tuple2<>(7, "s")),
Arrays.asList(
new Tuple2<>(9, "a"),
new Tuple2<>(9, "t"),
new Tuple2<>(9, "h"),
new Tuple2<>(9, "l"),
new Tuple2<>(9, "e"),
new Tuple2<>(9, "t"),
new Tuple2<>(9, "i"),
new Tuple2<>(9, "c"),
new Tuple2<>(9, "s")));
JavaDStream<String> stream = JavaTestUtils.attachTestInputStream(ssc, inputData, 1);
JavaPairDStream<Integer, String> flatMapped = stream.flatMapToPair(s -> {
List<Tuple2<Integer, String>> out = new ArrayList<>();
// "(?!^)" splits between every character without a leading empty token.
for (String letter : s.split("(?!^)")) {
out.add(new Tuple2<>(s.length(), letter));
}
return out.iterator();
});
JavaTestUtils.attachTestOutputStream(flatMapped);
List<List<Tuple2<Integer, String>>> result = JavaTestUtils.runStreams(ssc, 3, 3);
Assertions.assertEquals(expected, result);
}
/*
 * Performs an order-invariant comparison of lists representing two RDD streams. This allows
 * us to account for ordering variation within individual RDD's which occurs during windowing.
 */
public static <T extends Comparable<T>> void assertOrderInvariantEquals(
    List<List<T>> expected, List<List<T>> actual) {
  // Sort defensive copies of BOTH arguments. The previous version sorted `expected`
  // in place, mutating the caller's data (and it would throw on an unmodifiable list
  // such as List.of(...)); `actual` was already copied — this makes both symmetric.
  List<List<T>> sortedExpected = new ArrayList<>();
  for (List<T> list : expected) {
    List<T> sortedList = new ArrayList<>(list);
    Collections.sort(sortedList);
    sortedExpected.add(sortedList);
  }
  List<List<T>> sortedActual = new ArrayList<>();
  for (List<T> list : actual) {
    List<T> sortedList = new ArrayList<>(list);
    Collections.sort(sortedList);
    sortedActual.add(sortedList);
  }
  Assertions.assertEquals(sortedExpected, sortedActual);
}
@Test
public void testPairFilter() {
  // Maps words to (word, length) pairs, then keeps only keys containing 'a'.
  List<List<String>> input = Arrays.asList(
      Arrays.asList("giants", "dodgers"),
      Arrays.asList("yankees", "red sox"));
  List<List<Tuple2<String, Integer>>> expected = Arrays.asList(
      Arrays.asList(new Tuple2<>("giants", 6)),
      Arrays.asList(new Tuple2<>("yankees", 7)));
  JavaDStream<String> words = JavaTestUtils.attachTestInputStream(ssc, input, 1);
  JavaPairDStream<String, Integer> wordLengths =
      words.mapToPair(word -> new Tuple2<>(word, word.length()));
  JavaPairDStream<String, Integer> containingA =
      wordLengths.filter(pair -> pair._1().contains("a"));
  JavaTestUtils.attachTestOutputStream(containingA);
  List<List<Tuple2<String, Integer>>> result = JavaTestUtils.runStreams(ssc, 2, 2);
  Assertions.assertEquals(expected, result);
}
// Shared fixture: two batches of (state, team-name) pairs used by the
// mapValues / flatMapValues tests below.
List<List<Tuple2<String, String>>> stringStringKVStream = Arrays.asList(
Arrays.asList(new Tuple2<>("california", "dodgers"),
new Tuple2<>("california", "giants"),
new Tuple2<>("new york", "yankees"),
new Tuple2<>("new york", "mets")),
Arrays.asList(new Tuple2<>("california", "sharks"),
new Tuple2<>("california", "ducks"),
new Tuple2<>("new york", "rangers"),
new Tuple2<>("new york", "islanders")));
// Shared fixture: two batches of (state, count) pairs. Per-key sums are
// batch 1: california=4, new york=5; batch 2: california=10, new york=4 —
// the reduce/combine/window tests below rely on these totals.
List<List<Tuple2<String, Integer>>> stringIntKVStream = Arrays.asList(
Arrays.asList(
new Tuple2<>("california", 1),
new Tuple2<>("california", 3),
new Tuple2<>("new york", 4),
new Tuple2<>("new york", 1)),
Arrays.asList(
new Tuple2<>("california", 5),
new Tuple2<>("california", 5),
new Tuple2<>("new york", 3),
new Tuple2<>("new york", 1)));
@Test
public void testPairMap() { // Maps pair -> pair of different type
  // Swaps each (state, count) pair into (count, state).
  List<List<Tuple2<String, Integer>>> input = stringIntKVStream;
  List<List<Tuple2<Integer, String>>> expected = Arrays.asList(
      Arrays.asList(
          new Tuple2<>(1, "california"),
          new Tuple2<>(3, "california"),
          new Tuple2<>(4, "new york"),
          new Tuple2<>(1, "new york")),
      Arrays.asList(
          new Tuple2<>(5, "california"),
          new Tuple2<>(5, "california"),
          new Tuple2<>(3, "new york"),
          new Tuple2<>(1, "new york")));
  JavaDStream<Tuple2<String, Integer>> tupleStream =
      JavaTestUtils.attachTestInputStream(ssc, input, 1);
  JavaPairDStream<String, Integer> keyed = JavaPairDStream.fromJavaDStream(tupleStream);
  JavaPairDStream<Integer, String> swapped = keyed.mapToPair(pair -> pair.swap());
  JavaTestUtils.attachTestOutputStream(swapped);
  List<List<Tuple2<Integer, String>>> result = JavaTestUtils.runStreams(ssc, 2, 2);
  Assertions.assertEquals(expected, result);
}
@Test
public void testPairMapPartitions() { // Maps pair -> pair of different type
  // Same swap as testPairMap, but performed one partition at a time via
  // mapPartitionsToPair.
  List<List<Tuple2<String, Integer>>> inputData = stringIntKVStream;
  List<List<Tuple2<Integer, String>>> expected = Arrays.asList(
      Arrays.asList(
          new Tuple2<>(1, "california"),
          new Tuple2<>(3, "california"),
          new Tuple2<>(4, "new york"),
          new Tuple2<>(1, "new york")),
      Arrays.asList(
          new Tuple2<>(5, "california"),
          new Tuple2<>(5, "california"),
          new Tuple2<>(3, "new york"),
          new Tuple2<>(1, "new york")));
  JavaDStream<Tuple2<String, Integer>> stream =
      JavaTestUtils.attachTestInputStream(ssc, inputData, 1);
  JavaPairDStream<String, Integer> pairStream = JavaPairDStream.fromJavaDStream(stream);
  JavaPairDStream<Integer, String> reversed = pairStream.mapPartitionsToPair(in -> {
    // ArrayList instead of LinkedList: identical semantics, lower per-element
    // overhead, and the idiomatic default List implementation.
    List<Tuple2<Integer, String>> out = new ArrayList<>();
    while (in.hasNext()) {
      out.add(in.next().swap());
    }
    return out.iterator();
  });
  JavaTestUtils.attachTestOutputStream(reversed);
  List<List<Tuple2<Integer, String>>> result = JavaTestUtils.runStreams(ssc, 2, 2);
  Assertions.assertEquals(expected, result);
}
@Test
public void testPairMap2() { // Maps pair -> single
  // Projects each (state, count) pair down to just the count.
  List<List<Tuple2<String, Integer>>> input = stringIntKVStream;
  List<List<Integer>> expected = Arrays.asList(
      Arrays.asList(1, 3, 4, 1),
      Arrays.asList(5, 5, 3, 1));
  JavaDStream<Tuple2<String, Integer>> tupleStream =
      JavaTestUtils.attachTestInputStream(ssc, input, 1);
  JavaPairDStream<String, Integer> keyed = JavaPairDStream.fromJavaDStream(tupleStream);
  JavaDStream<Integer> counts = keyed.map(pair -> pair._2());
  JavaTestUtils.attachTestOutputStream(counts);
  List<List<Integer>> result = JavaTestUtils.runStreams(ssc, 2, 2);
  Assertions.assertEquals(expected, result);
}
@Test
public void testPairToPairFlatMapWithChangingTypes() { // Maps pair -> pair
  // Expands each (word, n) pair into one (n, character) pair per letter of the word.
  List<List<Tuple2<String, Integer>>> inputData = Arrays.asList(
      Arrays.asList(
          new Tuple2<>("hi", 1),
          new Tuple2<>("ho", 2)),
      Arrays.asList(
          new Tuple2<>("hi", 1),
          new Tuple2<>("ho", 2)));
  List<List<Tuple2<Integer, String>>> expected = Arrays.asList(
      Arrays.asList(
          new Tuple2<>(1, "h"),
          new Tuple2<>(1, "i"),
          new Tuple2<>(2, "h"),
          new Tuple2<>(2, "o")),
      Arrays.asList(
          new Tuple2<>(1, "h"),
          new Tuple2<>(1, "i"),
          new Tuple2<>(2, "h"),
          new Tuple2<>(2, "o")));
  JavaDStream<Tuple2<String, Integer>> stream =
      JavaTestUtils.attachTestInputStream(ssc, inputData, 1);
  JavaPairDStream<String, Integer> pairStream = JavaPairDStream.fromJavaDStream(stream);
  JavaPairDStream<Integer, String> flatMapped = pairStream.flatMapToPair(in -> {
    // ArrayList instead of LinkedList, and primitive char iteration instead of
    // boxing every character into a Character just to call toString() on it.
    List<Tuple2<Integer, String>> out = new ArrayList<>();
    for (char c : in._1().toCharArray()) {
      out.add(new Tuple2<>(in._2(), String.valueOf(c)));
    }
    return out.iterator();
  });
  JavaTestUtils.attachTestOutputStream(flatMapped);
  List<List<Tuple2<Integer, String>>> result = JavaTestUtils.runStreams(ssc, 2, 2);
  Assertions.assertEquals(expected, result);
}
@Test
public void testPairReduceByKey() {
  // Sums the counts per state within each batch.
  List<List<Tuple2<String, Integer>>> input = stringIntKVStream;
  List<List<Tuple2<String, Integer>>> expected = Arrays.asList(
      Arrays.asList(
          new Tuple2<>("california", 4),
          new Tuple2<>("new york", 5)),
      Arrays.asList(
          new Tuple2<>("california", 10),
          new Tuple2<>("new york", 4)));
  JavaDStream<Tuple2<String, Integer>> tupleStream =
      JavaTestUtils.attachTestInputStream(ssc, input, 1);
  JavaPairDStream<String, Integer> keyed = JavaPairDStream.fromJavaDStream(tupleStream);
  JavaPairDStream<String, Integer> perKeySums = keyed.reduceByKey(Integer::sum);
  JavaTestUtils.attachTestOutputStream(perKeySums);
  List<List<Tuple2<String, Integer>>> result = JavaTestUtils.runStreams(ssc, 2, 2);
  Assertions.assertEquals(expected, result);
}
@Test
public void testCombineByKey() {
  // combineByKey with identity as the combiner-creator and addition for both the
  // within-partition and cross-partition merges — i.e. a per-key sum.
  List<List<Tuple2<String, Integer>>> input = stringIntKVStream;
  List<List<Tuple2<String, Integer>>> expected = Arrays.asList(
      Arrays.asList(
          new Tuple2<>("california", 4),
          new Tuple2<>("new york", 5)),
      Arrays.asList(
          new Tuple2<>("california", 10),
          new Tuple2<>("new york", 4)));
  JavaDStream<Tuple2<String, Integer>> tupleStream =
      JavaTestUtils.attachTestInputStream(ssc, input, 1);
  JavaPairDStream<String, Integer> keyed = JavaPairDStream.fromJavaDStream(tupleStream);
  JavaPairDStream<String, Integer> perKeySums = keyed.combineByKey(
      value -> value, Integer::sum, Integer::sum, new HashPartitioner(2));
  JavaTestUtils.attachTestOutputStream(perKeySums);
  List<List<Tuple2<String, Integer>>> result = JavaTestUtils.runStreams(ssc, 2, 2);
  Assertions.assertEquals(expected, result);
}
@Test
public void testReduceByKeyAndWindow() {
  // Per-key sums over a 2s window sliding every 1s: the middle batch therefore
  // contains the totals of both input batches combined.
  List<List<Tuple2<String, Integer>>> input = stringIntKVStream;
  List<List<Tuple2<String, Integer>>> expected = Arrays.asList(
      Arrays.asList(new Tuple2<>("california", 4),
          new Tuple2<>("new york", 5)),
      Arrays.asList(new Tuple2<>("california", 14),
          new Tuple2<>("new york", 9)),
      Arrays.asList(new Tuple2<>("california", 10),
          new Tuple2<>("new york", 4)));
  JavaDStream<Tuple2<String, Integer>> tupleStream =
      JavaTestUtils.attachTestInputStream(ssc, input, 1);
  JavaPairDStream<String, Integer> keyed = JavaPairDStream.fromJavaDStream(tupleStream);
  JavaPairDStream<String, Integer> windowedSums =
      keyed.reduceByKeyAndWindow(Integer::sum, new Duration(2000), new Duration(1000));
  JavaTestUtils.attachTestOutputStream(windowedSums);
  List<List<Tuple2<String, Integer>>> result = JavaTestUtils.runStreams(ssc, 3, 3);
  Assertions.assertEquals(expected, result);
}
@Test
public void testUpdateStateByKey() {
// Running per-key totals: each batch's output is the cumulative sum so far,
// which is why the third batch repeats the second (no new input arrives).
List<List<Tuple2<String, Integer>>> inputData = stringIntKVStream;
List<List<Tuple2<String, Integer>>> expected = Arrays.asList(
Arrays.asList(new Tuple2<>("california", 4),
new Tuple2<>("new york", 5)),
Arrays.asList(new Tuple2<>("california", 14),
new Tuple2<>("new york", 9)),
Arrays.asList(new Tuple2<>("california", 14),
new Tuple2<>("new york", 9)));
JavaDStream<Tuple2<String, Integer>> stream =
JavaTestUtils.attachTestInputStream(ssc, inputData, 1);
JavaPairDStream<String, Integer> pairStream = JavaPairDStream.fromJavaDStream(stream);
// state holds the previous cumulative sum for the key (absent on first sight);
// values are the new counts seen for the key in the current batch.
JavaPairDStream<String, Integer> updated = pairStream.updateStateByKey((values, state) -> {
int out = 0;
if (state.isPresent()) {
out = out + state.get();
}
for (Integer v : values) {
out = out + v;
}
return Optional.of(out);
});
JavaTestUtils.attachTestOutputStream(updated);
List<List<Tuple2<String, Integer>>> result = JavaTestUtils.runStreams(ssc, 3, 3);
Assertions.assertEquals(expected, result);
}
@Test
public void testReduceByKeyAndWindowWithInverse() {
// Same windowed sums as testReduceByKeyAndWindow, but using the incremental
// variant: the inverse function (subtraction) removes values that slide out of
// the window instead of recomputing the whole window each time.
List<List<Tuple2<String, Integer>>> inputData = stringIntKVStream;
List<List<Tuple2<String, Integer>>> expected = Arrays.asList(
Arrays.asList(new Tuple2<>("california", 4),
new Tuple2<>("new york", 5)),
Arrays.asList(new Tuple2<>("california", 14),
new Tuple2<>("new york", 9)),
Arrays.asList(new Tuple2<>("california", 10),
new Tuple2<>("new york", 4)));
JavaDStream<Tuple2<String, Integer>> stream =
JavaTestUtils.attachTestInputStream(ssc, inputData, 1);
JavaPairDStream<String, Integer> pairStream = JavaPairDStream.fromJavaDStream(stream);
JavaPairDStream<String, Integer> reduceWindowed =
pairStream.reduceByKeyAndWindow((x, y) -> x + y, (x, y) -> x - y, new Duration(2000),
new Duration(1000));
JavaTestUtils.attachTestOutputStream(reduceWindowed);
List<List<Tuple2<String, Integer>>> result = JavaTestUtils.runStreams(ssc, 3, 3);
Assertions.assertEquals(expected, result);
}
@Test
public void testPairTransform() {
  // Sorts each batch by key via transformToPair on the underlying pair RDD.
  List<List<Tuple2<Integer, Integer>>> input = Arrays.asList(
      Arrays.asList(
          new Tuple2<>(3, 5),
          new Tuple2<>(1, 5),
          new Tuple2<>(4, 5),
          new Tuple2<>(2, 5)),
      Arrays.asList(
          new Tuple2<>(2, 5),
          new Tuple2<>(3, 5),
          new Tuple2<>(4, 5),
          new Tuple2<>(1, 5)));
  List<List<Tuple2<Integer, Integer>>> expected = Arrays.asList(
      Arrays.asList(
          new Tuple2<>(1, 5),
          new Tuple2<>(2, 5),
          new Tuple2<>(3, 5),
          new Tuple2<>(4, 5)),
      Arrays.asList(
          new Tuple2<>(1, 5),
          new Tuple2<>(2, 5),
          new Tuple2<>(3, 5),
          new Tuple2<>(4, 5)));
  JavaDStream<Tuple2<Integer, Integer>> tupleStream =
      JavaTestUtils.attachTestInputStream(ssc, input, 1);
  JavaPairDStream<Integer, Integer> keyed = JavaPairDStream.fromJavaDStream(tupleStream);
  JavaPairDStream<Integer, Integer> byKey = keyed.transformToPair(rdd -> rdd.sortByKey());
  JavaTestUtils.attachTestOutputStream(byKey);
  List<List<Tuple2<Integer, Integer>>> result = JavaTestUtils.runStreams(ssc, 2, 2);
  Assertions.assertEquals(expected, result);
}
@Test
public void testPairToNormalRDDTransform() {
  // Projects a pair DStream down to a plain DStream of its keys via transform.
  List<List<Tuple2<Integer, Integer>>> input = Arrays.asList(
      Arrays.asList(
          new Tuple2<>(3, 5),
          new Tuple2<>(1, 5),
          new Tuple2<>(4, 5),
          new Tuple2<>(2, 5)),
      Arrays.asList(
          new Tuple2<>(2, 5),
          new Tuple2<>(3, 5),
          new Tuple2<>(4, 5),
          new Tuple2<>(1, 5)));
  List<List<Integer>> expected = Arrays.asList(
      Arrays.asList(3, 1, 4, 2),
      Arrays.asList(2, 3, 4, 1));
  JavaDStream<Tuple2<Integer, Integer>> tupleStream =
      JavaTestUtils.attachTestInputStream(ssc, input, 1);
  JavaPairDStream<Integer, Integer> keyed = JavaPairDStream.fromJavaDStream(tupleStream);
  JavaDStream<Integer> keysOnly = keyed.transform(rdd -> rdd.map(Tuple2::_1));
  JavaTestUtils.attachTestOutputStream(keysOnly);
  List<List<Integer>> result = JavaTestUtils.runStreams(ssc, 2, 2);
  Assertions.assertEquals(expected, result);
}
@Test
public void testMapValues() {
  // Upper-cases every team name while leaving the state keys untouched.
  List<List<Tuple2<String, String>>> input = stringStringKVStream;
  List<List<Tuple2<String, String>>> expected = Arrays.asList(
      Arrays.asList(new Tuple2<>("california", "DODGERS"),
          new Tuple2<>("california", "GIANTS"),
          new Tuple2<>("new york", "YANKEES"),
          new Tuple2<>("new york", "METS")),
      Arrays.asList(new Tuple2<>("california", "SHARKS"),
          new Tuple2<>("california", "DUCKS"),
          new Tuple2<>("new york", "RANGERS"),
          new Tuple2<>("new york", "ISLANDERS")));
  JavaDStream<Tuple2<String, String>> tupleStream =
      JavaTestUtils.attachTestInputStream(ssc, input, 1);
  JavaPairDStream<String, String> keyed = JavaPairDStream.fromJavaDStream(tupleStream);
  // Locale.ROOT keeps the case conversion independent of the default locale.
  JavaPairDStream<String, String> upperCased =
      keyed.mapValues(name -> name.toUpperCase(Locale.ROOT));
  JavaTestUtils.attachTestOutputStream(upperCased);
  List<List<Tuple2<String, String>>> result = JavaTestUtils.runStreams(ssc, 2, 2);
  Assertions.assertEquals(expected, result);
}
@Test
public void testFlatMapValues() {
// Expands every value into two values ("<value>1" and "<value>2") under the same key.
List<List<Tuple2<String, String>>> inputData = stringStringKVStream;
List<List<Tuple2<String, String>>> expected = Arrays.asList(
Arrays.asList(new Tuple2<>("california", "dodgers1"),
new Tuple2<>("california", "dodgers2"),
new Tuple2<>("california", "giants1"),
new Tuple2<>("california", "giants2"),
new Tuple2<>("new york", "yankees1"),
new Tuple2<>("new york", "yankees2"),
new Tuple2<>("new york", "mets1"),
new Tuple2<>("new york", "mets2")),
Arrays.asList(new Tuple2<>("california", "sharks1"),
new Tuple2<>("california", "sharks2"),
new Tuple2<>("california", "ducks1"),
new Tuple2<>("california", "ducks2"),
new Tuple2<>("new york", "rangers1"),
new Tuple2<>("new york", "rangers2"),
new Tuple2<>("new york", "islanders1"),
new Tuple2<>("new york", "islanders2")));
JavaDStream<Tuple2<String, String>> stream = JavaTestUtils.attachTestInputStream(
ssc, inputData, 1);
JavaPairDStream<String, String> pairStream = JavaPairDStream.fromJavaDStream(stream);
JavaPairDStream<String, String> flatMapped =
pairStream.flatMapValues(in -> Arrays.asList(in + "1", in + "2").iterator());
JavaTestUtils.attachTestOutputStream(flatMapped);
List<List<Tuple2<String, String>>> result = JavaTestUtils.runStreams(ssc, 2, 2);
Assertions.assertEquals(expected, result);
}
/**
 * This test is only for testing the APIs. It's not necessary to run it.
 */
public void testMapWithStateAPI() {
// Deliberately not annotated with @Test: the nulls below would NPE at runtime.
// This method only has to compile, proving the mapWithState API shapes work from Java.
JavaPairRDD<String, Boolean> initialRDD = null;
JavaPairDStream<String, Integer> wordsDstream = null;
// Variant 1: Function4 — the mapping function also receives the batch Time and
// returns an Optional result.
Function4<Time, String, Optional<Integer>, State<Boolean>, Optional<Double>> mapFn =
(time, key, value, state) -> {
// Use all State's methods here
state.exists();
state.get();
state.isTimingOut();
state.remove();
state.update(true);
return Optional.of(2.0);
};
JavaMapWithStateDStream<String, Integer, Boolean, Double> stateDstream =
wordsDstream.mapWithState(
StateSpec.function(mapFn)
.initialState(initialRDD)
.numPartitions(10)
.partitioner(new HashPartitioner(10))
.timeout(Durations.seconds(10)));
JavaPairDStream<String, Boolean> emittedRecords = stateDstream.stateSnapshots();
// Variant 2: Function3 — no Time parameter and a plain (non-Optional) result.
Function3<String, Optional<Integer>, State<Boolean>, Double> mapFn2 =
(key, value, state) -> {
state.exists();
state.get();
state.isTimingOut();
state.remove();
state.update(true);
return 2.0;
};
JavaMapWithStateDStream<String, Integer, Boolean, Double> stateDstream2 =
wordsDstream.mapWithState(
StateSpec.function(mapFn2)
.initialState(initialRDD)
.numPartitions(10)
.partitioner(new HashPartitioner(10))
.timeout(Durations.seconds(10)));
JavaPairDStream<String, Boolean> mappedDStream = stateDstream2.stateSnapshots();
}
}
|
Java8APISuite
|
java
|
elastic__elasticsearch
|
modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookupBuilder.java
|
{
"start": 75384,
"end": 75648
}
|
class ____ A
while (superClass != null) {
if (classesToPainlessClassBuilders.containsKey(superClass)) {
break;
} else {
// this ensures all interfaces from a sub
|
of
|
java
|
apache__flink
|
flink-runtime/src/main/java/org/apache/flink/runtime/operators/util/CloseableInputProvider.java
|
{
"start": 1044,
"end": 1352
}
|
interface ____<E> extends Closeable {
/**
* Gets the iterator over this input.
*
* @return The iterator provided by this iterator provider.
* @throws InterruptedException
*/
MutableObjectIterator<E> getIterator() throws InterruptedException, IOException;
}
|
CloseableInputProvider
|
java
|
elastic__elasticsearch
|
server/src/test/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesFormatVariableSkipIntervalTests.java
|
{
"start": 1334,
"end": 9867
}
|
class ____ extends BaseDocValuesFormatTestCase {
@Override
protected Codec getCodec() {
// small interval size to test with many intervals
return TestUtil.alwaysDocValuesFormat(new ES87TSDBDocValuesFormatTests.TestES87TSDBDocValuesFormat(random().nextInt(4, 16)));
}
public void testSkipIndexIntervalSize() {
IllegalArgumentException ex = expectThrows(
IllegalArgumentException.class,
() -> new ES87TSDBDocValuesFormat(random().nextInt(Integer.MIN_VALUE, 2))
);
assertTrue(ex.getMessage().contains("skipIndexIntervalSize must be > 1"));
}
public void testSkipperAllEqualValue() throws IOException {
final IndexWriterConfig config = new IndexWriterConfig().setCodec(getCodec());
try (Directory directory = newDirectory(); RandomIndexWriter writer = new RandomIndexWriter(random(), directory, config)) {
final int numDocs = atLeast(100);
for (int i = 0; i < numDocs; i++) {
final Document doc = new Document();
doc.add(NumericDocValuesField.indexedField("dv", 0L));
writer.addDocument(doc);
}
writer.forceMerge(1);
try (IndexReader reader = writer.getReader()) {
assertEquals(1, reader.leaves().size());
final DocValuesSkipper skipper = reader.leaves().get(0).reader().getDocValuesSkipper("dv");
assertNotNull(skipper);
skipper.advance(0);
assertEquals(0L, skipper.minValue(0));
assertEquals(0L, skipper.maxValue(0));
assertEquals(numDocs, skipper.docCount(0));
skipper.advance(skipper.maxDocID(0) + 1);
assertEquals(DocIdSetIterator.NO_MORE_DOCS, skipper.minDocID(0));
}
}
}
// break on different value
public void testSkipperFewValuesSorted() throws IOException {
final IndexWriterConfig config = new IndexWriterConfig().setCodec(getCodec());
boolean reverse = random().nextBoolean();
config.setIndexSort(new Sort(new SortField("dv", SortField.Type.LONG, reverse)));
try (Directory directory = newDirectory(); RandomIndexWriter writer = new RandomIndexWriter(random(), directory, config)) {
final int intervals = random().nextInt(2, 10);
final int[] numDocs = new int[intervals];
for (int i = 0; i < intervals; i++) {
numDocs[i] = random().nextInt(10) + 16;
for (int j = 0; j < numDocs[i]; j++) {
final Document doc = new Document();
doc.add(NumericDocValuesField.indexedField("dv", i));
writer.addDocument(doc);
}
}
writer.forceMerge(1);
try (IndexReader reader = writer.getReader()) {
assertEquals(1, reader.leaves().size());
final DocValuesSkipper skipper = reader.leaves().get(0).reader().getDocValuesSkipper("dv");
assertNotNull(skipper);
assertEquals(Arrays.stream(numDocs).sum(), skipper.docCount());
skipper.advance(0);
if (reverse) {
for (int i = intervals - 1; i >= 0; i--) {
assertEquals(i, skipper.minValue(0));
assertEquals(i, skipper.maxValue(0));
assertEquals(numDocs[i], skipper.docCount(0));
skipper.advance(skipper.maxDocID(0) + 1);
}
} else {
for (int i = 0; i < intervals; i++) {
assertEquals(i, skipper.minValue(0));
assertEquals(i, skipper.maxValue(0));
assertEquals(numDocs[i], skipper.docCount(0));
skipper.advance(skipper.maxDocID(0) + 1);
}
}
assertEquals(DocIdSetIterator.NO_MORE_DOCS, skipper.minDocID(0));
}
}
}
// break on empty doc values
public void testSkipperAllEqualValueWithGaps() throws IOException {
final IndexWriterConfig config = new IndexWriterConfig().setCodec(getCodec());
config.setIndexSort(new Sort(new SortField("sort", SortField.Type.LONG, false)));
try (Directory directory = newDirectory(); RandomIndexWriter writer = new RandomIndexWriter(random(), directory, config)) {
final int gaps = random().nextInt(2, 10);
final int[] numDocs = new int[gaps];
long totaldocs = 0;
for (int i = 0; i < gaps; i++) {
numDocs[i] = random().nextInt(10) + 16;
for (int j = 0; j < numDocs[i]; j++) {
final Document doc = new Document();
doc.add(new NumericDocValuesField("sort", totaldocs++));
doc.add(SortedNumericDocValuesField.indexedField("dv", 0L));
writer.addDocument(doc);
}
// add doc with empty "dv"
final Document doc = new Document();
doc.add(new NumericDocValuesField("sort", totaldocs++));
writer.addDocument(doc);
}
writer.forceMerge(1);
try (IndexReader reader = writer.getReader()) {
assertEquals(1, reader.leaves().size());
final DocValuesSkipper skipper = reader.leaves().get(0).reader().getDocValuesSkipper("dv");
assertNotNull(skipper);
assertEquals(Arrays.stream(numDocs).sum(), skipper.docCount());
skipper.advance(0);
for (int i = 0; i < gaps; i++) {
assertEquals(0L, skipper.minValue(0));
assertEquals(0L, skipper.maxValue(0));
assertEquals(numDocs[i], skipper.docCount(0));
skipper.advance(skipper.maxDocID(0) + 1);
}
assertEquals(DocIdSetIterator.NO_MORE_DOCS, skipper.minDocID(0));
}
}
}
// break on multi-values
public void testSkipperAllEqualValueWithMultiValues() throws IOException {
final IndexWriterConfig config = new IndexWriterConfig().setCodec(getCodec());
config.setIndexSort(new Sort(new SortField("sort", SortField.Type.LONG, false)));
try (Directory directory = newDirectory(); RandomIndexWriter writer = new RandomIndexWriter(random(), directory, config)) {
final int gaps = random().nextInt(2, 10);
final int[] numDocs = new int[gaps];
long totaldocs = 0;
for (int i = 0; i < gaps; i++) {
int docs = random().nextInt(10) + 16;
numDocs[i] += docs;
for (int j = 0; j < docs; j++) {
final Document doc = new Document();
doc.add(new NumericDocValuesField("sort", totaldocs++));
doc.add(SortedNumericDocValuesField.indexedField("dv", 0L));
writer.addDocument(doc);
}
if (i != gaps - 1) {
// add doc with mutivalues
final Document doc = new Document();
doc.add(new NumericDocValuesField("sort", totaldocs++));
doc.add(SortedNumericDocValuesField.indexedField("dv", 0L));
doc.add(SortedNumericDocValuesField.indexedField("dv", 0L));
writer.addDocument(doc);
numDocs[i + 1] = 1;
}
}
writer.forceMerge(1);
try (IndexReader reader = writer.getReader()) {
assertEquals(1, reader.leaves().size());
final DocValuesSkipper skipper = reader.leaves().get(0).reader().getDocValuesSkipper("dv");
assertNotNull(skipper);
assertEquals(Arrays.stream(numDocs).sum(), skipper.docCount());
skipper.advance(0);
for (int i = 0; i < gaps; i++) {
assertEquals(0L, skipper.minValue(0));
assertEquals(0L, skipper.maxValue(0));
assertEquals(numDocs[i], skipper.docCount(0));
skipper.advance(skipper.maxDocID(0) + 1);
}
assertEquals(DocIdSetIterator.NO_MORE_DOCS, skipper.minDocID(0));
}
}
}
}
|
ES87TSDBDocValuesFormatVariableSkipIntervalTests
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/mapping/basic/CharacterMappingTests.java
|
{
"start": 2949,
"end": 3360
}
|
class ____ {
@Id
Integer id;
//tag::basic-character-example-implicit[]
// these will be mapped using CHAR
Character wrapper;
char primitive;
//end::basic-character-example-implicit[]
public EntityOfCharacters() {
}
public EntityOfCharacters(Integer id, Character wrapper, char primitive) {
this.id = id;
this.wrapper = wrapper;
this.primitive = primitive;
}
}
}
|
EntityOfCharacters
|
java
|
quarkusio__quarkus
|
extensions/liquibase/liquibase/runtime-dev/src/main/java/io/quarkus/liquibase/runtime/dev/ui/LiquibaseJsonRpcService.java
|
{
"start": 272,
"end": 1431
}
|
class ____ {
private Collection<LiquibaseFactory> factories;
@PostConstruct
void init() {
factories = new LiquibaseFactoriesSupplier().get();
}
public boolean clear(String ds) throws Exception {
for (LiquibaseFactory lf : factories) {
if (ds.equalsIgnoreCase(lf.getDataSourceName())) {
try (Liquibase liquibase = lf.createLiquibase()) {
liquibase.dropAll();
}
return true;
}
}
return false;
}
public boolean migrate(String ds) throws Exception {
for (LiquibaseFactory lf : factories) {
if (ds.equalsIgnoreCase(lf.getDataSourceName())) {
try (Liquibase liquibase = lf.createLiquibase()) {
liquibase.update(lf.createContexts(), lf.createLabels());
}
return true;
}
}
return false;
}
public Integer getDatasourceCount() {
return factories.size();
}
public Collection<LiquibaseFactory> getLiquibaseFactories() {
return factories;
}
}
|
LiquibaseJsonRpcService
|
java
|
quarkusio__quarkus
|
extensions/picocli/runtime/src/main/java/io/quarkus/picocli/runtime/DefaultPicocliCommandLineFactory.java
|
{
"start": 302,
"end": 1753
}
|
class ____ implements PicocliCommandLineFactory {
private final Instance<Object> topCommand;
private final PicocliConfiguration picocliConfiguration;
private final CommandLine.IFactory picocliFactory;
public DefaultPicocliCommandLineFactory(@TopCommand Instance<Object> topCommand,
PicocliConfiguration picocliConfiguration,
CommandLine.IFactory picocliFactory) {
this.topCommand = topCommand;
this.picocliConfiguration = picocliConfiguration;
this.picocliFactory = picocliFactory;
}
private Class<?> classForName(String name) {
try {
return Class.forName(name, false, Thread.currentThread().getContextClassLoader());
} catch (ClassNotFoundException ex) {
throw new IllegalArgumentException(ex);
}
}
@Override
public CommandLine create() {
if (picocliConfiguration.topCommand().isPresent()) {
String topCommandName = picocliConfiguration.topCommand().get();
Instance<Object> namedTopCommand = topCommand.select(NamedLiteral.of(topCommandName));
if (namedTopCommand.isResolvable()) {
return new CommandLine(namedTopCommand.get(), picocliFactory);
}
return new CommandLine(classForName(topCommandName), picocliFactory);
}
return new CommandLine(topCommand.get(), picocliFactory);
}
}
|
DefaultPicocliCommandLineFactory
|
java
|
elastic__elasticsearch
|
x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobConstants.java
|
{
"start": 467,
"end": 907
}
|
class ____ {
public static final String EMPTY_CONNECTOR_SYNC_JOB_ID_ERROR_MESSAGE =
"[connector_sync_job_id] of the connector sync job cannot be null or empty.";
public static final String EMPTY_WORKER_HOSTNAME_ERROR_MESSAGE = "[worker_hostname] of the connector sync job cannot be null.";
public static final String CONNECTOR_SYNC_JOB_ID_PARAM = CONNECTOR_SYNC_JOB_ID_FIELD.getPreferredName();
}
|
ConnectorSyncJobConstants
|
java
|
spring-projects__spring-framework
|
spring-context/src/test/java/org/springframework/validation/beanvalidation/BeanValidationBeanRegistrationAotProcessorTests.java
|
{
"start": 8649,
"end": 8837
}
|
class ____ {
@SuppressWarnings("unused")
public String hello(@Exists String name) {
return "Hello " + name;
}
}
@SuppressWarnings("unused")
static
|
MethodParameterLevelConstraint
|
java
|
elastic__elasticsearch
|
x-pack/plugin/migrate/src/main/java/org/elasticsearch/system_indices/task/MigrationResultsUpdateTask.java
|
{
"start": 872,
"end": 4542
}
|
class ____ extends ClusterStateUpdateTask {
private static final Logger logger = LogManager.getLogger(MigrationResultsUpdateTask.class);
private final String featureName;
private final ProjectId projectId;
private final SingleFeatureMigrationResult status;
private final ActionListener<ClusterState> listener;
private MigrationResultsUpdateTask(
String featureName,
ProjectId projectId,
SingleFeatureMigrationResult status,
ActionListener<ClusterState> listener
) {
this.featureName = featureName;
this.projectId = projectId;
this.status = status;
this.listener = listener;
}
/**
* Creates a task that will update the status of a feature migration.
* @param featureName The name of the feature whose status should be updated.
* @param projectId The project ID
* @param status The status to be associated with the given feature.
* @param listener A listener that will be called upon successfully updating the cluster state.
*/
public static MigrationResultsUpdateTask upsert(
String featureName,
ProjectId projectId,
SingleFeatureMigrationResult status,
ActionListener<ClusterState> listener
) {
return new MigrationResultsUpdateTask(featureName, projectId, status, listener);
}
/**
* Submit the update task so that it will actually be executed.
* @param clusterService The cluster service to which this task should be submitted.
*/
public void submit(ClusterService clusterService) {
String source = format("record [%s] migration [%s]", featureName, status.succeeded() ? "success" : "failure");
submitUnbatchedTask(clusterService, source, this);
}
@SuppressForbidden(reason = "legacy usage of unbatched task") // TODO add support for batching here
private static void submitUnbatchedTask(
ClusterService clusterService,
@SuppressWarnings("SameParameterValue") String source,
ClusterStateUpdateTask task
) {
clusterService.submitUnbatchedStateUpdateTask(source, task);
}
@Override
public ClusterState execute(ClusterState currentState) throws Exception {
final var project = currentState.metadata().getProject(projectId);
FeatureMigrationResults currentResults = project.custom(FeatureMigrationResults.TYPE);
if (currentResults == null) {
currentResults = new FeatureMigrationResults(new HashMap<>());
}
FeatureMigrationResults newResults = currentResults.withResult(featureName, status);
return currentState.copyAndUpdateProject(project.id(), builder -> builder.putCustom(FeatureMigrationResults.TYPE, newResults));
}
@Override
public void clusterStateProcessed(ClusterState oldState, ClusterState newState) {
listener.onResponse(newState);
}
@Override
public void onFailure(Exception clusterStateUpdateException) {
if (status.succeeded()) {
logger.warn(
() -> "failed to update cluster state after successful migration of feature [" + featureName + "]",
clusterStateUpdateException
);
} else {
logger.error(
() -> format(
"failed to update cluster state after failed migration of feature [%s] on index [%s]",
featureName,
status.getFailedResourceName()
),
clusterStateUpdateException
);
}
listener.onFailure(clusterStateUpdateException);
}
}
|
MigrationResultsUpdateTask
|
java
|
assertj__assertj-core
|
assertj-core/src/main/java/org/assertj/core/condition/NestableCondition.java
|
{
"start": 1494,
"end": 3507
}
|
class ____ {
* final String firstLine;
* final String postcode;
*
* Address(String firstLine, String postcode) {
* this.firstLine = firstLine;
* this.postcode = postcode;
* }
* }
*
* static Condition<Customer> name(String expected) {
* return new Condition<>(
* it -> expected.equals(it.name),
* "name: " + expected
* );
* }
*
* static Condition<Customer> customer(Condition<Customer>... conditions) {
* return nestable("person", conditions);
* }
*
* static Condition<Address> firstLine(String expected) {
* return new Condition<>(
* it -> expected.equals(it.firstLine),
* "first line: " + expected
* );
* }
*
* static Condition<Address> postcode(String expected) {
* return new Condition<>(
* it -> expected.equals(it.postcode),
* "postcode: " + expected
* );
* }
*
* static Condition<Customer> address(Condition<Address>... conditions) {
* return nestable(
* "address",
* customer -> customer.address,
* conditions
* );
* }</code></pre>
*
* And assertions can be written like:
* <pre><code class='java'> assertThat(customer).is(
* customer(
* name("John"),
* address(
* firstLine("3"),
* postcode("KM3 8SP")
* )
* )
* ); </code></pre>
* leads to an easy-to-read assertion error:
* <pre><code class='text'> Expecting actual:
* org.assertj.core.condition.Customer@27ff5d15
* to be:
* [✗] person:[
* [✓] name: John,
* [✗] address:[
* [✗] first line: 3,
* [✓] postcode: KM3 8SP
* ]
* ]</code></pre>
* For an even better assertion error, see <code>{@link VerboseCondition}</code>.
*
* @param <ACTUAL> the type of object this condition accepts ({@literal Customer} in the example)
* @param <NESTED> the type of object nested into {@literal ACTUAL} ({@literal Address} in the example)
*
* @author Alessandro Ciccimarra
*/
public
|
Address
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/DoubleTerms.java
|
{
"start": 1494,
"end": 6713
}
|
class ____ extends InternalTerms.Bucket<Bucket> {
double term;
Bucket(
double term,
long docCount,
InternalAggregations aggregations,
boolean showDocCountError,
long docCountError,
DocValueFormat format
) {
super(docCount, aggregations, showDocCountError, docCountError, format);
this.term = term;
}
/**
* Read from a stream.
*/
Bucket(StreamInput in, DocValueFormat format, boolean showDocCountError) throws IOException {
super(in, format, showDocCountError);
term = in.readDouble();
}
@Override
protected void writeTermTo(StreamOutput out) throws IOException {
out.writeDouble(term);
}
@Override
public String getKeyAsString() {
return format.format(term).toString();
}
@Override
public Object getKey() {
return term;
}
@Override
public Number getKeyAsNumber() {
return term;
}
@Override
public int compareKey(Bucket other) {
return Double.compare(term, other.term);
}
@Override
protected final XContentBuilder keyToXContent(XContentBuilder builder) throws IOException {
builder.field(CommonFields.KEY.getPreferredName(), term);
if (format != DocValueFormat.RAW) {
builder.field(CommonFields.KEY_AS_STRING.getPreferredName(), format.format(term).toString());
}
return builder;
}
@Override
public boolean equals(Object obj) {
return super.equals(obj) && Objects.equals(term, ((Bucket) obj).term);
}
@Override
public int hashCode() {
return Objects.hash(super.hashCode(), term);
}
}
public DoubleTerms(
String name,
BucketOrder reduceOrder,
BucketOrder order,
int requiredSize,
long minDocCount,
Map<String, Object> metadata,
DocValueFormat format,
int shardSize,
boolean showTermDocCountError,
long otherDocCount,
List<Bucket> buckets,
Long docCountError
) {
super(
name,
reduceOrder,
order,
requiredSize,
minDocCount,
metadata,
format,
shardSize,
showTermDocCountError,
otherDocCount,
buckets,
docCountError
);
}
/**
* Read from a stream.
*/
public DoubleTerms(StreamInput in) throws IOException {
super(in, Bucket::new);
}
@Override
public String getWriteableName() {
return NAME;
}
@Override
public DoubleTerms create(List<Bucket> buckets) {
return new DoubleTerms(
name,
reduceOrder,
order,
requiredSize,
minDocCount,
metadata,
format,
shardSize,
showTermDocCountError,
otherDocCount,
buckets,
docCountError
);
}
@Override
public Bucket createBucket(InternalAggregations aggregations, Bucket prototype) {
return new Bucket(
prototype.term,
prototype.docCount,
aggregations,
showTermDocCountError,
prototype.getDocCountError(),
prototype.format
);
}
@Override
protected DoubleTerms create(String name, List<Bucket> buckets, BucketOrder reduceOrder, long docCountError, long otherDocCount) {
return new DoubleTerms(
name,
reduceOrder,
order,
requiredSize,
minDocCount,
getMetadata(),
format,
shardSize,
showTermDocCountError,
otherDocCount,
buckets,
docCountError
);
}
@Override
protected AggregatorReducer getLeaderReducer(AggregationReduceContext reduceContext, int size) {
return new AggregatorReducer() {
private final AggregatorReducer processor = termsAggregationReducer(reduceContext, size);
@Override
public void accept(InternalAggregation aggregation) {
if (aggregation instanceof LongTerms longTerms) {
processor.accept(LongTerms.convertLongTermsToDouble(longTerms, format));
} else {
processor.accept(aggregation);
}
}
@Override
public InternalAggregation get() {
return processor.get();
}
@Override
public void close() {
Releasables.close(processor);
}
};
}
@Override
protected Bucket createBucket(long docCount, InternalAggregations aggs, long docCountError, DoubleTerms.Bucket prototype) {
return new Bucket(prototype.term, docCount, aggs, showTermDocCountError, docCountError, format);
}
}
|
Bucket
|
java
|
spring-projects__spring-security
|
oauth2/oauth2-jose/src/main/java/org/springframework/security/oauth2/jwt/JwtValidationException.java
|
{
"start": 1077,
"end": 2455
}
|
class ____ extends BadJwtException {
@Serial
private static final long serialVersionUID = 134652048447295615L;
private final Collection<OAuth2Error> errors;
/**
* Constructs a {@link JwtValidationException} using the provided parameters
*
* While each {@link OAuth2Error} does contain an error description, this constructor
* can take an overarching description that encapsulates the composition of failures
*
* That said, it is appropriate to pass one of the messages from the error list in as
* the exception description, for example:
*
* <pre>
* if ( result.hasErrors() ) {
* Collection<OAuth2Error> errors = result.getErrors();
* throw new JwtValidationException(errors.iterator().next().getDescription(), errors);
* }
* </pre>
* @param message - the exception message
* @param errors - a list of {@link OAuth2Error}s with extra detail about the
* validation result
*/
public JwtValidationException(String message, Collection<OAuth2Error> errors) {
super(message);
Assert.notEmpty(errors, "errors cannot be empty");
this.errors = new ArrayList<>(errors);
}
/**
* Return the list of {@link OAuth2Error}s associated with this exception
* @return the list of {@link OAuth2Error}s associated with this exception
*/
public Collection<OAuth2Error> getErrors() {
return this.errors;
}
}
|
JwtValidationException
|
java
|
apache__hadoop
|
hadoop-tools/hadoop-datajoin/src/main/java/org/apache/hadoop/contrib/utils/join/DataJoinReducerBase.java
|
{
"start": 1888,
"end": 7937
}
|
class ____ extends JobBase {
protected Reporter reporter = null;
private long maxNumOfValuesPerGroup = 100;
protected long largestNumOfValues = 0;
protected long numOfValues = 0;
protected long collected = 0;
protected JobConf job;
public void close() throws IOException {
if (this.reporter != null) {
this.reporter.setStatus(super.getReport());
}
}
public void configure(JobConf job) {
super.configure(job);
this.job = job;
this.maxNumOfValuesPerGroup = job.getLong("datajoin.maxNumOfValuesPerGroup", 100);
}
/**
* The subclass can provide a different implementation on ResetableIterator.
* This is necessary if the number of values in a reduce call is very high.
*
* The default provided here uses ArrayListBackedIterator
*
* @return an Object of ResetableIterator.
*/
protected ResetableIterator createResetableIterator() {
return new ArrayListBackedIterator();
}
/**
* This is the function that re-groups values for a key into sub-groups based
* on a secondary key (input tag).
*
* @param arg1
* @return
*/
private SortedMap<Object, ResetableIterator> regroup(Object key,
Iterator arg1, Reporter reporter) throws IOException {
this.numOfValues = 0;
SortedMap<Object, ResetableIterator> retv = new TreeMap<Object, ResetableIterator>();
TaggedMapOutput aRecord = null;
while (arg1.hasNext()) {
this.numOfValues += 1;
if (this.numOfValues % 100 == 0) {
reporter.setStatus("key: " + key.toString() + " numOfValues: "
+ this.numOfValues);
}
if (this.numOfValues > this.maxNumOfValuesPerGroup) {
continue;
}
aRecord = ((TaggedMapOutput) arg1.next()).clone(job);
Text tag = aRecord.getTag();
ResetableIterator data = retv.get(tag);
if (data == null) {
data = createResetableIterator();
retv.put(tag, data);
}
data.add(aRecord);
}
if (this.numOfValues > this.largestNumOfValues) {
this.largestNumOfValues = numOfValues;
LOG.info("key: " + key.toString() + " this.largestNumOfValues: "
+ this.largestNumOfValues);
}
return retv;
}
public void reduce(Object key, Iterator values,
OutputCollector output, Reporter reporter) throws IOException {
if (this.reporter == null) {
this.reporter = reporter;
}
SortedMap<Object, ResetableIterator> groups = regroup(key, values, reporter);
Object[] tags = groups.keySet().toArray();
ResetableIterator[] groupValues = new ResetableIterator[tags.length];
for (int i = 0; i < tags.length; i++) {
groupValues[i] = groups.get(tags[i]);
}
joinAndCollect(tags, groupValues, key, output, reporter);
addLongValue("groupCount", 1);
for (int i = 0; i < tags.length; i++) {
groupValues[i].close();
}
}
/**
* The subclass can overwrite this method to perform additional filtering
* and/or other processing logic before a value is collected.
*
* @param key
* @param aRecord
* @param output
* @param reporter
* @throws IOException
*/
protected void collect(Object key, TaggedMapOutput aRecord,
OutputCollector output, Reporter reporter) throws IOException {
this.collected += 1;
addLongValue("collectedCount", 1);
if (aRecord != null) {
output.collect(key, aRecord.getData());
reporter.setStatus("key: " + key.toString() + " collected: " + collected);
addLongValue("actuallyCollectedCount", 1);
}
}
/**
* join the list of the value lists, and collect the results.
*
* @param tags
* a list of input tags
* @param values
* a list of value lists, each corresponding to one input source
* @param key
* @param output
* @throws IOException
*/
private void joinAndCollect(Object[] tags, ResetableIterator[] values,
Object key, OutputCollector output, Reporter reporter)
throws IOException {
if (values.length < 1) {
return;
}
Object[] partialList = new Object[values.length];
joinAndCollect(tags, values, 0, partialList, key, output, reporter);
}
/**
* Perform the actual join recursively.
*
* @param tags
* a list of input tags
* @param values
* a list of value lists, each corresponding to one input source
* @param pos
* indicating the next value list to be joined
* @param partialList
* a list of values, each from one value list considered so far.
* @param key
* @param output
* @throws IOException
*/
private void joinAndCollect(Object[] tags, ResetableIterator[] values,
int pos, Object[] partialList, Object key,
OutputCollector output, Reporter reporter) throws IOException {
if (values.length == pos) {
// get a value from each source. Combine them
TaggedMapOutput combined = combine(tags, partialList);
collect(key, combined, output, reporter);
return;
}
ResetableIterator nextValues = values[pos];
nextValues.reset();
while (nextValues.hasNext()) {
Object v = nextValues.next();
partialList[pos] = v;
joinAndCollect(tags, values, pos + 1, partialList, key, output, reporter);
}
}
public static Text SOURCE_TAGS_FIELD = new Text("SOURCE_TAGS");
public static Text NUM_OF_VALUES_FIELD = new Text("NUM_OF_VALUES");
/**
*
* @param tags
* a list of source tags
* @param values
* a value per source
* @return combined value derived from values of the sources
*/
protected abstract TaggedMapOutput combine(Object[] tags, Object[] values);
public void map(Object arg0, Object arg1, OutputCollector arg2,
Reporter arg3) throws IOException {
// TODO Auto-generated method stub
}
}
|
DataJoinReducerBase
|
java
|
playframework__playframework
|
core/play/src/main/java/play/mvc/BodyParser.java
|
{
"start": 18004,
"end": 18972
}
|
class ____ extends MaxLengthBodyParser<File> {
private final File to;
private final Materializer materializer;
public ToFile(
File to, long maxLength, HttpErrorHandler errorHandler, Materializer materializer) {
super(maxLength, errorHandler);
this.to = to;
this.materializer = materializer;
}
public ToFile(
File to,
HttpConfiguration httpConfiguration,
HttpErrorHandler errorHandler,
Materializer materializer) {
this(to, httpConfiguration.parser().maxDiskBuffer(), errorHandler, materializer);
}
@Override
protected Accumulator<ByteString, F.Either<Result, File>> apply1(Http.RequestHeader request) {
return Accumulator.fromSink(
StreamConverters.fromOutputStream(
() -> java.nio.file.Files.newOutputStream(this.to.toPath())))
.map(ioResult -> F.Either.Right(this.to), materializer.executionContext());
}
}
|
ToFile
|
java
|
google__truth
|
core/src/main/java/com/google/common/truth/PrimitiveFloatArraySubject.java
|
{
"start": 1199,
"end": 10625
}
|
class ____ extends Subject {
private final float @Nullable [] actual;
private PrimitiveFloatArraySubject(FailureMetadata metadata, float @Nullable [] actual) {
super(metadata, actual);
this.actual = actual;
}
/**
* Checks that the actual array and {@code expected} are arrays of the same length and type,
* containing elements such that each element in {@code expected} is equal to each element in the
* actual array, and in the same position, with element equality defined the same way that {@link
* Arrays#equals(float[], float[])} and {@link Float#equals(Object)} define it (which is different
* to the way that the {@code ==} operator on primitive {@code float} defines it). This method is
* <i>not</i> recommended when the code under test is doing any kind of arithmetic: use {@link
* #usingTolerance} with a suitable tolerance in that case, e.g. {@code
* assertThat(actualArray).usingTolerance(1.0e-5).containsExactly(expectedArray).inOrder()}.
* (Remember that the exact result of floating point arithmetic is sensitive to apparently trivial
* changes such as replacing {@code (a + b) + c} with {@code a + (b + c)}.) This method is
* recommended when the code under test is specified as either copying values without modification
* from its input or returning well-defined literal or constant values.
*
* <ul>
* <li>It considers {@link Float#POSITIVE_INFINITY}, {@link Float#NEGATIVE_INFINITY}, and {@link
* Float#NaN} to be equal to themselves (contrast with {@code usingTolerance(0.0)} which
* does not).
* <li>It does <i>not</i> consider {@code -0.0f} to be equal to {@code 0.0f} (contrast with
* {@code usingTolerance(0.0)} which does).
* </ul>
*/
@Override
public void isEqualTo(@Nullable Object expected) {
super.isEqualTo(expected);
}
/**
* Checks that the actual array and {@code expected} are not arrays of the same length and type,
* containing elements such that each element in {@code expected} is equal to each element in the
* actual array, and in the same position, with element equality defined the same way that {@link
* Arrays#equals(float[], float[])} and {@link Float#equals(Object)} define it (which is different
* to the way that the {@code ==} operator on primitive {@code float} defines it). See {@link
* #isEqualTo(Object)} for advice on when exact equality is recommended.
*
* <ul>
* <li>It considers {@link Float#POSITIVE_INFINITY}, {@link Float#NEGATIVE_INFINITY}, and {@link
* Float#NaN} to be equal to themselves.
* <li>It does <i>not</i> consider {@code -0.0} to be equal to {@code 0.0}.
* </ul>
*/
@Override
public void isNotEqualTo(@Nullable Object other) {
super.isNotEqualTo(other);
}
/**
* Starts a method chain for a check in which the actual values (i.e. the elements of the array
* under test) are compared to expected elements using a {@link Correspondence} which considers
* values to correspond if they are finite values within {@code tolerance} of each other. The
* check is actually executed by continuing the method chain. For example:
*
* <pre>{@code
* assertThat(actualFloatArray).usingTolerance(1.0e-5f).contains(3.14159f);
* }</pre>
*
* <ul>
* <li>It does not consider values to correspond if either value is infinite or NaN.
* <li>It considers {@code -0.0f} to be within any tolerance of {@code 0.0f}.
* <li>The expected values provided later in the chain will be {@link Number} instances which
* will be converted to floats, which may result in a loss of precision for some numeric
* types.
* <li>The subsequent methods in the chain may throw a {@link NullPointerException} if any
* expected {@link Number} instance is null.
* </ul>
*
* @param tolerance an inclusive upper bound on the difference between the float values of the
* actual and expected numbers, which must be a non-negative finite value, i.e. not {@link
* Double#NaN}, {@link Double#POSITIVE_INFINITY}, or negative, including {@code -0.0}
*/
public FloatArrayAsIterable usingTolerance(double tolerance) {
if (actual == null) {
failWithoutActual(simpleFact("cannot perform assertions on the contents of a null array"));
return ignoreCheck().that(new float[0]).usingTolerance(tolerance);
}
return FloatArrayAsIterable.create(tolerance(tolerance), iterableSubject(actual));
}
private static final Correspondence<Float, Number> EXACT_EQUALITY_CORRESPONDENCE =
Correspondence.from(
(a, e) -> floatToIntBits(a) == floatToIntBits(checkedToFloat(e)), "is exactly equal to");
private static float checkedToFloat(Number expected) {
checkNotNull(expected);
checkArgument(
!(expected instanceof Double),
"Expected value in assertion using exact float equality was a double, which is not "
+ "supported as a double may not have an exact float representation");
boolean okType =
expected instanceof Float || expected instanceof Integer || expected instanceof Long;
if (!okType) {
throw new IllegalArgumentException(
"Expected value in assertion using exact float equality was of unsupported type "
+ SubjectUtils.longName(expected.getClass())
+ " (it may not have an exact float representation)");
}
if (expected instanceof Integer) {
checkArgument(
abs((Integer) expected) <= 1 << 24,
"Expected value %s in assertion using exact float equality was an int with an absolute "
+ "value greater than 2^24 which has no exact float representation",
expected);
}
if (expected instanceof Long) {
checkArgument(
abs((Long) expected) <= 1L << 24,
"Expected value %s in assertion using exact float equality was a long with an absolute "
+ "value greater than 2^24 which has no exact float representation",
expected);
}
return expected.floatValue();
}
/**
* Starts a method chain for a check in which the actual values (i.e. the elements of the array
* under test) are compared to expected elements using a {@link Correspondence} which considers
* values to correspond if they are exactly equal, with equality defined by {@link Float#equals}.
* This method is <i>not</i> recommended when the code under test is doing any kind of arithmetic:
* use {@link #usingTolerance} with a suitable tolerance in that case. (Remember that the exact
* result of floating point arithmetic is sensitive to apparently trivial changes such as
* replacing {@code (a + b) + c} with {@code a + (b + c)}.) This method is recommended when the
* code under test is specified as either copying a value without modification from its input or
* returning a well-defined literal or constant value. The check is actually executed by
* continuing the method chain. For example:
*
* <pre>{@code
* assertThat(actualFloatArray).usingExactEquality().contains(3.14159f);
* }</pre>
*
* <p>For convenience, some subsequent methods accept expected values as {@link Number} instances.
* These numbers must be either of type {@link Float}, {@link Integer}, or {@link Long}, and if
* they are {@link Integer} or {@link Long} then their absolute values must not exceed 2^24 which
* is 16,777,216. (This restriction ensures that the expected values have exact {@link Float}
* representations: using exact equality makes no sense if they do not.)
*
* <ul>
* <li>It considers {@link Float#POSITIVE_INFINITY}, {@link Float#NEGATIVE_INFINITY}, and {@link
* Float#NaN} to be equal to themselves (contrast with {@code usingTolerance(0.0)} which
* does not).
* <li>It does <i>not</i> consider {@code -0.0f} to be equal to {@code 0.0f} (contrast with
* {@code usingTolerance(0.0)} which does).
* <li>The subsequent methods in the chain may throw a {@link NullPointerException} if any
* expected {@link Float} instance is null.
* </ul>
*/
public FloatArrayAsIterable usingExactEquality() {
if (actual == null) {
failWithoutActual(simpleFact("cannot perform assertions on the contents of a null array"));
return ignoreCheck().that(new float[0]).usingExactEquality();
}
return FloatArrayAsIterable.create(EXACT_EQUALITY_CORRESPONDENCE, iterableSubject(actual));
}
/** Checks that the actual array is empty (i.e., that {@code array.length == 0}). */
public void isEmpty() {
arrayIsEmptyImpl();
}
/** Checks that the actual array is not empty (i.e., that {@code array.length > 0}). */
public void isNotEmpty() {
arrayIsNotEmptyImpl();
}
/** Checks that the actual array has the given length. */
public void hasLength(int length) {
arrayHasLengthImpl(length);
}
/**
* A partially specified check for doing assertions on the array similar to the assertions
* supported for {@link Iterable} values, in which the elements of the array under test are
* compared to expected elements using either exact or tolerant float equality: see {@link
* #usingExactEquality} and {@link #usingTolerance}. Call methods on this object to actually
* execute the check.
*
* <p>In the exact equality case, the methods on this
|
PrimitiveFloatArraySubject
|
java
|
google__error-prone
|
core/src/main/java/com/google/errorprone/bugpatterns/DefaultLocale.java
|
{
"start": 2896,
"end": 28689
}
|
enum ____ {
ROOT_FIX("Specify ROOT locale") {
@Override
String replacement(SuggestedFix.Builder fix, VisitorState state) {
fix.addImport("java.util.Locale");
return "Locale.ROOT";
}
},
DEFAULT_LOCALE_FIX("Specify default locale") {
@Override
String replacement(SuggestedFix.Builder fix, VisitorState state) {
fix.addImport("java.util.Locale");
return "Locale.getDefault()";
}
},
DEFAULT_DISPLAY_LOCALE_FIX("Specify default display locale") {
@Override
String replacement(SuggestedFix.Builder fix, VisitorState state) {
fix.addImport("java.util.Locale");
return String.format(
"Locale.getDefault(%s)",
SuggestedFixes.qualifyStaticImport("java.util.Locale.Category.DISPLAY", fix, state));
}
},
DEFAULT_FORMAT_LOCALE_FIX("Specify default format locale") {
@Override
String replacement(SuggestedFix.Builder fix, VisitorState state) {
fix.addImport("java.util.Locale");
return String.format(
"Locale.getDefault(%s)",
SuggestedFixes.qualifyStaticImport("java.util.Locale.Category.FORMAT", fix, state));
}
};
private final String title;
LocaleFix(String title) {
this.title = title;
}
String title() {
return title;
}
abstract String replacement(SuggestedFix.Builder fix, VisitorState state);
}
private static final Pattern SPECIFIER_ALLOW_LIST_REGEX =
Pattern.compile("%([%n]|(\\d+\\$|<)?-?\\d*(\\.\\d+)?[bhsc])");
private static final Supplier<Type> FORMATTABLE = Suppliers.typeFromClass(Formattable.class);
private static final Supplier<Type> APPENDABLE = Suppliers.typeFromClass(Appendable.class);
private static final Supplier<Type> PRINTSTREAM = Suppliers.typeFromClass(PrintStream.class);
// Parameter shape (String pattern, Object... args) shared by format-style methods below.
private static final ImmutableList<Supplier<Type>> PATTERN_AND_ARGS =
    ImmutableList.of(Suppliers.STRING_TYPE, Suppliers.arrayOf(Suppliers.OBJECT_TYPE));
// PrintStream/PrintWriter.format|printf and String.format overloads taking (pattern, args)
// without an explicit Locale.
private static final Matcher<ExpressionTree> FORMAT_METHODS =
    anyOf(
        instanceMethod()
            .onDescendantOfAny(PrintStream.class.getName(), PrintWriter.class.getName())
            .namedAnyOf("format", "printf")
            .withParametersOfType(PATTERN_AND_ARGS),
        staticMethod()
            .onClass(Suppliers.STRING_TYPE)
            .named("format")
            .withParametersOfType(PATTERN_AND_ARGS));
// Invocations whose receiver is System.out or System.err (exempted from reporting).
private static final Matcher<MethodInvocationTree> SYSTEM_OUT_RECEIVER =
    receiverOfInvocation(
        anyOf(
            staticField(System.class.getName(), "out"),
            staticField(System.class.getName(), "err")));
// String.formatted(...) instance method (Java 15+).
private static final Matcher<ExpressionTree> STRING_FORMATTED =
    instanceMethod().onExactClass(Suppliers.STRING_TYPE).named("formatted");
// Locale-sensitive display methods without an explicit Locale argument.
private static final Matcher<ExpressionTree> DISPLAY_METHODS =
    instanceMethod().onExactClass("java.util.Currency").named("getSymbol").withNoParameters();
// Locale-sensitive factory methods that default to Locale.getDefault().
private static final Matcher<ExpressionTree> FACTORIES =
    anyOf(
        staticMethod()
            .onClass("java.text.BreakIterator")
            .namedAnyOf(
                "getCharacterInstance",
                "getLineInstance",
                "getSentenceInstance",
                "getWordInstance")
            .withNoParameters(),
        staticMethod().onClass("java.text.Collator").named("getInstance").withNoParameters());
// Formatting factories that default to the FORMAT-category default locale.
private static final Matcher<ExpressionTree> FORMATTER_FACTORIES =
    anyOf(
        staticMethod()
            .onClass("java.text.NumberFormat")
            .namedAnyOf(
                "getCompactNumberInstance",
                "getCurrencyInstance",
                "getInstance",
                "getIntegerInstance",
                "getNumberInstance",
                "getPercentInstance")
            .withNoParameters(),
        staticMethod()
            .onClass("java.text.DateFormatSymbols")
            .named("getInstance")
            .withNoParameters(),
        staticMethod()
            .onClass("java.text.DecimalFormatSymbols")
            .named("getInstance")
            .withNoParameters(),
        staticMethod()
            .onClass("java.time.format.DateTimeFormatter")
            .named("ofPattern")
            .withParametersOfType(ImmutableList.of(Suppliers.STRING_TYPE)),
        instanceMethod()
            .onDescendantOf("java.time.format.DateTimeFormatterBuilder")
            .named("toFormatter")
            .withNoParameters());
// DateFormat factory overloads that omit the Locale parameter; these get special handling
// in handleDateFormat because getInstance needs to be rewritten to getDateTimeInstance.
private static final Matcher<ExpressionTree> DATE_FORMAT =
    anyOf(
        staticMethod().onClass("java.text.DateFormat").named("getInstance").withNoParameters(),
        staticMethod()
            .onClass("java.text.DateFormat")
            .namedAnyOf("getDateInstance", "getTimeInstance")
            .withNoParameters(),
        staticMethod()
            .onClass("java.text.DateFormat")
            .namedAnyOf("getDateInstance", "getTimeInstance")
            .withParametersOfType(ImmutableList.of(Suppliers.INT_TYPE)),
        staticMethod()
            .onClass("java.text.DateFormat")
            .named("getDateTimeInstance")
            .withNoParameters(),
        staticMethod()
            .onClass("java.text.DateFormat")
            .named("getDateTimeInstance")
            .withParametersOfType(ImmutableList.of(Suppliers.INT_TYPE, Suppliers.INT_TYPE)));
// Static MessageFormat.format(pattern, args), which uses the default locale internally.
private static final Matcher<ExpressionTree> MESSAGEFORMAT_FORMAT =
    staticMethod()
        .onClass("java.text.MessageFormat")
        .named("format")
        .withParametersOfType(PATTERN_AND_ARGS);
// ResourceBundle.getBundle overloads that omit the Locale parameter.
private static final Matcher<ExpressionTree> RESOURCE_BUNDLE =
    anyOf(
        staticMethod()
            .onClass("java.util.ResourceBundle")
            .named("getBundle")
            .withParametersOfType(ImmutableList.of(Suppliers.STRING_TYPE)),
        staticMethod()
            .onClass("java.util.ResourceBundle")
            .named("getBundle")
            .withParameters("java.lang.String", "java.util.ResourceBundle.Control"),
        staticMethod()
            .onClass("java.util.ResourceBundle")
            .named("getBundle")
            .withParameters("java.lang.String", "java.lang.Module"));
// Constructors of formatting classes that default to the current locale.
private static final Matcher<ExpressionTree> FORMAT_CONSTRUCTORS =
    anyOf(
        constructor()
            .forClass("java.text.MessageFormat")
            .withParametersOfType(ImmutableList.of(Suppliers.STRING_TYPE)),
        constructor().forClass("java.text.DateFormatSymbols").withNoParameters(),
        constructor().forClass("java.text.DecimalFormatSymbols").withNoParameters());
// new DecimalFormat() / new DecimalFormat(pattern) — handled separately because the no-arg
// form is rewritten to a NumberFormat factory call.
private static final Matcher<ExpressionTree> DECIMAL_FORMAT =
    anyOf(
        constructor().forClass("java.text.DecimalFormat").withNoParameters(),
        constructor()
            .forClass("java.text.DecimalFormat")
            .withParametersOfType(ImmutableList.of(Suppliers.STRING_TYPE)));
// new SimpleDateFormat() / new SimpleDateFormat(pattern) without an explicit Locale.
private static final Matcher<ExpressionTree> SIMPLE_DATE_FORMAT =
    anyOf(
        constructor().forClass("java.text.SimpleDateFormat").withNoParameters(),
        constructor()
            .forClass("java.text.SimpleDateFormat")
            .withParametersOfType(ImmutableList.of(Suppliers.STRING_TYPE)));
// java.util.Formatter constructors that omit the Locale parameter.
private static final Matcher<ExpressionTree> FORMATTER =
    anyOf(
        constructor().forClass("java.util.Formatter").withNoParameters(),
        constructor()
            .forClass("java.util.Formatter")
            .withParametersOfType(ImmutableList.of(Suppliers.STRING_TYPE)),
        constructor()
            .forClass("java.util.Formatter")
            .withParametersOfType(ImmutableList.of(Suppliers.STRING_TYPE, Suppliers.STRING_TYPE)),
        constructor().forClass("java.util.Formatter").withParameters("java.lang.Appendable"),
        constructor().forClass("java.util.Formatter").withParameters("java.io.File"),
        constructor()
            .forClass("java.util.Formatter")
            .withParameters("java.io.File", "java.lang.String"),
        constructor().forClass("java.util.Formatter").withParameters("java.io.PrintStream"),
        constructor().forClass("java.util.Formatter").withParameters("java.io.OutputStream"),
        constructor()
            .forClass("java.util.Formatter")
            .withParameters("java.io.OutputStream", "java.lang.String"));
/**
 * Dispatches locale-sensitive method invocations to the appropriate fix builder.
 * Matcher order matters: FORMAT_METHODS is checked before STRING_FORMATTED etc., and each
 * branch returns immediately so at most one kind of fix set is produced per tree.
 */
@Override
public Description matchMethodInvocation(MethodInvocationTree tree, VisitorState state) {
  // String.toUpperCase/toLowerCase are already handled by StringCaseLocaleUsage
  if (FORMAT_METHODS.matches(tree, state)) {
    // Allow System.out and System.err; also skip patterns whose specifiers are all
    // locale-independent (see shouldRefactorStringFormat).
    if (SYSTEM_OUT_RECEIVER.matches(tree, state)
        || !shouldRefactorStringFormat(
            tree.getArguments().getFirst(),
            tree.getArguments().stream().skip(1).collect(toImmutableList()),
            state)) {
      return NO_MATCH;
    }
    // Format methods take the Locale as the first argument.
    return prependLocales(
        tree,
        state,
        LocaleFix.ROOT_FIX,
        LocaleFix.DEFAULT_LOCALE_FIX,
        LocaleFix.DEFAULT_FORMAT_LOCALE_FIX);
  }
  if (STRING_FORMATTED.matches(tree, state)) {
    return handleStringFormatted(tree, state);
  }
  if (DISPLAY_METHODS.matches(tree, state)) {
    // Display methods get the DISPLAY-category locale as an alternative fix.
    return appendLocales(
        tree,
        state,
        LocaleFix.ROOT_FIX,
        LocaleFix.DEFAULT_LOCALE_FIX,
        LocaleFix.DEFAULT_DISPLAY_LOCALE_FIX);
  }
  if (FACTORIES.matches(tree, state)) {
    return appendLocales(tree, state, LocaleFix.ROOT_FIX, LocaleFix.DEFAULT_LOCALE_FIX);
  }
  if (FORMATTER_FACTORIES.matches(tree, state)) {
    // Formatter factories get the FORMAT-category locale as an alternative fix.
    return appendLocales(
        tree,
        state,
        LocaleFix.ROOT_FIX,
        LocaleFix.DEFAULT_LOCALE_FIX,
        LocaleFix.DEFAULT_FORMAT_LOCALE_FIX);
  }
  if (DATE_FORMAT.matches(tree, state)) {
    return handleDateFormat(tree, state);
  }
  if (MESSAGEFORMAT_FORMAT.matches(tree, state)) {
    return handleMessageFormatFormat(tree, state);
  }
  if (RESOURCE_BUNDLE.matches(tree, state)) {
    return handleResourceBundle(tree, state);
  }
  return NO_MATCH;
}
/**
 * Dispatches locale-sensitive constructor invocations ({@code new ...}) to the
 * appropriate fix builder.
 */
@Override
public Description matchNewClass(NewClassTree tree, VisitorState state) {
  if (FORMAT_CONSTRUCTORS.matches(tree, state)) {
    return appendLocales(
        tree,
        state,
        LocaleFix.ROOT_FIX,
        LocaleFix.DEFAULT_LOCALE_FIX,
        LocaleFix.DEFAULT_FORMAT_LOCALE_FIX);
  }
  if (DECIMAL_FORMAT.matches(tree, state)) {
    return handleDecimalFormat(tree, state);
  }
  if (SIMPLE_DATE_FORMAT.matches(tree, state)) {
    return handleSimpleDateFormat(tree, state);
  }
  if (FORMATTER.matches(tree, state)) {
    return handleFormatter(tree, state);
  }
  return NO_MATCH;
}
/**
 * Handles {@code pattern.formatted(args)}: offers fixes that rewrite it to
 * {@code String.format(locale, pattern, args)} for each candidate locale.
 * The receiver of {@code formatted} is the format string itself.
 */
private Description handleStringFormatted(MethodInvocationTree tree, VisitorState state) {
  if (!shouldRefactorStringFormat(ASTHelpers.getReceiver(tree), tree.getArguments(), state)) {
    return NO_MATCH;
  }
  var description = buildDescription(tree);
  description.addFix(stringFormattedFix(tree, state, LocaleFix.ROOT_FIX));
  description.addFix(stringFormattedFix(tree, state, LocaleFix.DEFAULT_LOCALE_FIX));
  description.addFix(stringFormattedFix(tree, state, LocaleFix.DEFAULT_FORMAT_LOCALE_FIX));
  return description.build();
}
/** Builds one String.format(...) replacement for a {@code formatted} call. */
private Fix stringFormattedFix(
    MethodInvocationTree tree, VisitorState state, LocaleFix localeFix) {
  var fix = SuggestedFix.builder().setShortDescription(localeFix.title());
  // Replace the whole invocation: receiver becomes the pattern argument, original
  // arguments follow the pattern.
  fix.replace(
      tree,
      String.format(
          "String.format(%s, %s, %s)",
          localeFix.replacement(fix, state),
          state.getSourceForNode(ASTHelpers.getReceiver(tree)),
          tree.getArguments().stream()
              .map(state::getSourceForNode)
              .collect(Collectors.joining(", "))));
  return fix.build();
}
/**
 * Handles DateFormat factory calls. {@code getInstance} has no Locale overload, so it is
 * rewritten to {@code getDateTimeInstance(SHORT, SHORT, locale)} (SHORT/SHORT matches the
 * documented behavior of getInstance); the other factories just gain a Locale argument.
 */
private Description handleDateFormat(MethodInvocationTree tree, VisitorState state) {
  var description = buildDescription(tree);
  var methodName = ASTHelpers.getSymbol(tree).getSimpleName();
  if (methodName.contentEquals("getInstance")) {
    dateFormatGetInstanceFixes(description, tree, state);
  } else if (methodName.contentEquals("getDateTimeInstance")) {
    // getDateTimeInstance() takes two style arguments before the Locale.
    dateFormatFixes(description, tree, state, 2);
  } else {
    // getDateInstance()/getTimeInstance() take one style argument before the Locale.
    dateFormatFixes(description, tree, state, 1);
  }
  return description.build();
}
/** Adds one getInstance -> getDateTimeInstance fix per candidate locale. */
private void dateFormatGetInstanceFixes(
    Description.Builder description, MethodInvocationTree tree, VisitorState state) {
  description.addFix(dateFormatGetInstanceFix(tree, state, LocaleFix.ROOT_FIX));
  description.addFix(dateFormatGetInstanceFix(tree, state, LocaleFix.DEFAULT_LOCALE_FIX));
  description.addFix(dateFormatGetInstanceFix(tree, state, LocaleFix.DEFAULT_FORMAT_LOCALE_FIX));
}
private Fix dateFormatGetInstanceFix(
    MethodInvocationTree tree, VisitorState state, LocaleFix localeFix) {
  var fix = SuggestedFix.builder().setShortDescription(localeFix.title());
  // Replace the (empty) argument list with (SHORT, SHORT, locale); "%<s" reuses the
  // SHORT constant for the second style argument.
  fix.replace(
      state.getEndPosition(tree.getMethodSelect()),
      state.getEndPosition(tree),
      String.format(
          "(%1$s, %<s, %2$s)",
          SuggestedFixes.qualifyStaticImport("java.text.DateFormat.SHORT", fix, state),
          localeFix.replacement(fix, state)))
      .merge(SuggestedFixes.renameMethodInvocation(tree, "getDateTimeInstance", state));
  return fix.build();
}
/** Adds one fix per candidate locale for the non-getInstance DateFormat factories. */
private void dateFormatFixes(
    Description.Builder description,
    MethodInvocationTree tree,
    VisitorState state,
    int nonLocaleArgs) {
  description.addFix(dateFormatFix(tree, state, nonLocaleArgs, LocaleFix.ROOT_FIX));
  description.addFix(dateFormatFix(tree, state, nonLocaleArgs, LocaleFix.DEFAULT_LOCALE_FIX));
  description.addFix(
      dateFormatFix(tree, state, nonLocaleArgs, LocaleFix.DEFAULT_FORMAT_LOCALE_FIX));
}
private Fix dateFormatFix(
    MethodInvocationTree tree, VisitorState state, int nonLocaleArgs, LocaleFix localeFix) {
  var fix = SuggestedFix.builder().setShortDescription(localeFix.title());
  if (tree.getArguments().isEmpty()) {
    // No style arguments given: supply DateFormat.DEFAULT for each missing style slot,
    // then the locale.
    var defaultConst =
        SuggestedFixes.qualifyStaticImport("java.text.DateFormat.DEFAULT", fix, state);
    fix.replace(
        state.getEndPosition(tree.getMethodSelect()),
        state.getEndPosition(tree),
        String.format(
            "(%s, %s)",
            Stream.generate(() -> defaultConst)
                .limit(nonLocaleArgs)
                .collect(Collectors.joining(", ")),
            localeFix.replacement(fix, state)));
  } else {
    // Style arguments already present: just append the locale.
    fix.postfixWith(
        Iterables.getLast(tree.getArguments()), ", " + localeFix.replacement(fix, state));
  }
  return fix.build();
}
/**
 * Handles static {@code MessageFormat.format(pattern, args)}: the static method has no
 * Locale overload, so the fixes rewrite it to
 * {@code new MessageFormat(pattern, locale).format(args)}.
 */
private Description handleMessageFormatFormat(MethodInvocationTree tree, VisitorState state) {
  var pattern = tree.getArguments().getFirst();
  var arguments = tree.getArguments().stream().skip(1).collect(toImmutableList());
  if (!shouldRefactorStringFormat(pattern, arguments, state)) {
    return NO_MATCH;
  }
  var description = buildDescription(tree);
  description.addFix(messageFormatFormatFix(tree, pattern, arguments, state, LocaleFix.ROOT_FIX));
  description.addFix(
      messageFormatFormatFix(tree, pattern, arguments, state, LocaleFix.DEFAULT_LOCALE_FIX));
  description.addFix(
      messageFormatFormatFix(
          tree, pattern, arguments, state, LocaleFix.DEFAULT_FORMAT_LOCALE_FIX));
  return description.build();
}
/**
 * Decides whether a String.format()-style invocation should be refactored. Invocations are
 * refactored when either (a) the pattern is a compile-time constant containing at least one
 * specifier outside the locale-independent allowlist, or (b) some argument is provably a
 * {@link java.util.Formattable} (whose formatTo may be locale-sensitive even for %s).
 * Non-constant patterns are only refactored via condition (b).
 */
private boolean shouldRefactorStringFormat(
    ExpressionTree pattern, List<? extends ExpressionTree> arguments, VisitorState state) {
  String patternValue = ASTHelpers.constValue(pattern, String.class);
  // TODO: add a flag to be stricter and reformat whenever the pattern is not a constant
  if (patternValue != null && !onlyContainsSpecifiersInAllowList(patternValue)) {
    return true;
  }
  // Ideally we'd only check for Formattable on arguments used in %s specifiers
  return containsSomeFormattableArgument(arguments, state);
}
/**
 * Returns true iff every format specifier in {@code pattern} matches the allowlist regex
 * (i.e. is locale-independent).
 */
@VisibleForTesting
static boolean onlyContainsSpecifiersInAllowList(String pattern) {
  var noSpecifierFormatBase = SPECIFIER_ALLOW_LIST_REGEX.matcher(pattern).replaceAll("");
  // If it still has a specifier after the replacement, it means that it was not on the allowlist.
  return !noSpecifierFormatBase.contains("%");
}
/** Returns true iff any argument's static type is provably Formattable. */
private boolean containsSomeFormattableArgument(
    List<? extends ExpressionTree> arguments, VisitorState state) {
  return arguments.stream().anyMatch(tree -> mightBeFormattable(tree, state));
}
private boolean mightBeFormattable(ExpressionTree tree, VisitorState state) {
  // Literals can never be Formattable.
  if (tree instanceof LiteralTree) {
    return false;
  }
  // TODO: add a flag to be stricter and detect any argument that could be cast to Formattable
  // (rather than only the ones that are proven to be Formattable)
  return ASTHelpers.isSubtype(ASTHelpers.getResultType(tree), FORMATTABLE.get(state), state);
}
/**
 * Builds one {@code new MessageFormat(pattern, locale).format(args)} replacement for a
 * static MessageFormat.format call.
 */
private Fix messageFormatFormatFix(
    MethodInvocationTree tree,
    ExpressionTree pattern,
    ImmutableList<? extends ExpressionTree> arguments,
    VisitorState state,
    LocaleFix localeFix) {
  var fix = SuggestedFix.builder().setShortDescription(localeFix.title());
  fix.replace(
      tree,
      String.format(
          "new %s(%s, %s).format(%s)",
          SuggestedFixes.qualifyType(state, fix, "java.text.MessageFormat"),
          state.getSourceForNode(pattern),
          localeFix.replacement(fix, state),
          arguments.stream().map(state::getSourceForNode).collect(Collectors.joining(", "))));
  return fix.build();
}
/**
 * Handles ResourceBundle.getBundle: only ROOT and default-locale fixes apply (there is no
 * FORMAT-category variant for resource bundles).
 */
private Description handleResourceBundle(MethodInvocationTree tree, VisitorState state) {
  var description = buildDescription(tree);
  description.addFix(resourceBundleFix(tree, state, LocaleFix.ROOT_FIX));
  description.addFix(resourceBundleFix(tree, state, LocaleFix.DEFAULT_LOCALE_FIX));
  return description.build();
}
private Fix resourceBundleFix(
    MethodInvocationTree tree, VisitorState state, LocaleFix localeFix) {
  var fix = SuggestedFix.builder().setShortDescription(localeFix.title());
  // The Locale goes right after the first (base name) argument in all getBundle overloads.
  fix.postfixWith(tree.getArguments().getFirst(), ", " + localeFix.replacement(fix, state));
  return fix.build();
}
/**
 * Handles DecimalFormat constructors. {@code new DecimalFormat()} is rewritten to
 * {@code NumberFormat.getInstance(locale)} (no-arg DecimalFormat has no Locale overload);
 * {@code new DecimalFormat(pattern)} gains a {@code DecimalFormatSymbols.getInstance(locale)}
 * argument.
 */
private Description handleDecimalFormat(NewClassTree tree, VisitorState state) {
  var description = buildDescription(tree);
  if (tree.getArguments().isEmpty()) {
    description.addFix(decimalFormatToNumberFormatFix(tree, state, LocaleFix.ROOT_FIX));
    description.addFix(decimalFormatToNumberFormatFix(tree, state, LocaleFix.DEFAULT_LOCALE_FIX));
    description.addFix(
        decimalFormatToNumberFormatFix(tree, state, LocaleFix.DEFAULT_FORMAT_LOCALE_FIX));
  } else {
    description.addFix(decimalFormatFix(tree, state, LocaleFix.ROOT_FIX));
    description.addFix(decimalFormatFix(tree, state, LocaleFix.DEFAULT_LOCALE_FIX));
    description.addFix(decimalFormatFix(tree, state, LocaleFix.DEFAULT_FORMAT_LOCALE_FIX));
  }
  return description.build();
}
private Fix decimalFormatToNumberFormatFix(
    NewClassTree tree, VisitorState state, LocaleFix localeFix) {
  var fix =
      SuggestedFix.builder()
          .setShortDescription(localeFix.title())
          .addImport("java.text.NumberFormat");
  fix.replace(
      tree, String.format("NumberFormat.getInstance(%s)", localeFix.replacement(fix, state)));
  return fix.build();
}
private Fix decimalFormatFix(NewClassTree tree, VisitorState state, LocaleFix localeFix) {
  var fix =
      SuggestedFix.builder()
          .setShortDescription(localeFix.title())
          .addImport("java.text.DecimalFormatSymbols");
  fix.postfixWith(
      Iterables.getLast(tree.getArguments()),
      String.format(", DecimalFormatSymbols.getInstance(%s)", localeFix.replacement(fix, state)));
  return fix.build();
}
/**
 * Handles SimpleDateFormat constructors. {@code new SimpleDateFormat()} is rewritten to
 * {@code DateFormat.getDateTimeInstance(SHORT, SHORT, locale)} (matching the no-arg
 * constructor's documented SHORT/SHORT default); the pattern form just gains a Locale.
 */
private Description handleSimpleDateFormat(NewClassTree tree, VisitorState state) {
  var description = buildDescription(tree);
  if (tree.getArguments().isEmpty()) {
    description.addFix(simpleDateFormatToDateFormatFix(tree, state, LocaleFix.ROOT_FIX));
    description.addFix(
        simpleDateFormatToDateFormatFix(tree, state, LocaleFix.DEFAULT_LOCALE_FIX));
    description.addFix(
        simpleDateFormatToDateFormatFix(tree, state, LocaleFix.DEFAULT_FORMAT_LOCALE_FIX));
  } else {
    description.addFix(simpleDateFormatFix(tree, state, LocaleFix.ROOT_FIX));
    description.addFix(simpleDateFormatFix(tree, state, LocaleFix.DEFAULT_LOCALE_FIX));
    description.addFix(simpleDateFormatFix(tree, state, LocaleFix.DEFAULT_FORMAT_LOCALE_FIX));
  }
  return description.build();
}
private Fix simpleDateFormatToDateFormatFix(
    NewClassTree tree, VisitorState state, LocaleFix localeFix) {
  var fix =
      SuggestedFix.builder()
          .setShortDescription(localeFix.title())
          .addImport("java.text.DateFormat");
  // "%<s" reuses the SHORT constant for the second style argument.
  fix.replace(
      tree,
      String.format(
          "DateFormat.getDateTimeInstance(%1$s, %<s, %2$s)",
          SuggestedFixes.qualifyStaticImport("java.text.DateFormat.SHORT", fix, state),
          localeFix.replacement(fix, state)));
  return fix.build();
}
private Fix simpleDateFormatFix(NewClassTree tree, VisitorState state, LocaleFix localeFix) {
  var fix = SuggestedFix.builder().setShortDescription(localeFix.title());
  fix.postfixWith(
      Iterables.getLast(tree.getArguments()), ", " + localeFix.replacement(fix, state));
  return fix.build();
}
/**
 * Handles java.util.Formatter constructors. Only overloads that also have a Locale variant
 * get fixes: the no-arg, two-arg, and single-Appendable forms. Single-argument forms taking
 * a File/OutputStream/PrintStream have no Locale counterpart, so those are reported without
 * a suggested fix.
 */
private Description handleFormatter(NewClassTree tree, VisitorState state) {
  if (tree.getArguments().isEmpty() || tree.getArguments().size() == 2) {
    return appendLocales(
        tree,
        state,
        LocaleFix.ROOT_FIX,
        LocaleFix.DEFAULT_LOCALE_FIX,
        LocaleFix.DEFAULT_FORMAT_LOCALE_FIX);
  }
  var argType = ASTHelpers.getResultType(Iterables.getOnlyElement(tree.getArguments()));
  // Formatter(Appendable, Locale) exists, but PrintStream (though Appendable) only pairs
  // with a charset-name String, so exclude it.
  if (ASTHelpers.isSubtype(argType, APPENDABLE.get(state), state)
      && !ASTHelpers.isSubtype(argType, PRINTSTREAM.get(state), state)) {
    return appendLocales(
        tree,
        state,
        LocaleFix.ROOT_FIX,
        LocaleFix.DEFAULT_LOCALE_FIX,
        LocaleFix.DEFAULT_FORMAT_LOCALE_FIX);
  }
  // No Locale-taking overload available: report without a fix.
  return buildDescription(tree).build();
}
/**
 * Builds a description whose fixes each insert a locale as the FIRST argument of the
 * given invocation (one fix per candidate {@link LocaleFix}).
 */
private Description prependLocales(
    MethodInvocationTree tree, VisitorState state, LocaleFix... localeFixes) {
  return prependLocales(tree, tree.getMethodSelect(), tree.getArguments(), state, localeFixes);
}
private Description prependLocales(
    Tree tree,
    Tree select,
    List<? extends ExpressionTree> arguments,
    VisitorState state,
    LocaleFix... localeFixes) {
  Description.Builder result = buildDescription(tree);
  for (int i = 0; i < localeFixes.length; i++) {
    result.addFix(prependLocale(tree, select, arguments, state, localeFixes[i]));
  }
  return result.build();
}
/** Builds one fix inserting the locale before the first argument (or as the only one). */
private Fix prependLocale(
    Tree tree,
    Tree select,
    List<? extends ExpressionTree> arguments,
    VisitorState state,
    LocaleFix localeFix) {
  SuggestedFix.Builder builder = SuggestedFix.builder().setShortDescription(localeFix.title());
  if (!arguments.isEmpty()) {
    builder.prefixWith(arguments.getFirst(), localeFix.replacement(builder, state) + ", ");
  } else {
    // Empty argument list: rewrite everything after the method select as "(locale)".
    builder.replace(
        state.getEndPosition(select),
        state.getEndPosition(tree),
        "(" + localeFix.replacement(builder, state) + ")");
  }
  return builder.build();
}
/**
 * Builds a description whose fixes each append a locale as the LAST argument of the
 * given invocation (one fix per candidate {@link LocaleFix}).
 */
private Description appendLocales(
    MethodInvocationTree tree, VisitorState state, LocaleFix... localeFixes) {
  return appendLocales(tree, tree.getMethodSelect(), tree.getArguments(), state, localeFixes);
}
private Description appendLocales(
    NewClassTree tree, VisitorState state, LocaleFix... localeFixes) {
  return appendLocales(tree, tree.getIdentifier(), tree.getArguments(), state, localeFixes);
}
private Description appendLocales(
    Tree tree,
    Tree select,
    List<? extends ExpressionTree> arguments,
    VisitorState state,
    LocaleFix... localeFixes) {
  Description.Builder result = buildDescription(tree);
  for (int i = 0; i < localeFixes.length; i++) {
    result.addFix(appendLocale(tree, select, arguments, state, localeFixes[i]));
  }
  return result.build();
}
/** Builds one fix appending the locale after the last argument (or as the only one). */
private Fix appendLocale(
    Tree tree,
    Tree select,
    List<? extends ExpressionTree> arguments,
    VisitorState state,
    LocaleFix localeFix) {
  SuggestedFix.Builder builder = SuggestedFix.builder().setShortDescription(localeFix.title());
  if (!arguments.isEmpty()) {
    builder.postfixWith(Iterables.getLast(arguments), ", " + localeFix.replacement(builder, state));
  } else {
    // Empty argument list: rewrite everything after the select/identifier as "(locale)".
    builder.replace(
        state.getEndPosition(select),
        state.getEndPosition(tree),
        "(" + localeFix.replacement(builder, state) + ")");
  }
  return builder.build();
}
}
|
LocaleFix
|
java
|
elastic__elasticsearch
|
client/sniffer/src/main/java/org/elasticsearch/client/sniff/Sniffer.java
|
{
"start": 5711,
"end": 8592
}
|
/**
 * One round of sniffing. A Task moves WAITING -> STARTED (when run) or WAITING -> SKIPPED
 * (when cancelled before running); the CAS on taskState guarantees exactly one of the two.
 * After each run, a new Task is scheduled with the configured sniff interval.
 */
class ____ implements Runnable {
    // Delay (millis) to use when scheduling the FOLLOW-UP task after this one runs.
    final long nextTaskDelay;
    final AtomicReference<TaskState> taskState = new AtomicReference<>(TaskState.WAITING);
    Task(long nextTaskDelay) {
        this.nextTaskDelay = nextTaskDelay;
    }
    @Override
    public void run() {
        /*
         * Skipped or already started tasks do nothing. In most cases tasks will be cancelled and not run, but we want to protect for
         * cases where future#cancel returns true yet the task runs. We want to make sure that such tasks do nothing otherwise they will
         * schedule another round at the end and so on, leaving us with multiple parallel sniffing "tracks" which is undesirable.
         */
        if (taskState.compareAndSet(TaskState.WAITING, TaskState.STARTED) == false) {
            return;
        }
        try {
            sniff();
        } catch (Exception e) {
            logger.error("error while sniffing nodes", e);
        } finally {
            // Always schedule the next round, even if this one failed.
            Task task = new Task(sniffIntervalMillis);
            Future<?> future = scheduler.schedule(task, nextTaskDelay);
            // tasks are run by a single threaded executor, so swapping is safe with a simple volatile variable
            ScheduledTask previousTask = nextScheduledTask;
            nextScheduledTask = new ScheduledTask(task, future);
            assert initialized.get() == false || previousTask.task.isSkipped() || previousTask.task.hasStarted()
                : "task that we are replacing is neither cancelled nor has it ever started";
        }
    }
    /**
     * Returns true if the task has started, false in case it didn't start (yet?) or it was skipped
     */
    boolean hasStarted() {
        return taskState.get() == TaskState.STARTED;
    }
    /**
     * Sets this task to be skipped. Returns true if the task will be skipped, false if the task has already started.
     */
    boolean skip() {
        /*
         * Threads may still get run although future#cancel returns true. We make sure that a task is either cancelled (or skipped),
         * or entirely run. In the odd case that future#cancel returns true and the thread still runs, the task won't do anything.
         * In case future#cancel returns true but the task has already started, this state change will not succeed hence this method
         * returns false and the task will normally run.
         */
        return taskState.compareAndSet(TaskState.WAITING, TaskState.SKIPPED);
    }
    /**
     * Returns true if the task was set to be skipped before it was started
     */
    boolean isSkipped() {
        return taskState.get() == TaskState.SKIPPED;
    }
}
static final
|
Task
|
java
|
elastic__elasticsearch
|
x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/interceptor/DlsFlsLicenseRequestInterceptor.java
|
{
"start": 1638,
"end": 6184
}
|
/**
 * Request interceptor that rejects indices requests touching DLS/FLS-protected indices when
 * the current license does not permit document- or field-level security. Failing requests
 * carry the offending index names in the "es.indices_with_dls_or_fls" exception metadata.
 */
class ____ implements RequestInterceptor {
    private static final Logger logger = LogManager.getLogger(DlsFlsLicenseRequestInterceptor.class);
    private final ThreadContext threadContext;
    private final XPackLicenseState licenseState;
    public DlsFlsLicenseRequestInterceptor(ThreadContext threadContext, XPackLicenseState licenseState) {
        this.threadContext = threadContext;
        this.licenseState = licenseState;
    }
    @Override
    public SubscribableListener<Void> intercept(
        AuthorizationEngine.RequestInfo requestInfo,
        AuthorizationEngine authorizationEngine,
        AuthorizationInfo authorizationInfo
    ) {
        // Proxy actions are skipped: the license check happens on the target cluster/node.
        if (requestInfo.getRequest() instanceof IndicesRequest && false == TransportActionProxy.isProxyAction(requestInfo.getAction())) {
            final Role role = RBACEngine.maybeGetRBACEngineRole(AUTHORIZATION_INFO_VALUE.get(threadContext));
            // Checking whether role has FLS or DLS first before checking indicesAccessControl for efficiency because indicesAccessControl
            // can contain a long list of indices
            // But if role is null, it means a custom authorization engine is in use and we have to directly go check indicesAccessControl
            if (role == null || role.hasFieldOrDocumentLevelSecurity()) {
                logger.trace("Role has DLS or FLS. Checking for whether the request touches any indices that have DLS or FLS configured");
                final IndicesAccessControl indicesAccessControl = INDICES_PERMISSIONS_VALUE.get(threadContext);
                if (indicesAccessControl != null) {
                    // Snapshot the license state so all checks below see a consistent view.
                    final XPackLicenseState frozenLicenseState = licenseState.copyCurrentLicenseState();
                    if (logger.isDebugEnabled()) {
                        final IndicesAccessControl.DlsFlsUsage dlsFlsUsage = indicesAccessControl.getFieldAndDocumentLevelSecurityUsage();
                        if (dlsFlsUsage.hasFieldLevelSecurity()) {
                            logger.debug(
                                () -> format(
                                    "User [%s] has field level security on [%s]",
                                    requestInfo.getAuthentication(),
                                    indicesAccessControl.getIndicesWithFieldLevelSecurity()
                                )
                            );
                        }
                        if (dlsFlsUsage.hasDocumentLevelSecurity()) {
                            logger.debug(
                                () -> format(
                                    "User [%s] has document level security on [%s]",
                                    requestInfo.getAuthentication(),
                                    indicesAccessControl.getIndicesWithDocumentLevelSecurity()
                                )
                            );
                        }
                    }
                    // checkWithoutTracking first (no usage tracking); only if a feature is
                    // unlicensed do we call check(), which records usage, for the features
                    // the request actually relies on.
                    if (false == DOCUMENT_LEVEL_SECURITY_FEATURE.checkWithoutTracking(frozenLicenseState)
                        || false == FIELD_LEVEL_SECURITY_FEATURE.checkWithoutTracking(frozenLicenseState)) {
                        boolean incompatibleLicense = false;
                        IndicesAccessControl.DlsFlsUsage dlsFlsUsage = indicesAccessControl.getFieldAndDocumentLevelSecurityUsage();
                        if (dlsFlsUsage.hasDocumentLevelSecurity() && false == DOCUMENT_LEVEL_SECURITY_FEATURE.check(frozenLicenseState)) {
                            incompatibleLicense = true;
                        }
                        if (dlsFlsUsage.hasFieldLevelSecurity() && false == FIELD_LEVEL_SECURITY_FEATURE.check(frozenLicenseState)) {
                            incompatibleLicense = true;
                        }
                        if (incompatibleLicense) {
                            final ElasticsearchSecurityException licenseException = LicenseUtils.newComplianceException(
                                "field and document level security"
                            );
                            licenseException.addMetadata(
                                "es.indices_with_dls_or_fls",
                                indicesAccessControl.getIndicesWithFieldOrDocumentLevelSecurity()
                            );
                            return SubscribableListener.newFailed(licenseException);
                        }
                    }
                }
            }
        }
        // Nothing to intercept: let the request proceed.
        return SubscribableListener.nullSuccess();
    }
}
|
DlsFlsLicenseRequestInterceptor
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/annotations/processing/SQL.java
|
{
"start": 2253,
"end": 3761
}
|
class ____ actually implement
* the type which declares the method annotated {@code @SQL}.
* </ul>
* <p>
* Thus, the generated methods may be called according to the following
* protocol:
* <pre>
* Books books = new Books_(session);
* Book book = books.findBookByIsbn(isbn);
 * List<Book> results = books.findBooksByTitleWithPagination(pattern, 10, 0);
* </pre>
* <p>
* This is reminiscent of traditional DAO-style repositories.
* <p>
* The return type of an annotated method must be:
* <ul>
* <li>an entity type or {@link java.util.Optional},
* <li>{@link java.util.List} or {@link java.util.stream.Stream},
* <li>{@code io.smallrye.mutiny.Uni}, when used with Hibernate Reactive,
* <li>{@link org.hibernate.query.Query},
* <li>{@link jakarta.persistence.Query}, or
* <li>{@link org.hibernate.query.NativeQuery}.
* </ul>
* <p>
* The method parameters must match the parameters of the SQL query,
* either by name or by position:
* <ul>
* <li>an ordinal query parameter of form {@code ?n} is matched to
* the <em>n</em>th parameter of the method, and
* <li>a named query parameter of form {@code :name} is matched to
* the method parameter {@code name}.
* </ul>
* <p>
* As an exception, the method may have at most one parameter of
* type {@code EntityManager}, {@code Session},
* {@code StatelessSession}, or {@code Mutiny.Session}.
*
* @see HQL
* @see Find
*
* @author Gavin King
* @since 6.3
*/
@Target(METHOD)
@Retention(CLASS)
@Incubating
public @
|
will
|
java
|
quarkusio__quarkus
|
extensions/smallrye-graphql/deployment/src/test/java/io/quarkus/smallrye/graphql/deployment/GraphQLOverHttpApi.java
|
{
"start": 407,
"end": 549
}
|
/** Minimal GraphQL API used by the GraphQL-over-HTTP tests: a single user query. */
class ____ {
    @Query
    public User getUser(@Id String id) {
        // Returns a fixed user echoing the requested id; name fields are hard-coded test data.
        return new User(id, "Koos", "van der Merwe");
    }
}
|
GraphQLOverHttpApi
|
java
|
apache__flink
|
flink-connectors/flink-connector-files/src/main/java/org/apache/flink/connector/file/src/reader/StreamFormat.java
|
{
"start": 10814,
"end": 11017
}
|
class ____} (section "Checkpointing") for
* details.
*/
@Nullable
default CheckpointedPosition getCheckpointedPosition() {
    // Default implementation: no checkpointable position is exposed; formats that support
    // checkpointed restore override this.
    return null;
}
}
}
|
comment
|
java
|
apache__camel
|
core/camel-util/src/main/java/org/apache/camel/util/IOHelper.java
|
{
"start": 26497,
"end": 28121
}
|
/**
 * InputStream view of a text file that re-encodes its content: bytes are decoded from the
 * file's charset (via the reader) and re-encoded in the default stream charset, 4096 chars
 * at a time. NOTE(review): read() is unsynchronized while reset() takes the lock — assumes
 * single-threaded reads; confirm with callers.
 */
class ____ extends InputStream {
    private final Lock lock = new ReentrantLock();
    private final Path file;
    private final BufferedReader reader;
    // Target charset for the bytes this stream emits (captured from the outer default).
    private final Charset defaultStreamCharset;
    // Re-encoded bytes pending delivery; refilled lazily when drained.
    private ByteBuffer bufferBytes;
    private final CharBuffer bufferedChars = CharBuffer.allocate(4096);
    public EncodingInputStream(Path file, String charset) throws IOException {
        this.file = file;
        reader = toReader(file, charset);
        defaultStreamCharset = defaultCharset.get();
    }
    @Override
    public int read() throws IOException {
        // Refill the byte buffer from the reader when empty or exhausted.
        if (bufferBytes == null || bufferBytes.remaining() <= 0) {
            BufferCaster.cast(bufferedChars).clear();
            int len = reader.read(bufferedChars);
            bufferedChars.flip();
            if (len == -1) {
                // End of underlying file.
                return -1;
            }
            bufferBytes = defaultStreamCharset.encode(bufferedChars);
        }
        // Mask to an unsigned byte per the InputStream.read() contract (0..255).
        return bufferBytes.get() & 0xFF;
    }
    @Override
    public void close() throws IOException {
        reader.close();
    }
    @Override
    public void reset() throws IOException {
        lock.lock();
        try {
            reader.reset();
        } finally {
            lock.unlock();
        }
    }
    // Raw (un-re-encoded) stream over the same file, for callers needing original bytes.
    public InputStream toOriginalInputStream() throws IOException {
        return Files.newInputStream(file);
    }
}
/**
* Encoding-aware file reader.
*/
public static
|
EncodingInputStream
|
java
|
apache__flink
|
flink-table/flink-table-planner/src/test/java/org/apache/flink/table/api/QueryOperationSqlSerializationTest.java
|
{
"start": 2067,
"end": 7585
}
|
class ____ implements TableTestProgramRunner {
/** The set of test programs whose Table API pipelines are serialized to SQL and compared. */
@Override
public List<TableTestProgram> programs() {
    return Arrays.asList(
        QueryOperationTestPrograms.SOURCE_QUERY_OPERATION,
        QueryOperationTestPrograms.VALUES_QUERY_OPERATION,
        QueryOperationTestPrograms.FILTER_QUERY_OPERATION,
        QueryOperationTestPrograms.AGGREGATE_QUERY_OPERATION,
        QueryOperationTestPrograms.AGGREGATE_NO_GROUP_BY_QUERY_OPERATION,
        QueryOperationTestPrograms.DISTINCT_QUERY_OPERATION,
        QueryOperationTestPrograms.JOIN_QUERY_OPERATION,
        QueryOperationTestPrograms.ORDER_BY_QUERY_OPERATION,
        QueryOperationTestPrograms.WINDOW_AGGREGATE_QUERY_OPERATION,
        QueryOperationTestPrograms.UNION_ALL_QUERY_OPERATION,
        QueryOperationTestPrograms.LATERAL_JOIN_QUERY_OPERATION,
        QueryOperationTestPrograms.SQL_QUERY_OPERATION,
        QueryOperationTestPrograms.OVER_WINDOW_RANGE,
        QueryOperationTestPrograms.OVER_WINDOW_ROWS,
        QueryOperationTestPrograms.OVER_WINDOW_ROWS_UNBOUNDED_NO_PARTITION,
        QueryOperationTestPrograms.OVER_WINDOW_LAG,
        QueryOperationTestPrograms.ACCESSING_NESTED_COLUMN,
        QueryOperationTestPrograms.ROW_SEMANTIC_TABLE_PTF,
        QueryOperationTestPrograms.SET_SEMANTIC_TABLE_PTF,
        QueryOperationTestPrograms.INLINE_FUNCTION_SERIALIZATION,
        QueryOperationTestPrograms.ML_PREDICT_MODEL_API,
        QueryOperationTestPrograms.ASYNC_ML_PREDICT_MODEL_API,
        QueryOperationTestPrograms.ASYNC_ML_PREDICT_TABLE_API_MAP_EXPRESSION_CONFIG);
}
/**
 * Verifies that serializing a Table API pipeline's QueryOperation yields exactly the SQL
 * declared in the program's SqlTestStep. Each program is expected to contain one
 * TableApiTestStep (the pipeline) and one SqlTestStep (the expected SQL).
 */
@ParameterizedTest
@MethodSource("supportedPrograms")
void testSqlSerialization(TableTestProgram program) {
    final TableEnvironment env = setupEnv(program);
    final TableApiTestStep tableApiStep =
        (TableApiTestStep)
            program.runSteps.stream()
                .filter(s -> s instanceof TableApiTestStep)
                .findFirst()
                .get();
    final SqlTestStep sqlStep =
        (SqlTestStep)
            program.runSteps.stream()
                .filter(s -> s instanceof SqlTestStep)
                .findFirst()
                .get();
    final Table table = tableApiStep.toTable(env);
    assertThat(table.getQueryOperation().asSerializableString(new InlineFunctionSqlFactory()))
        .isEqualTo(sqlStep.sql);
}
/**
 * Verifies that when a QueryOperation is translated into a pipeline, the generated job
 * name equals the operation's serialized SQL.
 */
@ParameterizedTest
@MethodSource("supportedPrograms")
void testSqlAsJobNameForQueryOperation(TableTestProgram program) {
    final TableEnvironmentImpl env = (TableEnvironmentImpl) setupEnv(program);
    final TableApiTestStep tableApiStep =
        (TableApiTestStep)
            program.runSteps.stream()
                .filter(s -> s instanceof TableApiTestStep)
                .findFirst()
                .get();
    final SqlTestStep sqlStep =
        (SqlTestStep)
            program.runSteps.stream()
                .filter(s -> s instanceof SqlTestStep)
                .findFirst()
                .get();
    final Table table = tableApiStep.toTable(env);
    QueryOperation queryOperation = table.getQueryOperation();
    CollectModifyOperation sinkOperation = new CollectModifyOperation(queryOperation);
    List<Transformation<?>> transformations =
        env.getPlanner().translate(Collections.singletonList(sinkOperation));
    StreamGraph streamGraph =
        (StreamGraph)
            env.generatePipelineFromQueryOperation(queryOperation, transformations);
    assertThat(streamGraph.getJobName()).isEqualTo(sqlStep.sql);
}
/**
 * Creates a streaming TableEnvironment and applies the program's setup steps, wiring all
 * sources and sinks to the "values" test connector.
 */
private static TableEnvironment setupEnv(TableTestProgram program) {
    final TableEnvironment env =
        TableEnvironment.create(
            EnvironmentSettings.newInstance()
                .inStreamingMode()
                .withSqlFactory(new InlineFunctionSqlFactory())
                .build());
    final Map<String, String> connectorOptions = new HashMap<>();
    connectorOptions.put("connector", "values");
    program.getSetupSourceTestSteps().forEach(s -> s.apply(env, connectorOptions));
    program.getSetupModelTestSteps().forEach(s -> s.apply(env, Map.of("provider", "values")));
    program.getSetupSinkTestSteps().forEach(s -> s.apply(env, connectorOptions));
    program.getSetupFunctionTestSteps().forEach(f -> f.apply(env));
    program.getSetupSqlTestSteps().forEach(s -> s.apply(env));
    return env;
}
/** Setup step kinds this runner accepts in a program. */
@Override
public EnumSet<TestKind> supportedSetupSteps() {
    return EnumSet.of(
        TestKind.CONFIG,
        TestKind.MODEL,
        TestKind.SQL,
        TestKind.FUNCTION,
        TestKind.SOURCE_WITH_DATA,
        TestKind.SINK_WITH_DATA);
}
/** Run step kinds this runner accepts in a program. */
@Override
public EnumSet<TestKind> supportedRunSteps() {
    return EnumSet.of(TestKind.TABLE_API, TestKind.SQL);
}
private static
|
QueryOperationSqlSerializationTest
|
java
|
apache__flink
|
flink-table/flink-table-api-bridge-base/src/main/java/org/apache/flink/table/api/bridge/internal/AbstractStreamTableEnvironmentImpl.java
|
{
"start": 3433,
"end": 3515
}
|
class ____ implement a {@code StreamTableEnvironment}. */
@Internal
public abstract
|
to
|
java
|
apache__spark
|
connector/kafka-0-10/src/test/java/org/apache/spark/streaming/kafka010/JavaConsumerStrategySuite.java
|
{
"start": 1110,
"end": 4109
}
|
class ____ implements Serializable {
@Test
public void testConsumerStrategyConstructors() {
final String topic1 = "topic1";
final Pattern pat = Pattern.compile("top.*");
final Collection<String> topics = Arrays.asList(topic1);
final scala.collection.Iterable<String> sTopics = CollectionConverters.asScala(topics);
final TopicPartition tp1 = new TopicPartition(topic1, 0);
final TopicPartition tp2 = new TopicPartition(topic1, 1);
final Collection<TopicPartition> parts = Arrays.asList(tp1, tp2);
final scala.collection.Iterable<TopicPartition> sParts = CollectionConverters.asScala(parts);
final Map<String, Object> kafkaParams = new HashMap<>();
kafkaParams.put("bootstrap.servers", "not used");
final scala.collection.Map<String, Object> sKafkaParams =
CollectionConverters.asScala(kafkaParams);
final Map<TopicPartition, Long> offsets = new HashMap<>();
offsets.put(tp1, 23L);
final Map<TopicPartition, Object> dummyOffsets = new HashMap<>();
dummyOffsets.putAll(offsets);
final scala.collection.Map<TopicPartition, Object> sOffsets =
CollectionConverters.asScala(dummyOffsets);
final ConsumerStrategy<String, String> sub1 =
ConsumerStrategies.Subscribe(sTopics, sKafkaParams, sOffsets);
final ConsumerStrategy<String, String> sub2 =
ConsumerStrategies.Subscribe(sTopics, sKafkaParams);
final ConsumerStrategy<String, String> sub3 =
ConsumerStrategies.Subscribe(topics, kafkaParams, offsets);
final ConsumerStrategy<String, String> sub4 =
ConsumerStrategies.Subscribe(topics, kafkaParams);
Assertions.assertEquals(
sub1.executorKafkaParams().get("bootstrap.servers"),
sub3.executorKafkaParams().get("bootstrap.servers"));
final ConsumerStrategy<String, String> psub1 =
ConsumerStrategies.SubscribePattern(pat, sKafkaParams, sOffsets);
final ConsumerStrategy<String, String> psub2 =
ConsumerStrategies.SubscribePattern(pat, sKafkaParams);
final ConsumerStrategy<String, String> psub3 =
ConsumerStrategies.SubscribePattern(pat, kafkaParams, offsets);
final ConsumerStrategy<String, String> psub4 =
ConsumerStrategies.SubscribePattern(pat, kafkaParams);
Assertions.assertEquals(
psub1.executorKafkaParams().get("bootstrap.servers"),
psub3.executorKafkaParams().get("bootstrap.servers"));
final ConsumerStrategy<String, String> asn1 =
ConsumerStrategies.Assign(sParts, sKafkaParams, sOffsets);
final ConsumerStrategy<String, String> asn2 =
ConsumerStrategies.Assign(sParts, sKafkaParams);
final ConsumerStrategy<String, String> asn3 =
ConsumerStrategies.Assign(parts, kafkaParams, offsets);
final ConsumerStrategy<String, String> asn4 =
ConsumerStrategies.Assign(parts, kafkaParams);
Assertions.assertEquals(
asn1.executorKafkaParams().get("bootstrap.servers"),
asn3.executorKafkaParams().get("bootstrap.servers"));
}
}
|
JavaConsumerStrategySuite
|
java
|
elastic__elasticsearch
|
modules/streams/src/main/java/org/elasticsearch/rest/streams/StreamsPlugin.java
|
{
"start": 1901,
"end": 2867
}
|
class ____ extends Plugin implements ActionPlugin {
@Override
public List<RestHandler> getRestHandlers(
Settings settings,
NamedWriteableRegistry namedWriteableRegistry,
RestController restController,
ClusterSettings clusterSettings,
IndexScopedSettings indexScopedSettings,
SettingsFilter settingsFilter,
IndexNameExpressionResolver indexNameExpressionResolver,
Supplier<DiscoveryNodes> nodesInCluster,
Predicate<NodeFeature> clusterSupportsFeature
) {
return List.of(new RestSetLogStreamsEnabledAction(), new RestStreamsStatusAction());
}
@Override
public List<ActionHandler> getActions() {
return List.of(
new ActionHandler(LogsStreamsActivationToggleAction.INSTANCE, TransportLogsStreamsToggleActivation.class),
new ActionHandler(StreamsStatusAction.INSTANCE, TransportStreamsStatusAction.class)
);
}
}
|
StreamsPlugin
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/hql/JoinInheritanceInsertTest.java
|
{
"start": 2781,
"end": 3053
}
|
class ____ extends Book {
private boolean forbidden;
SpellBook() {
}
public SpellBook(Integer id, String title, boolean forbidden) {
super( id, title );
this.forbidden = forbidden;
}
public boolean getForbidden() {
return forbidden;
}
}
}
|
SpellBook
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/dialect/lock/AbstractPessimisticUpdateLockingStrategy.java
|
{
"start": 753,
"end": 4281
}
|
class ____ implements LockingStrategy {
private final EntityPersister lockable;
private final LockMode lockMode;
private final String sql;
/**
* Construct a locking strategy based on SQL UPDATE statements.
*
* @param lockable The metadata for the entity to be locked.
* @param lockMode Indicates the type of lock to be acquired. Note that
* read-locks are not valid for this strategy.
*/
public AbstractPessimisticUpdateLockingStrategy(EntityPersister lockable, LockMode lockMode) {
this.lockable = lockable;
this.lockMode = lockMode;
if ( !lockable.isVersioned() ) {
CORE_LOGGER.writeLocksNotSupported( lockable.getEntityName() );
this.sql = null;
}
else {
this.sql = generateLockString();
}
}
@Override
public void lock(Object id, Object version, Object object, int timeout, SharedSessionContractImplementor session) {
try {
doLock( id, version, session );
}
catch (JDBCException e) {
throw new PessimisticEntityLockException( object, "could not obtain pessimistic lock", e );
}
}
void doLock(Object id, Object version, SharedSessionContractImplementor session) {
if ( !lockable.isVersioned() ) {
throw new HibernateException( "write locks via update not supported for non-versioned entities [" + lockable.getEntityName() + "]" );
}
try {
final var factory = session.getFactory();
final var jdbcCoordinator = session.getJdbcCoordinator();
final var preparedStatement = jdbcCoordinator.getStatementPreparer().prepareStatement( sql );
try {
final var versionType = lockable.getVersionType();
final var identifierType = lockable.getIdentifierType();
versionType.nullSafeSet( preparedStatement, version, 1, session );
int offset = 2;
identifierType.nullSafeSet( preparedStatement, id, offset, session );
offset += identifierType.getColumnSpan( factory.getRuntimeMetamodels() );
if ( lockable.isVersioned() ) {
versionType.nullSafeSet( preparedStatement, version, offset, session );
}
final int affected = jdbcCoordinator.getResultSetReturn().executeUpdate( preparedStatement, sql );
// todo: should this instead check for exactly one row modified?
if ( affected < 0 ) {
final var statistics = factory.getStatistics();
final String entityName = lockable.getEntityName();
if ( statistics.isStatisticsEnabled() ) {
statistics.optimisticFailure( entityName );
}
throw new StaleObjectStateException( entityName, id );
}
}
finally {
jdbcCoordinator.getLogicalConnection().getResourceRegistry().release( preparedStatement );
jdbcCoordinator.afterStatementExecution();
}
}
catch ( SQLException e ) {
throw session.getJdbcServices().getSqlExceptionHelper().convert(
e,
"could not lock: " + MessageHelper.infoString( lockable, id, session.getFactory() ),
sql
);
}
}
protected String generateLockString() {
final var factory = lockable.getFactory();
final var update = new Update( factory );
update.setTableName( lockable.getRootTableName() );
update.addAssignment( lockable.getVersionColumnName() );
update.addRestriction( lockable.getRootTableIdentifierColumnNames() );
update.addRestriction( lockable.getVersionColumnName() );
if ( factory.getSessionFactoryOptions().isCommentsEnabled() ) {
update.setComment( lockMode + " lock " + lockable.getEntityName() );
}
return update.toStatementString();
}
protected LockMode getLockMode() {
return lockMode;
}
}
|
AbstractPessimisticUpdateLockingStrategy
|
java
|
apache__maven
|
impl/maven-core/src/main/java/org/apache/maven/configuration/internal/DefaultBeanConfigurator.java
|
{
"start": 5822,
"end": 7278
}
|
class ____ extends AbstractConfigurationConverter {
@Override
public boolean canConvert(Class<?> type) {
return XmlNode.class.equals(type);
}
@Override
public Object fromConfiguration(
final ConverterLookup lookup,
final PlexusConfiguration configuration,
final Class<?> type,
final Class<?> enclosingType,
final ClassLoader loader,
final ExpressionEvaluator evaluator,
final ConfigurationListener listener)
throws ComponentConfigurationException {
try {
return toXml(configuration, evaluator);
} catch (ExpressionEvaluationException e) {
throw new ComponentConfigurationException("Unable to convert configuration to xml node", e);
}
}
XmlNode toXml(PlexusConfiguration config, ExpressionEvaluator evaluator) throws ExpressionEvaluationException {
List<XmlNode> children = new ArrayList<>();
for (PlexusConfiguration c : config.getChildren()) {
children.add(toXml(c, evaluator));
}
String name = config.getName();
Object value = evaluator.evaluate(config.getValue());
return XmlNode.newInstance(name, value != null ? value.toString() : null, null, children, null);
}
}
static
|
XmlConverter
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/search/fetch/FetchProfiler.java
|
{
"start": 5295,
"end": 6350
}
|
class ____ extends AbstractProfileBreakdown<FetchPhaseTiming> {
private final long start;
private final Map<String, Object> debug = new HashMap<>();
private final List<FetchSubPhaseProfileBreakdown> subPhases = new ArrayList<>();
FetchProfileBreakdown(long start) {
super(FetchPhaseTiming.class);
this.start = start;
}
@Override
protected Map<String, Object> toDebugMap() {
return Map.copyOf(debug);
}
ProfileResult result(long stop) {
List<ProfileResult> children = subPhases.stream()
.sorted(Comparator.comparing(b -> b.type))
.map(FetchSubPhaseProfileBreakdown::result)
.collect(toList());
return new ProfileResult("fetch", "", toBreakdownMap(), toDebugMap(), stop - start, children);
}
}
/**
* Actions within the "main" fetch phase that are explicitly profiled.
* See also {@link FetchSubPhaseProfileBreakdown}.
*/
|
FetchProfileBreakdown
|
java
|
quarkusio__quarkus
|
integration-tests/main/src/main/java/io/quarkus/it/corestuff/serialization/SomeSerializationObject.java
|
{
"start": 215,
"end": 925
}
|
class ____ implements Serializable {
private Person person;
private ExternalizablePerson externalizablePerson;
private List<String> list;
public Person getPerson() {
return person;
}
public void setPerson(Person person) {
this.person = person;
}
public ExternalizablePerson getExternalizablePerson() {
return externalizablePerson;
}
public void setExternalizablePerson(ExternalizablePerson externalizablePerson) {
this.externalizablePerson = externalizablePerson;
}
public List<String> getList() {
return list;
}
public void setList(List<String> list) {
this.list = list;
}
}
|
SomeSerializationObject
|
java
|
apache__rocketmq
|
broker/src/main/java/org/apache/rocketmq/broker/metrics/ConsumerLagCalculator.java
|
{
"start": 4643,
"end": 5072
}
|
class ____ {
public String group;
public String topic;
public boolean isPop;
public String retryTopic;
public ProcessGroupInfo(String group, String topic, boolean isPop,
String retryTopic) {
this.group = group;
this.topic = topic;
this.isPop = isPop;
this.retryTopic = retryTopic;
}
}
public static
|
ProcessGroupInfo
|
java
|
alibaba__nacos
|
client/src/test/java/com/alibaba/nacos/client/naming/remote/gprc/redo/NamingGrpcRedoServiceTest.java
|
{
"start": 2073,
"end": 13216
}
|
class ____ {
private static final String SERVICE = "service";
private static final String GROUP = "group";
private static final String CLUSTER = "cluster";
@Mock
private NamingGrpcClientProxy clientProxy;
@Mock
private NamingFuzzyWatchServiceListHolder namingFuzzyWatchServiceListHolder;
private NamingGrpcRedoService redoService;
@BeforeEach
void setUp() throws Exception {
Properties prop = new Properties();
NacosClientProperties nacosClientProperties = NacosClientProperties.PROTOTYPE.derive(prop);
redoService = new NamingGrpcRedoService(clientProxy, namingFuzzyWatchServiceListHolder, nacosClientProperties);
ScheduledExecutorService redoExecutor = (ScheduledExecutorService) ReflectUtils.getFieldValue(redoService,
"redoExecutor");
redoExecutor.shutdownNow();
}
@AfterEach
void tearDown() throws Exception {
redoService.shutdown();
}
@Test
void testDefaultProperties() throws Exception {
Field redoThreadCountField = NamingGrpcRedoService.class.getDeclaredField("redoThreadCount");
redoThreadCountField.setAccessible(true);
Field redoDelayTimeField = NamingGrpcRedoService.class.getDeclaredField("redoDelayTime");
redoDelayTimeField.setAccessible(true);
Long redoDelayTimeValue = (Long) redoDelayTimeField.get(redoService);
Integer redoThreadCountValue = (Integer) redoThreadCountField.get(redoService);
assertEquals(Long.valueOf(3000L), redoDelayTimeValue);
assertEquals(Integer.valueOf(1), redoThreadCountValue);
}
@Test
void testCustomProperties() throws Exception {
Properties prop = new Properties();
prop.setProperty(PropertyKeyConst.REDO_DELAY_TIME, "4000");
prop.setProperty(PropertyKeyConst.REDO_DELAY_THREAD_COUNT, "2");
NacosClientProperties nacosClientProperties = NacosClientProperties.PROTOTYPE.derive(prop);
NamingGrpcRedoService redoService = new NamingGrpcRedoService(clientProxy, namingFuzzyWatchServiceListHolder, nacosClientProperties);
Field redoThreadCountField = NamingGrpcRedoService.class.getDeclaredField("redoThreadCount");
redoThreadCountField.setAccessible(true);
Field redoDelayTimeField = NamingGrpcRedoService.class.getDeclaredField("redoDelayTime");
redoDelayTimeField.setAccessible(true);
Long redoDelayTimeValue = (Long) redoDelayTimeField.get(redoService);
Integer redoThreadCountValue = (Integer) redoThreadCountField.get(redoService);
assertEquals(Long.valueOf(4000L), redoDelayTimeValue);
assertEquals(Integer.valueOf(2), redoThreadCountValue);
}
@Test
void testOnConnected() {
assertFalse(redoService.isConnected());
redoService.onConnected(new TestConnection(new RpcClient.ServerInfo()));
assertTrue(redoService.isConnected());
}
@Test
void testOnDisConnect() {
redoService.onConnected(new TestConnection(new RpcClient.ServerInfo()));
redoService.cacheInstanceForRedo(SERVICE, GROUP, new Instance());
redoService.instanceRegistered(SERVICE, GROUP);
redoService.cacheSubscriberForRedo(SERVICE, GROUP, CLUSTER);
redoService.subscriberRegistered(SERVICE, GROUP, CLUSTER);
assertTrue(redoService.isConnected());
assertTrue(redoService.findInstanceRedoData().isEmpty());
assertTrue(redoService.findSubscriberRedoData().isEmpty());
redoService.onDisConnect(new TestConnection(new RpcClient.ServerInfo()));
assertFalse(redoService.isConnected());
assertFalse(redoService.findInstanceRedoData().isEmpty());
assertFalse(redoService.findSubscriberRedoData().isEmpty());
}
@Test
void testCacheInstanceForRedo() {
ConcurrentMap<String, InstanceRedoData> registeredInstances = getInstanceRedoDataMap();
assertTrue(registeredInstances.isEmpty());
Instance instance = new Instance();
redoService.cacheInstanceForRedo(SERVICE, GROUP, instance);
assertFalse(registeredInstances.isEmpty());
InstanceRedoData actual = registeredInstances.entrySet().iterator().next().getValue();
assertEquals(SERVICE, actual.getServiceName());
assertEquals(GROUP, actual.getGroupName());
assertEquals(instance, actual.get());
assertFalse(actual.isRegistered());
assertFalse(actual.isUnregistering());
assertTrue(actual.isExpectedRegistered());
}
@Test
void testCacheInstanceForRedoByBatchInstanceRedoData() {
ConcurrentMap<String, InstanceRedoData> registeredInstances = getInstanceRedoDataMap();
assertTrue(registeredInstances.isEmpty());
Instance instance = new Instance();
List<Instance> instanceList = new ArrayList<>();
instanceList.add(instance);
redoService.cacheInstanceForRedo(SERVICE, GROUP, instanceList);
assertFalse(registeredInstances.isEmpty());
BatchInstanceRedoData actual = (BatchInstanceRedoData) registeredInstances.entrySet().iterator().next()
.getValue();
assertEquals(SERVICE, actual.getServiceName());
assertEquals(GROUP, actual.getGroupName());
assertEquals(instanceList, actual.getInstances());
assertFalse(actual.isRegistered());
assertFalse(actual.isUnregistering());
}
@Test
void testInstanceRegistered() {
ConcurrentMap<String, InstanceRedoData> registeredInstances = getInstanceRedoDataMap();
redoService.cacheInstanceForRedo(SERVICE, GROUP, new Instance());
redoService.instanceRegistered(SERVICE, GROUP);
InstanceRedoData actual = registeredInstances.entrySet().iterator().next().getValue();
assertTrue(actual.isRegistered());
}
@Test
void testInstanceDeregister() {
ConcurrentMap<String, InstanceRedoData> registeredInstances = getInstanceRedoDataMap();
redoService.cacheInstanceForRedo(SERVICE, GROUP, new Instance());
redoService.instanceDeregister(SERVICE, GROUP);
InstanceRedoData actual = registeredInstances.entrySet().iterator().next().getValue();
assertTrue(actual.isUnregistering());
assertFalse(actual.isExpectedRegistered());
}
@Test
void testInstanceDeregistered() {
ConcurrentMap<String, InstanceRedoData> registeredInstances = getInstanceRedoDataMap();
redoService.cacheInstanceForRedo(SERVICE, GROUP, new Instance());
redoService.instanceDeregistered(SERVICE, GROUP);
InstanceRedoData actual = registeredInstances.entrySet().iterator().next().getValue();
assertFalse(actual.isRegistered());
assertTrue(actual.isUnregistering());
}
@Test
void testRemoveInstanceForRedo() {
ConcurrentMap<String, InstanceRedoData> registeredInstances = getInstanceRedoDataMap();
assertTrue(registeredInstances.isEmpty());
redoService.cacheInstanceForRedo(SERVICE, GROUP, new Instance());
assertFalse(registeredInstances.isEmpty());
redoService.instanceDeregister(SERVICE, GROUP);
redoService.removeInstanceForRedo(SERVICE, GROUP);
assertTrue(registeredInstances.isEmpty());
}
@Test
void testFindInstanceRedoData() {
redoService.cacheInstanceForRedo(SERVICE, GROUP, new Instance());
assertFalse(redoService.findInstanceRedoData().isEmpty());
redoService.instanceRegistered(SERVICE, GROUP);
assertTrue(redoService.findInstanceRedoData().isEmpty());
redoService.instanceDeregister(SERVICE, GROUP);
assertFalse(redoService.findInstanceRedoData().isEmpty());
}
@SuppressWarnings("all")
private ConcurrentMap<String, InstanceRedoData> getInstanceRedoDataMap() {
return (ConcurrentMap<String, InstanceRedoData>) ReflectUtils.getFieldValue(redoService, "registeredInstances");
}
@Test
void testCacheSubscriberForRedo() {
ConcurrentMap<String, SubscriberRedoData> subscribes = getSubscriberRedoDataMap();
assertTrue(subscribes.isEmpty());
redoService.cacheSubscriberForRedo(SERVICE, GROUP, CLUSTER);
assertFalse(subscribes.isEmpty());
SubscriberRedoData actual = subscribes.entrySet().iterator().next().getValue();
assertEquals(SERVICE, actual.getServiceName());
assertEquals(GROUP, actual.getGroupName());
assertEquals(CLUSTER, actual.get());
assertFalse(actual.isRegistered());
assertFalse(actual.isUnregistering());
}
@Test
void testSubscriberRegistered() {
ConcurrentMap<String, SubscriberRedoData> subscribes = getSubscriberRedoDataMap();
redoService.cacheSubscriberForRedo(SERVICE, GROUP, CLUSTER);
redoService.subscriberRegistered(SERVICE, GROUP, CLUSTER);
SubscriberRedoData actual = subscribes.entrySet().iterator().next().getValue();
assertTrue(actual.isRegistered());
}
@Test
void testSubscriberDeregister() {
ConcurrentMap<String, SubscriberRedoData> subscribes = getSubscriberRedoDataMap();
redoService.cacheSubscriberForRedo(SERVICE, GROUP, CLUSTER);
redoService.subscriberDeregister(SERVICE, GROUP, CLUSTER);
SubscriberRedoData actual = subscribes.entrySet().iterator().next().getValue();
assertTrue(actual.isUnregistering());
}
@Test
void testIsSubscriberRegistered() {
assertFalse(redoService.isSubscriberRegistered(SERVICE, GROUP, CLUSTER));
redoService.cacheSubscriberForRedo(SERVICE, GROUP, CLUSTER);
redoService.subscriberRegistered(SERVICE, GROUP, CLUSTER);
assertTrue(redoService.isSubscriberRegistered(SERVICE, GROUP, CLUSTER));
}
@Test
void testRemoveSubscriberForRedo() {
ConcurrentMap<String, SubscriberRedoData> subscribes = getSubscriberRedoDataMap();
assertTrue(subscribes.isEmpty());
redoService.cacheSubscriberForRedo(SERVICE, GROUP, CLUSTER);
assertFalse(subscribes.isEmpty());
redoService.subscriberDeregister(SERVICE, GROUP, CLUSTER);
redoService.removeSubscriberForRedo(SERVICE, GROUP, CLUSTER);
assertTrue(subscribes.isEmpty());
}
@Test
void testFindSubscriberRedoData() {
redoService.cacheSubscriberForRedo(SERVICE, GROUP, CLUSTER);
assertFalse(redoService.findSubscriberRedoData().isEmpty());
redoService.subscriberRegistered(SERVICE, GROUP, CLUSTER);
assertTrue(redoService.findSubscriberRedoData().isEmpty());
redoService.subscriberDeregister(SERVICE, GROUP, CLUSTER);
assertFalse(redoService.findSubscriberRedoData().isEmpty());
}
@SuppressWarnings("all")
private ConcurrentMap<String, SubscriberRedoData> getSubscriberRedoDataMap() {
return (ConcurrentMap<String, SubscriberRedoData>) ReflectUtils.getFieldValue(redoService, "subscribes");
}
}
|
NamingGrpcRedoServiceTest
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/sql/ast/NativeParameterMarkerStrategyTests.java
|
{
"start": 1908,
"end": 1976
}
|
enum ____ {
JDBC,
ORDINAL,
NAMED
}
public static
|
ParameterStyle
|
java
|
quarkusio__quarkus
|
extensions/resteasy-reactive/rest/deployment/src/test/java/io/quarkus/resteasy/reactive/server/test/customexceptions/ImplClassExceptionMapperTest.java
|
{
"start": 1755,
"end": 2144
}
|
class ____ {
@ServerExceptionMapper
public Response handleThrowable(RuntimeException t) {
return Response.status(417).build();
}
@GET
@Path("error")
@Produces("text/plain")
public String throwsException() {
throw removeStackTrace(new RuntimeException());
}
}
public static
|
DefaultCustomResource
|
java
|
apache__commons-lang
|
src/test/java/org/apache/commons/lang3/NotImplementedExceptionTest.java
|
{
"start": 1108,
"end": 2785
}
|
class ____ extends AbstractLangTest {
private void assertCorrect(final String assertMessage, final NotImplementedException nie, final String message, final Throwable nested, final String code) {
assertNotNull(nie, assertMessage + ": target is null");
assertEquals(message, nie.getMessage(), assertMessage + ": Message not equal");
assertEquals(nested, nie.getCause(), assertMessage + ": Nested throwable not equal");
assertEquals(code, nie.getCode(), assertMessage + ": Code not equal");
}
@Test
void testConstructors() {
final Throwable nested = new RuntimeException();
final String message = "Not Implemented";
final String code = "CODE";
NotImplementedException nie = new NotImplementedException(message);
assertCorrect("Issue in (String)", nie, message, null, null);
nie = new NotImplementedException(nested);
assertCorrect("Issue in (Throwable)", nie, nested.toString(), nested, null);
nie = new NotImplementedException(message, nested);
assertCorrect("Issue in (String, Throwable)", nie, message, nested, null);
nie = new NotImplementedException(message, code);
assertCorrect("Issue in (String, String)", nie, message, null, code);
nie = new NotImplementedException(nested, code);
assertCorrect("Issue in (Throwable, String)", nie, nested.toString(), nested, code);
nie = new NotImplementedException(message, nested, code);
assertCorrect("Issue in (String, Throwable, String)", nie, message, nested, code);
assertNull(new NotImplementedException().getCode());
}
}
|
NotImplementedExceptionTest
|
java
|
apache__hadoop
|
hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/audit/ITestAuditManagerDisabled.java
|
{
"start": 1454,
"end": 2574
}
|
class ____ extends AbstractS3ACostTest {
@Override
public Configuration createConfiguration() {
Configuration conf = super.createConfiguration();
resetAuditOptions(conf);
conf.setBoolean(AUDIT_ENABLED, false);
return conf;
}
/**
* Verify that the auditor is the no-op auditor if auditing is disabled.
*/
@Test
public void testAuditorDisabled() {
final S3AFileSystem fs = getFileSystem();
final AuditManagerS3A auditManager = fs.getAuditManager();
Assertions.assertThat(auditManager)
.isInstanceOf(NoopAuditManagerS3A.class);
}
/**
* All the audit spans are the no-op span.
*/
@Test
public void testAuditSpansAreAllTheSame() throws Throwable {
final S3AFileSystem fs = getFileSystem();
final AuditSpanS3A span1 = fs.createSpan("span1", null, null);
final AuditSpanS3A span2 = fs.createSpan("span2", null, null);
Assertions.assertThat(span1)
.describedAs("audit span 1")
.isSameAs(NOOP_SPAN);
Assertions.assertThat(span2)
.describedAs("audit span 2")
.isSameAs(span1);
}
}
|
ITestAuditManagerDisabled
|
java
|
quarkusio__quarkus
|
extensions/opentelemetry/runtime/src/main/java/io/quarkus/opentelemetry/runtime/OpenTelemetryUtil.java
|
{
"start": 414,
"end": 4219
}
|
class ____ {
public static final String TRACE_ID = "traceId";
public static final String SPAN_ID = "spanId";
public static final String SAMPLED = "sampled";
public static final String PARENT_ID = "parentId";
private OpenTelemetryUtil() {
}
/**
* Converts a list of "key=value" pairs into a map.
* Empty entries will be removed.
* In case of duplicate keys, the latest takes precedence.
*
* @param headers nullable list of "key=value" pairs
* @return non-null map of key-value pairs
*/
public static Map<String, String> convertKeyValueListToMap(List<String> headers) {
if (headers == null) {
return Collections.emptyMap();
}
Map<String, String> result = new LinkedHashMap<>();
for (String header : headers) {
if (header.isEmpty()) {
continue;
}
String[] parts = header.split("=", 2);
String key = parts[0].trim();
String value = parts[1].trim();
result.put(key, value);
}
return result;
}
/**
* Sets MDC data by using the current span from the context.
* <p>
* This method is in the hot path and was optimized to not use getSpanData()
*
* @param context opentelemetry context
* @param vertxContext vertx context
*/
public static void setMDCData(Context context, io.vertx.core.Context vertxContext) {
if (context == null) {
return;
}
Span span = Span.fromContextOrNull(context);
if (span != null) {
SpanContext spanContext = span.getSpanContext();
VertxMDC.INSTANCE.put(SPAN_ID, spanContext.getSpanId(), vertxContext);
VertxMDC.INSTANCE.put(TRACE_ID, spanContext.getTraceId(), vertxContext);
VertxMDC.INSTANCE.put(SAMPLED, Boolean.toString(spanContext.isSampled()), vertxContext);
if (span instanceof ReadableSpan) {
SpanContext parentSpanContext = ((ReadableSpan) span).getParentSpanContext();
if (parentSpanContext != null && parentSpanContext.isValid()) {
VertxMDC.INSTANCE.put(PARENT_ID, parentSpanContext.getSpanId(), vertxContext);
}
}
}
}
/**
* Gets current span data from the MDC context.
*
* @param context opentelemetry context
*/
public static Map<String, String> getSpanData(Context context) {
if (context == null) {
return Collections.emptyMap();
}
Span span = Span.fromContextOrNull(context);
Map<String, String> spanData = new HashMap<>(4, 1f);
if (span != null) {
SpanContext spanContext = span.getSpanContext();
spanData.put(SPAN_ID, spanContext.getSpanId());
spanData.put(TRACE_ID, spanContext.getTraceId());
spanData.put(SAMPLED, Boolean.toString(spanContext.isSampled()));
if (span instanceof ReadableSpan) {
SpanContext parentSpanContext = ((ReadableSpan) span).getParentSpanContext();
if (parentSpanContext != null && parentSpanContext.isValid()) {
spanData.put(PARENT_ID, parentSpanContext.getSpanId());
}
}
}
return spanData;
}
/**
* Clears MDC data related to OpenTelemetry
*
* @param vertxContext vertx context
*/
public static void clearMDCData(io.vertx.core.Context vertxContext) {
VertxMDC vertxMDC = VertxMDC.INSTANCE;
vertxMDC.remove(TRACE_ID, vertxContext);
vertxMDC.remove(SPAN_ID, vertxContext);
vertxMDC.remove(PARENT_ID, vertxContext);
vertxMDC.remove(SAMPLED, vertxContext);
}
}
|
OpenTelemetryUtil
|
java
|
google__dagger
|
javatests/dagger/functional/subcomponent/repeat/ParentComponent.java
|
{
"start": 1065,
"end": 1180
}
|
interface ____ {
Builder repeatedModule(RepeatedModule repeatedModule);
ParentComponent build();
}
}
|
Builder
|
java
|
apache__hadoop
|
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/DataGeneratorForTest.java
|
{
"start": 2277,
"end": 18763
}
|
class ____ loaded from classpath
conf.set(YarnConfiguration.FLOW_RUN_COPROCESSOR_JAR_HDFS_LOCATION, " ");
// now create all tables
HBaseTimelineSchemaCreator.createAllTables(conf, false);
}
public static void loadApps(HBaseTestingUtility util, long ts)
throws IOException {
TimelineEntities te = new TimelineEntities();
TimelineEntity entity = new TimelineEntity();
String id = "application_1111111111_2222";
entity.setId(id);
entity.setType(TimelineEntityType.YARN_APPLICATION.toString());
Long cTime = 1425016502000L;
entity.setCreatedTime(cTime);
// add the info map in Timeline Entity
entity.addInfo(getInfoMap3());
// add the isRelatedToEntity info
Set<String> isRelatedToSet = new HashSet<>();
isRelatedToSet.add("relatedto1");
Map<String, Set<String>> isRelatedTo = new HashMap<>();
isRelatedTo.put("task", isRelatedToSet);
entity.setIsRelatedToEntities(isRelatedTo);
// add the relatesTo info
Set<String> relatesToSet = new HashSet<>();
relatesToSet.add("relatesto1");
relatesToSet.add("relatesto3");
Map<String, Set<String>> relatesTo = new HashMap<>();
relatesTo.put("container", relatesToSet);
Set<String> relatesToSet11 = new HashSet<>();
relatesToSet11.add("relatesto4");
relatesTo.put("container1", relatesToSet11);
entity.setRelatesToEntities(relatesTo);
// add some config entries
Map<String, String> conf = new HashMap<>();
conf.put("config_param1", "value1");
conf.put("config_param2", "value2");
conf.put("cfg_param1", "value3");
entity.addConfigs(conf);
// add metrics
Set<TimelineMetric> metrics = new HashSet<>();
metrics.add(getMetric4(ts));
TimelineMetric m12 = new TimelineMetric();
m12.setId("MAP1_BYTES");
m12.addValue(ts, 50);
metrics.add(m12);
entity.addMetrics(metrics);
entity.addEvent(addStartEvent(ts));
te.addEntity(entity);
TimelineEntities te1 = new TimelineEntities();
TimelineEntity entity1 = new TimelineEntity();
String id1 = "application_1111111111_3333";
entity1.setId(id1);
entity1.setType(TimelineEntityType.YARN_APPLICATION.toString());
entity1.setCreatedTime(cTime + 20L);
// add the info map in Timeline Entity
entity1.addInfo(getInfoMap4());
// add the isRelatedToEntity info
Set<String> isRelatedToSet1 = new HashSet<>();
isRelatedToSet1.add("relatedto3");
isRelatedToSet1.add("relatedto5");
Map<String, Set<String>> isRelatedTo1 = new HashMap<>();
isRelatedTo1.put("task1", isRelatedToSet1);
Set<String> isRelatedToSet11 = new HashSet<>();
isRelatedToSet11.add("relatedto4");
isRelatedTo1.put("task2", isRelatedToSet11);
entity1.setIsRelatedToEntities(isRelatedTo1);
// add the relatesTo info
Set<String> relatesToSet1 = new HashSet<>();
relatesToSet1.add("relatesto1");
relatesToSet1.add("relatesto2");
Map<String, Set<String>> relatesTo1 = new HashMap<>();
relatesTo1.put("container", relatesToSet1);
entity1.setRelatesToEntities(relatesTo1);
// add some config entries
Map<String, String> conf1 = new HashMap<>();
conf1.put("cfg_param1", "value1");
conf1.put("cfg_param2", "value2");
entity1.addConfigs(conf1);
// add metrics
entity1.addMetrics(getMetrics4(ts));
TimelineEvent event11 = new TimelineEvent();
event11.setId("end_event");
event11.setTimestamp(ts);
entity1.addEvent(event11);
TimelineEvent event12 = new TimelineEvent();
event12.setId("update_event");
event12.setTimestamp(ts - 10);
entity1.addEvent(event12);
te1.addEntity(entity1);
TimelineEntities te2 = new TimelineEntities();
te2.addEntity(getEntity4(cTime, ts));
HBaseTimelineWriterImpl hbi = null;
try {
hbi = new HBaseTimelineWriterImpl();
hbi.init(util.getConfiguration());
hbi.start();
UserGroupInformation remoteUser =
UserGroupInformation.createRemoteUser("user1");
hbi.write(
new TimelineCollectorContext("cluster1", "user1", "some_flow_name",
"AB7822C10F1111", 1002345678919L, "application_1111111111_2222"),
te, remoteUser);
hbi.write(
new TimelineCollectorContext("cluster1", "user1", "some_flow_name",
"AB7822C10F1111", 1002345678919L, "application_1111111111_3333"),
te1, remoteUser);
hbi.write(
new TimelineCollectorContext("cluster1", "user1", "some_flow_name",
"AB7822C10F1111", 1002345678919L, "application_1111111111_4444"),
te2, remoteUser);
hbi.stop();
} finally {
if (hbi != null) {
hbi.stop();
hbi.close();
}
}
}
private static Set<TimelineMetric> getMetrics4(long ts) {
Set<TimelineMetric> metrics1 = new HashSet<>();
TimelineMetric m2 = new TimelineMetric();
m2.setId("MAP1_SLOT_MILLIS");
Map<Long, Number> metricValues1 = new HashMap<>();
metricValues1.put(ts - 120000, 100000000);
metricValues1.put(ts - 100000, 200000000);
metricValues1.put(ts - 80000, 300000000);
metricValues1.put(ts - 60000, 400000000);
metricValues1.put(ts - 40000, 50000000000L);
metricValues1.put(ts - 20000, 60000000000L);
m2.setType(Type.TIME_SERIES);
m2.setValues(metricValues1);
metrics1.add(m2);
return metrics1;
}
private static TimelineEntity getEntity4(long cTime, long ts) {
TimelineEntity entity2 = new TimelineEntity();
String id2 = "application_1111111111_4444";
entity2.setId(id2);
entity2.setType(TimelineEntityType.YARN_APPLICATION.toString());
entity2.setCreatedTime(cTime + 40L);
TimelineEvent event21 = new TimelineEvent();
event21.setId("update_event");
event21.setTimestamp(ts - 20);
entity2.addEvent(event21);
Set<String> isRelatedToSet2 = new HashSet<String>();
isRelatedToSet2.add("relatedto3");
Map<String, Set<String>> isRelatedTo2 = new HashMap<>();
isRelatedTo2.put("task1", isRelatedToSet2);
entity2.setIsRelatedToEntities(isRelatedTo2);
Map<String, Set<String>> relatesTo3 = new HashMap<>();
Set<String> relatesToSet14 = new HashSet<String>();
relatesToSet14.add("relatesto7");
relatesTo3.put("container2", relatesToSet14);
entity2.setRelatesToEntities(relatesTo3);
return entity2;
}
private static Map<String, Object> getInfoMap4() {
Map<String, Object> infoMap1 = new HashMap<>();
infoMap1.put("infoMapKey1", "infoMapValue1");
infoMap1.put("infoMapKey2", 10);
return infoMap1;
}
private static TimelineMetric getMetric4(long ts) {
TimelineMetric m1 = new TimelineMetric();
m1.setId("MAP_SLOT_MILLIS");
Map<Long, Number> metricValues = new HashMap<>();
metricValues.put(ts - 120000, 100000000);
metricValues.put(ts - 100000, 200000000);
metricValues.put(ts - 80000, 300000000);
metricValues.put(ts - 60000, 400000000);
metricValues.put(ts - 40000, 50000000000L);
metricValues.put(ts - 20000, 60000000000L);
m1.setType(Type.TIME_SERIES);
m1.setValues(metricValues);
return m1;
}
private static Map<String, Object> getInfoMap3() {
Map<String, Object> infoMap = new HashMap<>();
infoMap.put("infoMapKey1", "infoMapValue2");
infoMap.put("infoMapKey2", 20);
infoMap.put("infoMapKey3", 85.85);
return infoMap;
}
private static Map<String, Object> getInfoMap1() {
Map<String, Object> infoMap = new HashMap<>();
infoMap.put("infoMapKey1", "infoMapValue2");
infoMap.put("infoMapKey2", 20);
infoMap.put("infoMapKey3", 71.4);
return infoMap;
}
private static Map<String, Set<String>> getRelatesTo1() {
Set<String> relatesToSet = new HashSet<String>();
relatesToSet.add("relatesto1");
relatesToSet.add("relatesto3");
Map<String, Set<String>> relatesTo = new HashMap<>();
relatesTo.put("container", relatesToSet);
Set<String> relatesToSet11 = new HashSet<>();
relatesToSet11.add("relatesto4");
relatesTo.put("container1", relatesToSet11);
return relatesTo;
}
private static Map<String, String> getConfig1() {
Map<String, String> conf = new HashMap<>();
conf.put("config_param1", "value1");
conf.put("config_param2", "value2");
conf.put("cfg_param1", "value3");
return conf;
}
private static Map<String, String> getConfig2() {
Map<String, String> conf1 = new HashMap<>();
conf1.put("cfg_param1", "value1");
conf1.put("cfg_param2", "value2");
return conf1;
}
private static Map<String, Object> getInfoMap2() {
Map<String, Object> infoMap1 = new HashMap<>();
infoMap1.put("infoMapKey1", "infoMapValue1");
infoMap1.put("infoMapKey2", 10);
return infoMap1;
}
private static Map<String, Set<String>> getIsRelatedTo1() {
Set<String> isRelatedToSet = new HashSet<>();
isRelatedToSet.add("relatedto1");
Map<String, Set<String>> isRelatedTo = new HashMap<>();
isRelatedTo.put("task", isRelatedToSet);
return isRelatedTo;
}
private static Map<Long, Number> getMetricValues1(long ts) {
Map<Long, Number> metricValues = new HashMap<>();
metricValues.put(ts - 120000, 100000000);
metricValues.put(ts - 100000, 200000000);
metricValues.put(ts - 80000, 300000000);
metricValues.put(ts - 60000, 400000000);
metricValues.put(ts - 40000, 50000000000L);
metricValues.put(ts - 20000, 70000000000L);
return metricValues;
}
  /**
   * Writes a fixed set of timeline entities into HBase for the read-side
   * tests: three "world"-typed entities (hello, hello1, hello2), ten
   * entities with varying "newType*" types for type-listing tests, and one
   * application entity per application id for the app-to-flow table. The
   * same generic-entity batch is written under two application contexts.
   *
   * @param util test cluster utility supplying the HBase configuration
   * @param ts reference timestamp used for events and metric samples
   * @throws IOException if the writer fails
   */
  public static void loadEntities(HBaseTestingUtility util, long ts)
      throws IOException {
    TimelineEntities te = new TimelineEntities();
    // first entity: "hello" of type "world" with info, relations, configs,
    // two metrics and a start event
    TimelineEntity entity = new TimelineEntity();
    String id = "hello";
    String type = "world";
    entity.setId(id);
    entity.setType(type);
    Long cTime = 1425016502000L;
    entity.setCreatedTime(cTime);
    // add the info map in Timeline Entity
    entity.addInfo(getInfoMap1());
    // add the isRelatedToEntity info
    entity.setIsRelatedToEntities(getIsRelatedTo1());
    // add the relatesTo info
    entity.setRelatesToEntities(getRelatesTo1());
    // add some config entries
    entity.addConfigs(getConfig1());
    // add metrics
    Set<TimelineMetric> metrics = new HashSet<>();
    TimelineMetric m1 = new TimelineMetric();
    m1.setId("MAP_SLOT_MILLIS");
    m1.setType(Type.TIME_SERIES);
    m1.setValues(getMetricValues1(ts));
    metrics.add(m1);
    // second metric: single-value MAP1_BYTES sample at ts
    TimelineMetric m12 = new TimelineMetric();
    m12.setId("MAP1_BYTES");
    m12.addValue(ts, 50);
    metrics.add(m12);
    entity.addMetrics(metrics);
    entity.addEvent(addStartEvent(ts));
    te.addEntity(entity);
    // second entity: "hello1", created 20ms later, with end/update events
    TimelineEntity entity1 = new TimelineEntity();
    String id1 = "hello1";
    entity1.setId(id1);
    entity1.setType(type);
    entity1.setCreatedTime(cTime + 20L);
    // add the info map in Timeline Entity
    entity1.addInfo(getInfoMap2());
    // add event.
    TimelineEvent event11 = new TimelineEvent();
    event11.setId("end_event");
    event11.setTimestamp(ts);
    entity1.addEvent(event11);
    TimelineEvent event12 = new TimelineEvent();
    event12.setId("update_event");
    event12.setTimestamp(ts - 10);
    entity1.addEvent(event12);
    // add the isRelatedToEntity info
    entity1.setIsRelatedToEntities(getIsRelatedTo2());
    // add the relatesTo info
    Set<String> relatesToSet1 = new HashSet<String>();
    relatesToSet1.add("relatesto1");
    relatesToSet1.add("relatesto2");
    Map<String, Set<String>> relatesTo1 = new HashMap<>();
    relatesTo1.put("container", relatesToSet1);
    entity1.setRelatesToEntities(relatesTo1);
    // add some config entries
    entity1.addConfigs(getConfig2());
    // add metrics
    Set<TimelineMetric> metrics1 = new HashSet<>();
    TimelineMetric m2 = new TimelineMetric();
    m2.setId("MAP1_SLOT_MILLIS");
    m2.setType(Type.TIME_SERIES);
    m2.setValues(getMetricValues2(ts));
    metrics1.add(m2);
    entity1.addMetrics(metrics1);
    te.addEntity(entity1);
    // third entity: "hello2", built by the shared helper
    te.addEntity(getEntity2(type, cTime, ts));
    // For listing types
    // ten entities whose types cycle through "newType", "newType 0",
    // "newType 0 1" (i % 3 appended suffixes)
    for (int i = 0; i < 10; i++) {
      TimelineEntity entity3 = new TimelineEntity();
      String id3 = "typeTest" + i;
      entity3.setId(id3);
      StringBuilder typeName = new StringBuilder("newType");
      for (int j = 0; j < (i % 3); j++) {
        typeName.append(" ").append(j);
      }
      entity3.setType(typeName.toString());
      entity3.setCreatedTime(cTime + 80L + i);
      te.addEntity(entity3);
    }
    // Create app entity for app to flow table
    TimelineEntities appTe1 = new TimelineEntities();
    TimelineEntity entityApp1 = new TimelineEntity();
    String appName1 = "application_1231111111_1111";
    entityApp1.setId(appName1);
    entityApp1.setType(TimelineEntityType.YARN_APPLICATION.toString());
    entityApp1.setCreatedTime(cTime + 40L);
    TimelineEvent appCreationEvent1 = new TimelineEvent();
    appCreationEvent1.setId(ApplicationMetricsConstants.CREATED_EVENT_TYPE);
    appCreationEvent1.setTimestamp(cTime);
    entityApp1.addEvent(appCreationEvent1);
    appTe1.addEntity(entityApp1);
    // second application entity for the second write context
    TimelineEntities appTe2 = new TimelineEntities();
    TimelineEntity entityApp2 = new TimelineEntity();
    String appName2 = "application_1231111111_1112";
    entityApp2.setId(appName2);
    entityApp2.setType(TimelineEntityType.YARN_APPLICATION.toString());
    entityApp2.setCreatedTime(cTime + 50L);
    TimelineEvent appCreationEvent2 = new TimelineEvent();
    appCreationEvent2.setId(ApplicationMetricsConstants.CREATED_EVENT_TYPE);
    appCreationEvent2.setTimestamp(cTime);
    entityApp2.addEvent(appCreationEvent2);
    appTe2.addEntity(entityApp2);
    // write everything: the same generic batch goes under both app ids,
    // plus the matching application entity for each context
    HBaseTimelineWriterImpl hbi = null;
    try {
      hbi = new HBaseTimelineWriterImpl();
      hbi.init(util.getConfiguration());
      hbi.start();
      UserGroupInformation user =
          UserGroupInformation.createRemoteUser("user1");
      TimelineCollectorContext context =
          new TimelineCollectorContext("cluster1", "user1", "some_flow_name",
              "AB7822C10F1111", 1002345678919L, appName1);
      hbi.write(context, te, user);
      hbi.write(context, appTe1, user);
      context = new TimelineCollectorContext("cluster1", "user1",
          "some_flow_name", "AB7822C10F1111", 1002345678919L, appName2);
      hbi.write(context, te, user);
      hbi.write(context, appTe2, user);
      hbi.stop();
    } finally {
      if (hbi != null) {
        hbi.stop();
        hbi.close();
      }
    }
  }
private static TimelineEntity getEntity2(String type, long cTime,
long ts) {
TimelineEntity entity2 = new TimelineEntity();
String id2 = "hello2";
entity2.setId(id2);
entity2.setType(type);
entity2.setCreatedTime(cTime + 40L);
TimelineEvent event21 = new TimelineEvent();
event21.setId("update_event");
event21.setTimestamp(ts - 20);
entity2.addEvent(event21);
Set<String> isRelatedToSet2 = new HashSet<>();
isRelatedToSet2.add("relatedto3");
Map<String, Set<String>> isRelatedTo2 = new HashMap<>();
isRelatedTo2.put("task1", isRelatedToSet2);
entity2.setIsRelatedToEntities(isRelatedTo2);
Map<String, Set<String>> relatesTo3 = new HashMap<>();
Set<String> relatesToSet14 = new HashSet<>();
relatesToSet14.add("relatesto7");
relatesTo3.put("container2", relatesToSet14);
entity2.setRelatesToEntities(relatesTo3);
return entity2;
}
private static TimelineEvent addStartEvent(long ts) {
TimelineEvent event = new TimelineEvent();
event.setId("start_event");
event.setTimestamp(ts);
return event;
}
private static Map<Long, Number> getMetricValues2(long ts1) {
Map<Long, Number> metricValues1 = new HashMap<>();
metricValues1.put(ts1 - 120000, 100000000);
metricValues1.put(ts1 - 100000, 200000000);
metricValues1.put(ts1 - 80000, 300000000);
metricValues1.put(ts1 - 60000, 400000000);
metricValues1.put(ts1 - 40000, 50000000000L);
metricValues1.put(ts1 - 20000, 60000000000L);
return metricValues1;
}
private static Map<String, Set<String>> getIsRelatedTo2() {
Set<String> isRelatedToSet1 = new HashSet<>();
isRelatedToSet1.add("relatedto3");
isRelatedToSet1.add("relatedto5");
Map<String, Set<String>> isRelatedTo1 = new HashMap<>();
isRelatedTo1.put("task1", isRelatedToSet1);
Set<String> isRelatedToSet11 = new HashSet<>();
isRelatedToSet11.add("relatedto4");
isRelatedTo1.put("task2", isRelatedToSet11);
return isRelatedTo1;
}
}
|
is
|
java
|
spring-projects__spring-boot
|
configuration-metadata/spring-boot-configuration-processor/src/test/java/org/springframework/boot/configurationsample/inheritance/ChildProperties.java
|
{
"start": 700,
"end": 1051
}
|
class ____ extends BaseProperties {
private long longValue;
private final NestInChild childNest = new NestInChild();
public long getLongValue() {
return this.longValue;
}
public void setLongValue(long longValue) {
this.longValue = longValue;
}
public NestInChild getChildNest() {
return this.childNest;
}
public static
|
ChildProperties
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/threadsafety/ImmutableCheckerTest.java
|
{
"start": 107663,
"end": 107784
}
|
interface ____<A, B> {
default void foo() {}
}
@Immutable
|
Function
|
java
|
spring-projects__spring-framework
|
spring-context/src/test/java/org/springframework/beans/factory/xml/XmlBeanFactoryTests.java
|
{
"start": 72886,
"end": 74055
}
|
class ____ implements InitializingBean, DisposableBean {
public static boolean constructed;
public boolean afterPropertiesSetInvoked, initMethodInvoked, destroyed, customDestroyed;
public InitAndIB() {
constructed = true;
}
@Override
public void afterPropertiesSet() {
assertThat(this.initMethodInvoked).isFalse();
if (this.afterPropertiesSetInvoked) {
throw new IllegalStateException("Already initialized");
}
this.afterPropertiesSetInvoked = true;
}
/** Init method */
public void customInit() {
assertThat(this.afterPropertiesSetInvoked).isTrue();
if (this.initMethodInvoked) {
throw new IllegalStateException("Already customInitialized");
}
this.initMethodInvoked = true;
}
@Override
public void destroy() {
assertThat(this.customDestroyed).isFalse();
if (this.destroyed) {
throw new IllegalStateException("Already destroyed");
}
this.destroyed = true;
}
public void customDestroy() {
assertThat(this.destroyed).isTrue();
if (this.customDestroyed) {
throw new IllegalStateException("Already customDestroyed");
}
this.customDestroyed = true;
}
}
static
|
InitAndIB
|
java
|
spring-projects__spring-boot
|
module/spring-boot-security-oauth2-authorization-server/src/test/java/org/springframework/boot/security/oauth2/server/authorization/autoconfigure/servlet/OAuth2AuthorizationServerAutoConfigurationTests.java
|
{
"start": 9795,
"end": 10322
}
|
class ____ {
@Bean
RegisteredClientRepository registeredClientRepository() {
RegisteredClient registeredClient = RegisteredClient.withId("test")
.clientId("abcd")
.clientSecret("secret")
.clientAuthenticationMethod(ClientAuthenticationMethod.CLIENT_SECRET_BASIC)
.authorizationGrantType(AuthorizationGrantType.CLIENT_CREDENTIALS)
.scope("test")
.build();
return new InMemoryRegisteredClientRepository(registeredClient);
}
}
@Configuration
static
|
TestRegisteredClientRepositoryConfiguration
|
java
|
apache__hadoop
|
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/OfflineImageReconstructor.java
|
{
"start": 17974,
"end": 20903
}
|
class ____ implements SectionProcessor {
static final String NAME = "ErasureCodingSection";
@Override
public void process() throws IOException {
Node node = new Node();
loadNodeChildren(node, "ErasureCodingSection fields");
ErasureCodingSection.Builder builder = ErasureCodingSection.newBuilder();
while (true) {
ErasureCodingPolicyProto.Builder policyBuilder =
ErasureCodingPolicyProto.newBuilder();
Node ec = node.removeChild(ERASURE_CODING_SECTION_POLICY);
if (ec == null) {
break;
}
int policyId = ec.removeChildInt(ERASURE_CODING_SECTION_POLICY_ID);
policyBuilder.setId(policyId);
String name = ec.removeChildStr(ERASURE_CODING_SECTION_POLICY_NAME);
policyBuilder.setName(name);
Integer cellSize =
ec.removeChildInt(ERASURE_CODING_SECTION_POLICY_CELL_SIZE);
policyBuilder.setCellSize(cellSize);
String policyState =
ec.removeChildStr(ERASURE_CODING_SECTION_POLICY_STATE);
if (policyState != null) {
policyBuilder.setState(
HdfsProtos.ErasureCodingPolicyState.valueOf(policyState));
}
Node schema = ec.removeChild(ERASURE_CODING_SECTION_SCHEMA);
Preconditions.checkNotNull(schema);
ECSchemaProto.Builder schemaBuilder = ECSchemaProto.newBuilder();
String codecName =
schema.removeChildStr(ERASURE_CODING_SECTION_SCHEMA_CODEC_NAME);
schemaBuilder.setCodecName(codecName);
Integer dataUnits =
schema.removeChildInt(ERASURE_CODING_SECTION_SCHEMA_DATA_UNITS);
schemaBuilder.setDataUnits(dataUnits);
Integer parityUnits = schema.
removeChildInt(ERASURE_CODING_SECTION_SCHEMA_PARITY_UNITS);
schemaBuilder.setParityUnits(parityUnits);
Node options = schema
.removeChild(ERASURE_CODING_SECTION_SCHEMA_OPTIONS);
if (options != null) {
while (true) {
Node option =
options.removeChild(ERASURE_CODING_SECTION_SCHEMA_OPTION);
if (option == null) {
break;
}
String key = option
.removeChildStr(ERASURE_CODING_SECTION_SCHEMA_OPTION_KEY);
String value = option
.removeChildStr(ERASURE_CODING_SECTION_SCHEMA_OPTION_VALUE);
schemaBuilder.addOptions(HdfsProtos.ECSchemaOptionEntryProto
.newBuilder().setKey(key).setValue(value).build());
}
}
policyBuilder.setSchema(schemaBuilder.build());
builder.addPolicies(policyBuilder.build());
}
ErasureCodingSection section = builder.build();
section.writeDelimitedTo(out);
node.verifyNoRemainingKeys("ErasureCodingSection");
recordSectionLength(SectionName.ERASURE_CODING.name());
}
}
private
|
ErasureCodingSectionProcessor
|
java
|
apache__kafka
|
clients/src/main/java/org/apache/kafka/clients/consumer/internals/events/ShareAcknowledgementEvent.java
|
{
"start": 1256,
"end": 2282
}
|
class ____ {
private final Map<TopicIdPartition, Acknowledgements> acknowledgementsMap;
private final boolean checkForRenewAcknowledgements;
private final Optional<Integer> acquisitionLockTimeoutMs;
public ShareAcknowledgementEvent(Map<TopicIdPartition, Acknowledgements> acknowledgementsMap,
boolean checkForRenewAcknowledgements,
Optional<Integer> acquisitionLockTimeoutMs) {
this.acknowledgementsMap = acknowledgementsMap;
this.checkForRenewAcknowledgements = checkForRenewAcknowledgements;
this.acquisitionLockTimeoutMs = acquisitionLockTimeoutMs;
}
public Map<TopicIdPartition, Acknowledgements> acknowledgementsMap() {
return acknowledgementsMap;
}
public boolean checkForRenewAcknowledgements() {
return checkForRenewAcknowledgements;
}
public Optional<Integer> acquisitionLockTimeoutMs() {
return acquisitionLockTimeoutMs;
}
}
|
ShareAcknowledgementEvent
|
java
|
spring-projects__spring-framework
|
spring-context/src/test/java/org/springframework/scripting/support/RefreshableScriptTargetSourceTests.java
|
{
"start": 863,
"end": 1090
}
|
class ____ {
@Test
void createWithNullScriptSource() {
assertThatIllegalArgumentException().isThrownBy(() ->
new RefreshableScriptTargetSource(mock(), "a.bean", null, null, false));
}
}
|
RefreshableScriptTargetSourceTests
|
java
|
spring-projects__spring-framework
|
spring-websocket/src/main/java/org/springframework/web/socket/config/HandlersBeanDefinitionParser.java
|
{
"start": 5057,
"end": 5212
}
|
interface ____ {
void addMapping(Element mappingElement, ManagedMap<String, Object> map, ParserContext context);
}
private static
|
HandlerMappingStrategy
|
java
|
google__auto
|
value/src/test/java/com/google/auto/value/processor/AutoValueCompilationTest.java
|
{
"start": 121144,
"end": 121276
}
|
class ____<T> {",
" abstract String blam();",
"",
" @AutoValue.Builder",
" public
|
Baz
|
java
|
apache__camel
|
components/camel-file/src/main/java/org/apache/camel/component/file/GenericFileExclusiveReadLockStrategy.java
|
{
"start": 1841,
"end": 5932
}
|
interface ____<T> {
/**
* Allows custom logic to be run on startup preparing the strategy, such as removing old lock files etc.
*
* @param operations generic file operations
* @param endpoint the endpoint
* @throws Exception can be thrown in case of errors
*/
void prepareOnStartup(GenericFileOperations<T> operations, GenericFileEndpoint<T> endpoint) throws Exception;
/**
* Acquires exclusive read lock to the file.
*
* @param operations generic file operations
* @param file the file
* @param exchange the exchange
* @return <tt>true</tt> if read lock was acquired. If <tt>false</tt> Camel will skip the file and try it
* on the next poll
* @throws Exception can be thrown in case of errors
*/
boolean acquireExclusiveReadLock(GenericFileOperations<T> operations, GenericFile<T> file, Exchange exchange)
throws Exception;
/**
* Releases the exclusive read lock granted by the <tt>acquireExclusiveReadLock</tt> method due an abort operation
* (acquireExclusiveReadLock returned false).
*
* @param operations generic file operations
* @param file the file
* @param exchange the exchange
* @throws Exception can be thrown in case of errors
*/
void releaseExclusiveReadLockOnAbort(GenericFileOperations<T> operations, GenericFile<T> file, Exchange exchange)
throws Exception;
/**
* Releases the exclusive read lock granted by the <tt>acquireExclusiveReadLock</tt> method due a rollback operation
* (Exchange processing failed)
*
* @param operations generic file operations
* @param file the file
* @param exchange the exchange
* @throws Exception can be thrown in case of errors
*/
void releaseExclusiveReadLockOnRollback(GenericFileOperations<T> operations, GenericFile<T> file, Exchange exchange)
throws Exception;
/**
* Releases the exclusive read lock granted by the <tt>acquireExclusiveReadLock</tt> method due a commit operation
* (Exchange processing succeeded)
*
* @param operations generic file operations
* @param file the file
* @param exchange the exchange
* @throws Exception can be thrown in case of errors
*/
void releaseExclusiveReadLockOnCommit(GenericFileOperations<T> operations, GenericFile<T> file, Exchange exchange)
throws Exception;
/**
* Sets an optional timeout period.
* <p/>
* If the readlock could not be granted within the time period then the wait is stopped and the
* <tt>acquireExclusiveReadLock</tt> method returns <tt>false</tt>.
*
* @param timeout period in millis
*/
void setTimeout(long timeout);
/**
* Sets the check interval period.
* <p/>
* The check interval is used for sleeping between attempts to acquire read lock. Setting a high value allows to
* cater for <i>slow writes</i> in case the producer of the file is slow.
* <p/>
* The default period is 1000 millis.
*
* @param checkInterval interval in millis
*/
void setCheckInterval(long checkInterval);
/**
* Sets logging level used when a read lock could not be acquired.
* <p/>
* Logging level used when a read lock could not be acquired.
* <p/>
* The default logging level is WARN
*
* @param readLockLoggingLevel LoggingLevel
*/
void setReadLockLoggingLevel(LoggingLevel readLockLoggingLevel);
/**
* Sets whether marker file should be used or not.
*
* @param markerFile <tt>true</tt> to use marker files.
*/
void setMarkerFiler(boolean markerFile);
/**
* Sets whether orphan marker files should be deleted upon startup
*
* @param deleteOrphanLockFiles <tt>true</tt> to delete files, <tt>false</tt> to skip this check
*/
void setDeleteOrphanLockFiles(boolean deleteOrphanLockFiles);
}
|
GenericFileExclusiveReadLockStrategy
|
java
|
elastic__elasticsearch
|
x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/BulkRolesResponse.java
|
{
"start": 2710,
"end": 4562
}
|
class ____ implements ToXContentObject {
private final Exception cause;
private final String roleName;
private final DocWriteResponse.Result resultType;
private Item(String roleName, DocWriteResponse.Result resultType, Exception cause) {
this.roleName = roleName;
this.resultType = resultType;
this.cause = cause;
}
Item(StreamInput in) throws IOException {
roleName = in.readString();
resultType = DocWriteResponse.Result.readFrom(in);
cause = in.readException();
}
public Exception getCause() {
return cause;
}
public String getResultType() {
return resultType == null ? "errors" : resultType.getLowercase();
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
if (resultType == null) {
ElasticsearchException.generateThrowableXContent(builder, params, cause);
} else {
builder.value(roleName);
}
return builder;
}
public static Item success(String roleName, DocWriteResponse.Result result) {
return new Item(roleName, result, null);
}
public static Item failure(String roleName, Exception cause) {
return new Item(roleName, null, cause);
}
public String getRoleName() {
return roleName;
}
public boolean isFailed() {
return cause != null;
}
public String getFailureMessage() {
if (cause != null) {
return cause.getMessage();
}
return null;
}
}
public List<Item> getItems() {
return items;
}
}
|
Item
|
java
|
spring-projects__spring-framework
|
spring-context/src/test/java/org/springframework/context/annotation/ConfigurationWithFactoryBeanAndParametersTests.java
|
{
"start": 1678,
"end": 1775
}
|
class ____ {
Foo foo;
@Autowired
public Bar(Foo foo) {
this.foo = foo;
}
}
static
|
Bar
|
java
|
elastic__elasticsearch
|
x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/completion/OpenAiChatCompletionModel.java
|
{
"start": 1358,
"end": 4989
}
|
class ____ extends OpenAiModel {
public static OpenAiChatCompletionModel of(OpenAiChatCompletionModel model, Map<String, Object> taskSettings) {
if (taskSettings == null || taskSettings.isEmpty()) {
return model;
}
return new OpenAiChatCompletionModel(model, model.getTaskSettings().updatedTaskSettings(taskSettings));
}
public static OpenAiChatCompletionModel of(OpenAiChatCompletionModel model, UnifiedCompletionRequest request) {
var originalModelServiceSettings = model.getServiceSettings();
var overriddenServiceSettings = new OpenAiChatCompletionServiceSettings(
Objects.requireNonNullElse(request.model(), originalModelServiceSettings.modelId()),
originalModelServiceSettings.uri(),
originalModelServiceSettings.organizationId(),
originalModelServiceSettings.maxInputTokens(),
originalModelServiceSettings.rateLimitSettings()
);
return new OpenAiChatCompletionModel(
model.getInferenceEntityId(),
model.getTaskType(),
model.getConfigurations().getService(),
overriddenServiceSettings,
model.getTaskSettings(),
model.getSecretSettings()
);
}
public OpenAiChatCompletionModel(
String inferenceEntityId,
TaskType taskType,
String service,
Map<String, Object> serviceSettings,
Map<String, Object> taskSettings,
@Nullable Map<String, Object> secrets,
ConfigurationParseContext context
) {
this(
inferenceEntityId,
taskType,
service,
OpenAiChatCompletionServiceSettings.fromMap(serviceSettings, context),
new OpenAiChatCompletionTaskSettings(taskSettings),
DefaultSecretSettings.fromMap(secrets)
);
}
OpenAiChatCompletionModel(
String modelId,
TaskType taskType,
String service,
OpenAiChatCompletionServiceSettings serviceSettings,
OpenAiChatCompletionTaskSettings taskSettings,
@Nullable DefaultSecretSettings secrets
) {
super(
new ModelConfigurations(modelId, taskType, service, serviceSettings, taskSettings),
new ModelSecrets(secrets),
serviceSettings,
secrets,
buildUri(serviceSettings.uri(), OpenAiService.NAME, OpenAiChatCompletionModel::buildDefaultUri)
);
}
public static URI buildDefaultUri() throws URISyntaxException {
return new URIBuilder().setScheme("https")
.setHost(OpenAiUtils.HOST)
.setPathSegments(OpenAiUtils.VERSION_1, OpenAiUtils.CHAT_PATH, OpenAiUtils.COMPLETIONS_PATH)
.build();
}
private OpenAiChatCompletionModel(OpenAiChatCompletionModel originalModel, OpenAiChatCompletionTaskSettings taskSettings) {
super(originalModel, taskSettings);
}
@Override
public OpenAiChatCompletionServiceSettings getServiceSettings() {
return (OpenAiChatCompletionServiceSettings) super.getServiceSettings();
}
@Override
public OpenAiChatCompletionTaskSettings getTaskSettings() {
return (OpenAiChatCompletionTaskSettings) super.getTaskSettings();
}
@Override
public DefaultSecretSettings getSecretSettings() {
return (DefaultSecretSettings) super.getSecretSettings();
}
@Override
public ExecutableAction accept(OpenAiActionVisitor creator, Map<String, Object> taskSettings) {
return creator.create(this, taskSettings);
}
}
|
OpenAiChatCompletionModel
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.