language
stringclasses 1
value | repo
stringclasses 60
values | path
stringlengths 22
294
| class_span
dict | source
stringlengths 13
1.16M
| target
stringlengths 1
113
|
|---|---|---|---|---|---|
java
|
netty__netty
|
codec-base/src/main/java/io/netty/handler/codec/serialization/CachingClassResolver.java
|
{
"start": 706,
"end": 1431
}
|
class ____ implements ClassResolver {

    private final ClassResolver delegate;
    private final Map<String, Class<?>> classCache;

    CachingClassResolver(ClassResolver delegate, Map<String, Class<?>> classCache) {
        this.delegate = delegate;
        this.classCache = classCache;
    }

    /**
     * Resolves a class by name, consulting the cache first and falling back
     * to the wrapped resolver on a miss. Successful lookups are memoized.
     */
    @Override
    public Class<?> resolve(String className) throws ClassNotFoundException {
        Class<?> cached = classCache.get(className);
        if (cached != null) {
            return cached;
        }
        Class<?> resolved = delegate.resolve(className);
        classCache.put(className, resolved);
        return resolved;
    }
}
|
CachingClassResolver
|
java
|
google__dagger
|
javatests/dagger/internal/codegen/ConflictingEntryPointsTest.java
|
{
"start": 1467,
"end": 1674
}
|
interface ____ {",
" Long foo();",
"}");
Source base2 =
CompilerTests.javaSource(
"test.Base2", //
"package test;",
"",
"
|
Base1
|
java
|
quarkusio__quarkus
|
extensions/websockets-next/deployment/src/test/java/io/quarkus/websockets/next/test/sessioncontext/SessionContextTest.java
|
{
"start": 784,
"end": 2416
}
|
class ____ {
@RegisterExtension
public static final QuarkusUnitTest test = new QuarkusUnitTest()
.withApplicationRoot(root -> {
root.addClasses(Append.class, WSClient.class, SessionScopedBean.class, SessionContextListener.class);
});
@Inject
Vertx vertx;
@TestHTTPResource("append")
URI appendUri;
@Inject
SessionContextListener listener;
@Test
void testSessionContext() throws InterruptedException {
WSClient client = WSClient.create(vertx).connect(appendUri);
client.send("foo");
client.send("bar");
client.send("baz");
client.waitForMessages(3);
assertEquals("foo", client.getMessages().get(0).toString());
assertEquals("foobar", client.getMessages().get(1).toString());
assertEquals("foobarbaz", client.getMessages().get(2).toString());
client.disconnect();
assertTrue(listener.destroyLatch.await(5, TimeUnit.SECONDS));
assertTrue(SessionScopedBean.DESTROYED.get());
assertEquals(3, listener.events.size());
assertEquals(listener.events.get(0).payload(), listener.events.get(1).payload());
assertEquals(listener.events.get(1).payload(), listener.events.get(2).payload());
assertTrue(listener.events.get(0).qualifiers().contains(Initialized.Literal.SESSION));
assertTrue(listener.events.get(1).qualifiers().contains(BeforeDestroyed.Literal.SESSION));
assertTrue(listener.events.get(2).qualifiers().contains(Destroyed.Literal.SESSION));
}
@WebSocket(path = "/append")
public static
|
SessionContextTest
|
java
|
apache__hadoop
|
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/HistoryFileManager.java
|
{
"start": 3906,
"end": 4341
}
|
// Lifecycle states of a tracked history file: still in the intermediate
// directory, moved to the done directory, deleted, or the move failed.
// NOTE(review): names suggest job-history file tracking — confirm against callers.
enum ____ {
IN_INTERMEDIATE, IN_DONE, DELETED, MOVE_FAILED
};
private static String DONE_BEFORE_SERIAL_TAIL = JobHistoryUtils
.doneSubdirsBeforeSerialTail();
/**
* Maps between a serial number (generated based on jobId) and the timestamp
* component(s) to which it belongs. Facilitates jobId based searches. If a
* jobId is not found in this list - it will not be found.
*/
private static
|
HistoryInfoState
|
java
|
elastic__elasticsearch
|
x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/optimizer/OptimizerRules.java
|
{
"start": 77993,
"end": 78618
}
|
/**
 * Optimizer rule that walks a {@link LogicalPlan}'s expressions bottom-up and
 * gives subclasses a chance to replace redundant casts of a specific type.
 *
 * @param <C> the concrete cast expression type this rule inspects
 */
class ____<C extends Expression> extends Rule<LogicalPlan, LogicalPlan> {
// The concrete cast subtype matched by transformExpressionsUp below.
private final Class<C> castType;
public PruneCast(Class<C> castType) {
this.castType = castType;
}
@Override
public final LogicalPlan apply(LogicalPlan plan) {
return rule(plan);
}
protected final LogicalPlan rule(LogicalPlan plan) {
// eliminate redundant casts
return plan.transformExpressionsUp(castType, this::maybePruneCast);
}
// Hook deciding what a matched cast is rewritten to.
protected abstract Expression maybePruneCast(C cast);
}
public abstract static
|
PruneCast
|
java
|
ReactiveX__RxJava
|
src/main/java/io/reactivex/rxjava3/internal/operators/flowable/FlowableSequenceEqual.java
|
{
"start": 1152,
"end": 1984
}
|
class ____<T> extends Flowable<Boolean> {
final Publisher<? extends T> first;
final Publisher<? extends T> second;
final BiPredicate<? super T, ? super T> comparer;
final int prefetch;
public FlowableSequenceEqual(Publisher<? extends T> first, Publisher<? extends T> second,
BiPredicate<? super T, ? super T> comparer, int prefetch) {
this.first = first;
this.second = second;
this.comparer = comparer;
this.prefetch = prefetch;
}
@Override
public void subscribeActual(Subscriber<? super Boolean> s) {
EqualCoordinator<T> parent = new EqualCoordinator<>(s, prefetch, comparer);
s.onSubscribe(parent);
parent.subscribe(first, second);
}
/**
* Provides callbacks for the EqualSubscribers.
*/
|
FlowableSequenceEqual
|
java
|
elastic__elasticsearch
|
x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/inference/results/ModelStoreResponseTests.java
|
{
"start": 563,
"end": 3060
}
|
class ____ extends AbstractBWCWireSerializationTestCase<ModelStoreResponse> {

    /** Builds a response with a random id, a random status, and an optional failure cause. */
    public static ModelStoreResponse randomModelStoreResponse() {
        String inferenceId = randomAlphaOfLength(10);
        RestStatus status = randomFrom(RestStatus.values());
        Exception failureCause = randomBoolean() ? null : new IllegalStateException("Test exception");
        return new ModelStoreResponse(inferenceId, status, failureCause);
    }

    /** Asserts failed() is false only when the status is OK and no failure cause is set. */
    public void testFailed() {
        var okResponse = new ModelStoreResponse("model_1", RestStatus.OK, null);
        assertFalse(okResponse.failed());

        var errorStatusResponse = new ModelStoreResponse(
            "model_2",
            RestStatus.INTERNAL_SERVER_ERROR,
            new IllegalStateException("Test failure")
        );
        assertTrue(errorStatusResponse.failed());

        var okStatusWithCause = new ModelStoreResponse("model_2", RestStatus.OK, new IllegalStateException("Test failure"));
        assertTrue(okStatusWithCause.failed());
    }

    @Override
    protected ModelStoreResponse mutateInstanceForVersion(ModelStoreResponse instance, TransportVersion version) {
        // Wire format is identical across versions; nothing to adjust.
        return instance;
    }

    @Override
    protected Writeable.Reader<ModelStoreResponse> instanceReader() {
        return ModelStoreResponse::new;
    }

    @Override
    protected ModelStoreResponse createTestInstance() {
        return randomModelStoreResponse();
    }

    @Override
    protected ModelStoreResponse mutateInstance(ModelStoreResponse instance) throws IOException {
        // Mutate exactly one of the three fields, chosen at random.
        int choice = randomIntBetween(0, 2);
        if (choice == 0) {
            String newInferenceId = instance.inferenceId() + "_mutated";
            return new ModelStoreResponse(newInferenceId, instance.status(), instance.failureCause());
        }
        if (choice == 1) {
            RestStatus newStatus = randomValueOtherThan(instance.status(), () -> randomFrom(RestStatus.values()));
            return new ModelStoreResponse(instance.inferenceId(), newStatus, instance.failureCause());
        }
        if (choice == 2) {
            Exception newFailureCause = instance.failureCause() == null ? new IllegalStateException("Mutated exception") : null;
            return new ModelStoreResponse(instance.inferenceId(), instance.status(), newFailureCause);
        }
        throw new IllegalStateException("Unexpected value: " + choice);
    }
}
|
ModelStoreResponseTests
|
java
|
quarkusio__quarkus
|
integration-tests/infinispan-cache/src/test/java/io/quarkus/it/cache/infinispan/InfinispanCacheClientTestCase.java
|
{
"start": 537,
"end": 2889
}
|
class ____ {

    private static final String CITY = "Toulouse";
    private static final String TODAY = "2020-12-20";

    /**
     * End-to-end cache behavior: repeated lookups (sync and async) reuse the
     * cached value, while the invalidation endpoints force fresh invocations.
     */
    @Test
    public void test() {
        assertInvocations("0");
        getSunriseTimeInvocations();
        assertInvocations("1");
        getSunriseTimeInvocations();
        assertInvocations("1");
        getAsyncSunriseTimeInvocations();
        assertInvocations("1");
        invalidate();
        getSunriseTimeInvocations();
        assertInvocations("2");
        invalidateAll();
        getSunriseTimeInvocations();
        assertInvocations("3");
    }

    // Checks the server-side invocation counter exposed by the test resource.
    private void assertInvocations(String expectedInvocations) {
        given().when()
                .get("/rest-client/invocations")
                .then()
                .statusCode(200)
                .body(equalTo(expectedInvocations));
    }

    private void getSunriseTimeInvocations() {
        doGetSunriseTimeInvocations("/rest-client/time/{city}", true);
    }

    private void getAsyncSunriseTimeInvocations() {
        doGetSunriseTimeInvocations("/rest-client/async/time/{city}", false);
    }

    private void doGetSunriseTimeInvocations(String path, Boolean blockingAllowed) {
        Headers responseHeaders = given()
                .queryParam("date", TODAY)
                .when()
                .get(path, CITY)
                .then()
                .statusCode(200)
                .extract().headers();
        String before = responseHeaders.get("before").getValue();
        String after = responseHeaders.get("after").getValue();
        assertEquals(before, after);
        assertEquals(blockingAllowed.toString(), responseHeaders.get("blockingAllowed").getValue());
    }

    private void invalidate() {
        Headers responseHeaders = given()
                .queryParam("date", TODAY)
                .queryParam("notPartOfTheCacheKey", "notPartOfTheCacheKey")
                .when()
                .delete("/rest-client/invalidate/{city}", CITY)
                .then()
                .statusCode(204)
                .extract().headers();
        assertNotNull(responseHeaders.get("incoming").getValue());
        assertEquals("false", responseHeaders.get("blockingAllowed").getValue());
    }

    private void invalidateAll() {
        given().when()
                .delete("/rest-client/invalidate")
                .then()
                .statusCode(204);
    }
}
|
InfinispanCacheClientTestCase
|
java
|
apache__hadoop
|
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/runtime/runc/ImageTagToManifestPlugin.java
|
{
"start": 2828,
"end": 11310
}
|
class ____ extends AbstractService
implements RuncImageTagToManifestPlugin {
private Map<String, ImageManifest> manifestCache;
private ObjectMapper objMapper;
private AtomicReference<Map<String, String>> localImageToHashCache =
new AtomicReference<>(new HashMap<>());
private AtomicReference<Map<String, String>> hdfsImageToHashCache =
new AtomicReference<>(new HashMap<>());
private Configuration conf;
private ScheduledExecutorService exec;
private long hdfsModTime;
private long localModTime;
private String hdfsImageToHashFile;
private String manifestDir;
private String localImageTagToHashFile;
private static final Logger LOG = LoggerFactory.getLogger(ImageTagToManifestPlugin.class);
private static final int SHA256_HASH_LENGTH = 64;
private static final String ALPHA_NUMERIC = "[a-zA-Z0-9]+";
public ImageTagToManifestPlugin() {
super("ImageTagToManifestPluginService");
}
@Override
public ImageManifest getManifestFromImageTag(String imageTag)
throws IOException {
// Resolve the tag to a hash, serve from the cache when possible, otherwise
// read and parse the manifest file from the manifest directory.
String hash = getHashFromImageTag(imageTag);
ImageManifest manifest = manifestCache.get(hash);
if (manifest != null) {
return manifest;
}
Path manifestPath = new Path(manifestDir + hash);
FileSystem fs = manifestPath.getFileSystem(conf);
FSDataInputStream input;
try {
input = fs.open(manifestPath);
} catch (IllegalArgumentException iae) {
throw new IOException("Manifest file is not a valid HDFS file: "
+ manifestPath.toString(), iae);
}
// Close the stream once it is fully read; it was previously leaked.
byte[] bytes;
try {
bytes = IOUtils.toByteArray(input);
} finally {
input.close();
}
manifest = objMapper.readValue(bytes, ImageManifest.class);
manifestCache.put(hash, manifest);
return manifest;
}
@Override
public String getHashFromImageTag(String imageTag) {
// Resolution order: local file mapping, then the HDFS mapping, and as a
// last resort the tag itself is assumed to already be the hash.
Map<String, String> localMapping = localImageToHashCache.get();
Map<String, String> hdfsMapping = hdfsImageToHashCache.get();
String resolved = localMapping.get(imageTag);
if (resolved == null) {
resolved = hdfsMapping.get(imageTag);
}
return resolved == null ? imageTag : resolved;
}
/**
 * Opens a UTF-8 reader over the local image-tag-to-hash file, or returns
 * null when there is nothing new to read: no file configured, file missing,
 * or file unmodified since the last successful call (tracked in localModTime).
 */
protected BufferedReader getLocalImageToHashReader() throws IOException {
if (localImageTagToHashFile == null) {
LOG.debug("Did not load local image to hash file, " +
"file is null");
return null;
}
File imageTagToHashFile = new File(localImageTagToHashFile);
if(!imageTagToHashFile.exists()) {
LOG.debug("Did not load local image to hash file, " +
"file doesn't exist");
return null;
}
long newLocalModTime = imageTagToHashFile.lastModified();
if (newLocalModTime == localModTime) {
LOG.debug("Did not load local image to hash file, " +
"file is unmodified");
return null;
}
// Remember the new timestamp so the next call skips an unchanged file.
localModTime = newLocalModTime;
return new BufferedReader(new InputStreamReader(
new FileInputStream(imageTagToHashFile), StandardCharsets.UTF_8));
}
/**
 * Opens a UTF-8 reader over the HDFS image-tag-to-hash file, or returns null
 * when there is nothing new to read: no file configured, file missing, or
 * file unmodified since the last successful call (tracked in hdfsModTime).
 */
protected BufferedReader getHdfsImageToHashReader() throws IOException {
if (hdfsImageToHashFile == null) {
LOG.debug("Did not load hdfs image to hash file, " +
"file is null");
return null;
}
Path imageToHash = new Path(hdfsImageToHashFile);
FileSystem fs = imageToHash.getFileSystem(conf);
if (!fs.exists(imageToHash)) {
LOG.debug("Did not load hdfs image to hash file, " +
"file doesn't exist");
return null;
}
long newHdfsModTime = fs.getFileStatus(imageToHash).getModificationTime();
if (newHdfsModTime == hdfsModTime) {
LOG.debug("Did not load hdfs image to hash file, " +
"file is unmodified");
return null;
}
// Remember the new timestamp so the next call skips an unchanged file.
hdfsModTime = newHdfsModTime;
return new BufferedReader(new InputStreamReader(fs.open(imageToHash),
StandardCharsets.UTF_8));
}
/**
 * Parses an image-tag-to-hash file into a tag-to-hash map. Each line lists
 * one or more comma-separated tags followed by a colon and a SHA-256 hash;
 * anything after a {@code #} is treated as a comment and ignored.
 * Example:
 *   foo/bar:current,fizz/gig:latest:123456789
 *   #this/line:wont,be:parsed:2378590895
 * maps both foo/bar:current and fizz/gig:latest to 123456789.
 * Malformed entries and malformed hashes are logged and skipped.
 * Returns null when the reader is null (nothing to read).
 */
protected static Map<String, String> readImageToHashFile(
BufferedReader br) throws IOException {
if (br == null) {
return null;
}
Map<String, String> mapping = new HashMap<>();
for (String line = br.readLine(); line != null; line = br.readLine()) {
int commentStart = line.indexOf('#');
if (commentStart == 0) {
continue;
}
if (commentStart > 0) {
line = line.substring(0, commentStart);
}
int hashSep = line.lastIndexOf(':');
if (hashSep < 0) {
LOG.warn("Malformed imageTagToManifest entry: " + line);
continue;
}
String hash = line.substring(hashSep + 1);
if (!hash.matches(ALPHA_NUMERIC) || hash.length() != SHA256_HASH_LENGTH) {
LOG.warn("Malformed image hash: " + hash);
continue;
}
for (String imageTag : line.substring(0, hashSep).split(",")) {
mapping.put(imageTag, hash);
}
}
return mapping;
}
/**
 * Reloads the local and HDFS image-tag-to-hash caches from their backing
 * files. A cache is replaced only when its file was re-read and the parsed
 * mapping differs from the currently cached one.
 *
 * @return true if at least one cache was replaced
 */
public boolean loadImageToHashFiles() throws IOException {
boolean ret = false;
try (
BufferedReader localBr = getLocalImageToHashReader();
BufferedReader hdfsBr = getHdfsImageToHashReader()
) {
// A null reader means the file is absent or unchanged; readImageToHashFile
// then returns null and the corresponding cache is left untouched.
Map<String, String> localImageToHash = readImageToHashFile(localBr);
Map<String, String> hdfsImageToHash = readImageToHashFile(hdfsBr);
Map<String, String> tmpLocalImageToHash = localImageToHashCache.get();
Map<String, String> tmpHdfsImageToHash = hdfsImageToHashCache.get();
if (localImageToHash != null &&
!localImageToHash.equals(tmpLocalImageToHash)) {
localImageToHashCache.set(localImageToHash);
LOG.info("Reloaded local image tag to hash cache");
ret = true;
}
if (hdfsImageToHash != null &&
!hdfsImageToHash.equals(tmpHdfsImageToHash)) {
hdfsImageToHashCache.set(hdfsImageToHash);
LOG.info("Reloaded hdfs image tag to hash cache");
ret = true;
}
}
return ret;
}
/**
 * Reads configuration: the (optional) local and HDFS image-tag-to-hash file
 * locations, the manifest directory, and the manifest cache size; then
 * creates the manifest cache and the refresh executor.
 */
@Override
protected void serviceInit(Configuration configuration) throws Exception {
super.serviceInit(configuration);
this.conf = configuration;
localImageTagToHashFile = conf.get(NM_LOCAL_RUNC_IMAGE_TAG_TO_HASH_FILE);
if (localImageTagToHashFile == null) {
LOG.debug("Failed to load local runC image to hash file. " +
"Config not set");
}
hdfsImageToHashFile = conf.get(NM_HDFS_RUNC_IMAGE_TAG_TO_HASH_FILE);
if (hdfsImageToHashFile == null) {
LOG.debug("Failed to load HDFS runC image to hash file. Config not set");
}
if(hdfsImageToHashFile == null && localImageTagToHashFile == null) {
LOG.warn("No valid image-tag-to-hash files");
}
manifestDir = conf.get(NM_RUNC_IMAGE_TOPLEVEL_DIR,
DEFAULT_NM_RUNC_IMAGE_TOPLEVEL_DIR) + "/manifests/";
int numManifestsToCache = conf.getInt(NM_RUNC_NUM_MANIFESTS_TO_CACHE,
DEFAULT_NUM_MANIFESTS_TO_CACHE);
this.objMapper = new ObjectMapper();
// Bounded, synchronized LRU cache of parsed manifests.
this.manifestCache = Collections.synchronizedMap(
new LRUCache(numManifestsToCache, 0.75f));
// Executor used for the periodic cache-refresh task.
exec = HadoopExecutors.newScheduledThreadPool(1);
}
/**
 * Performs an initial cache load, then schedules periodic reloads of the
 * image-tag-to-hash files at the configured refresh interval.
 */
@Override
protected void serviceStart() throws Exception {
super.serviceStart();
if(!loadImageToHashFiles()) {
LOG.warn("Couldn't load any image-tag-to-hash-files");
}
int runcCacheRefreshInterval = conf.getInt(NM_RUNC_CACHE_REFRESH_INTERVAL,
DEFAULT_NM_RUNC_CACHE_REFRESH_INTERVAL);
// Reuse the executor created in serviceInit. Creating a second pool here
// (as before) leaked the first one: its thread was never shut down.
exec.scheduleWithFixedDelay(
new Runnable() {
@Override
public void run() {
try {
loadImageToHashFiles();
} catch (Exception e) {
LOG.warn("runC cache refresh thread caught an exception: ", e);
}
}
}, runcCacheRefreshInterval, runcCacheRefreshInterval, TimeUnit.SECONDS);
}
/** Stops the service and shuts down the cache-refresh executor. */
@Override
protected void serviceStop() throws Exception {
super.serviceStop();
// Guard against stop() being invoked before serviceInit assigned exec.
if (exec != null) {
exec.shutdownNow();
}
}
private static
|
ImageTagToManifestPlugin
|
java
|
bumptech__glide
|
library/test/src/test/java/com/bumptech/glide/load/resource/bitmap/CenterCropTest.java
|
{
"start": 1357,
"end": 5344
}
|
/**
 * Tests for {@code CenterCrop}: bitmap-pool interaction, resource recycling,
 * output dimensions for larger/smaller inputs, and key equality/digest
 * regression.
 */
class ____ {
@Rule public final KeyTester keyTester = new KeyTester();
@Mock private Resource<Bitmap> resource;
@Mock private BitmapPool pool;
@Mock private Transformation<Bitmap> transformation;
private CenterCrop centerCrop;
private int bitmapWidth;
private int bitmapHeight;
private Bitmap bitmap;
private Application context;
@Before
public void setUp() {
MockitoAnnotations.initMocks(this);
// 100x100 source bitmap served by the mocked resource; the mocked pool
// fabricates bitmaps on demand via Util.CreateBitmap.
bitmapWidth = 100;
bitmapHeight = 100;
bitmap = Bitmap.createBitmap(bitmapWidth, bitmapHeight, Bitmap.Config.ARGB_8888);
when(resource.get()).thenReturn(bitmap);
when(pool.get(anyInt(), anyInt(), any(Bitmap.Config.class)))
.thenAnswer(new Util.CreateBitmap());
context = ApplicationProvider.getApplicationContext();
Glide.init(context, new GlideBuilder().setBitmapPool(pool));
centerCrop = new CenterCrop();
}
@After
public void tearDown() {
Glide.tearDown();
}
@Test
public void testDoesNotPutNullBitmapAcquiredFromPool() {
// Re-stub the pool to return null and verify null is never put back.
reset(pool);
when(pool.get(anyInt(), anyInt(), any(Bitmap.Config.class))).thenReturn(null);
centerCrop.transform(context, resource, 100, 100);
verify(pool, never()).put(any(Bitmap.class));
}
@Test
public void testReturnsGivenResourceIfMatchesSizeExactly() {
Resource<Bitmap> result = centerCrop.transform(context, resource, bitmapWidth, bitmapHeight);
assertEquals(resource, result);
}
@Test
public void testDoesNotRecycleGivenResourceIfMatchesSizeExactly() {
centerCrop.transform(context, resource, bitmapWidth, bitmapHeight);
verify(resource, never()).recycle();
}
@Test
public void testDoesNotRecycleGivenResource() {
centerCrop.transform(context, resource, 50, 50);
verify(resource, never()).recycle();
}
@Test
@Config(sdk = 19)
public void testAsksBitmapPoolForArgb8888IfInConfigIsNull() {
// A null source config must be mapped to ARGB_8888 when asking the pool.
bitmap.setConfig(null);
centerCrop.transform(context, resource, 10, 10);
verify(pool).get(anyInt(), anyInt(), eq(Bitmap.Config.ARGB_8888));
verify(pool, never()).get(anyInt(), anyInt(), (Bitmap.Config) isNull());
}
@Test
public void testReturnsBitmapWithExactlyGivenDimensionsIfBitmapIsLargerThanTarget() {
int expectedWidth = 75;
int expectedHeight = 74;
// Several oversized source shapes must all crop to the exact target size.
for (int[] dimens :
new int[][] {new int[] {800, 200}, new int[] {450, 100}, new int[] {78, 78}}) {
Bitmap toTransform = Bitmap.createBitmap(dimens[0], dimens[1], Bitmap.Config.ARGB_4444);
when(resource.get()).thenReturn(toTransform);
Resource<Bitmap> result =
centerCrop.transform(context, resource, expectedWidth, expectedHeight);
Bitmap transformed = result.get();
assertEquals(expectedWidth, transformed.getWidth());
assertEquals(expectedHeight, transformed.getHeight());
}
}
@Test
public void testReturnsBitmapWithExactlyGivenDimensionsIfBitmapIsSmallerThanTarget() {
int expectedWidth = 100;
int expectedHeight = 100;
// Undersized source shapes must also scale/crop to the exact target size.
for (int[] dimens : new int[][] {new int[] {50, 90}, new int[] {150, 2}, new int[] {78, 78}}) {
Bitmap toTransform = Bitmap.createBitmap(dimens[0], dimens[1], Bitmap.Config.ARGB_4444);
when(resource.get()).thenReturn(toTransform);
Resource<Bitmap> result =
centerCrop.transform(context, resource, expectedWidth, expectedHeight);
Bitmap transformed = result.get();
assertEquals(expectedWidth, transformed.getWidth());
assertEquals(expectedHeight, transformed.getHeight());
}
}
@Test
public void testEquals() throws NoSuchAlgorithmException {
doAnswer(new Util.WriteDigest("other"))
.when(transformation)
.updateDiskCacheKey(any(MessageDigest.class));
// Two CenterCrop instances are equal to each other but not to another
// transformation; the digest is pinned as a regression value.
keyTester
.addEquivalenceGroup(new CenterCrop(), new CenterCrop())
.addEquivalenceGroup(transformation)
.addRegressionTest(
new CenterCrop(), "68bd5819c42b37efbe7124bb851443a6388ee3e2e9034213da6eaa15381d3457")
.test();
}
}
|
CenterCropTest
|
java
|
alibaba__druid
|
core/src/test/java/com/alibaba/druid/bvt/filter/wall/StrictSyntaxCheckTest.java
|
{
"start": 168,
"end": 611
}
|
class ____ extends TestCase {

    /** Strict checking (the default) rejects the unparsable "SELECT SELECT". */
    public void test_syntax() throws Exception {
        assertFalse(WallUtils.isValidateMySql(//
                "SELECT SELECT")); // partially tautological statement
    }

    /** With strict syntax checking disabled, the same statement passes. */
    public void test_syntax_1() throws Exception {
        WallConfig config = new WallConfig();
        config.setStrictSyntaxCheck(false);
        assertTrue(WallUtils.isValidateMySql(//
                "SELECT SELECT", config)); // partially tautological statement
    }
}
|
StrictSyntaxCheckTest
|
java
|
google__gson
|
gson/src/test/java/com/google/gson/internal/ConstructorConstructorTest.java
|
{
"start": 1306,
"end": 1515
}
|
class ____ {
private ConstructorConstructor constructorConstructor =
new ConstructorConstructor(Collections.emptyMap(), true, Collections.emptyList());
private abstract static
|
ConstructorConstructorTest
|
java
|
elastic__elasticsearch
|
server/src/test/java/org/elasticsearch/ingest/IngestDocumentTests.java
|
{
"start": 2214,
"end": 114139
}
|
class ____ extends ESTestCase {
private static final ZonedDateTime BOGUS_TIMESTAMP = ZonedDateTime.of(2016, 10, 23, 0, 0, 0, 0, ZoneOffset.UTC);
private IngestDocument document;
private static final String DOUBLE_ARRAY_FIELD = "double_array_field";
private static final String DOUBLE_DOUBLE_ARRAY_FIELD = "double_double_array";
@Before
public void setTestIngestDocument() {
Map<String, Object> document = new HashMap<>();
Map<String, Object> ingestMap = new HashMap<>();
ingestMap.put("timestamp", BOGUS_TIMESTAMP);
document.put("_ingest", ingestMap);
document.put("foo", "bar");
document.put("int", 123);
Map<String, Object> innerObject = new HashMap<>();
innerObject.put("buzz", "hello world");
innerObject.put("foo_null", null);
innerObject.put("1", "bar");
List<String> innerInnerList = new ArrayList<>();
innerInnerList.add("item1");
List<Object> innerList = new ArrayList<>();
innerList.add(innerInnerList);
innerObject.put("list", innerList);
document.put("fizz", innerObject);
List<Map<String, Object>> list = new ArrayList<>();
Map<String, Object> value = new HashMap<>();
value.put("field", "value");
list.add(value);
list.add(null);
document.put("list", list);
List<String> list2 = new ArrayList<>();
list2.add("foo");
list2.add("bar");
list2.add("baz");
document.put("list2", list2);
document.put(DOUBLE_ARRAY_FIELD, DoubleStream.generate(ESTestCase::randomDouble).limit(randomInt(1000)).toArray());
document.put(
DOUBLE_DOUBLE_ARRAY_FIELD,
new double[][] {
DoubleStream.generate(ESTestCase::randomDouble).limit(randomInt(1000)).toArray(),
DoubleStream.generate(ESTestCase::randomDouble).limit(randomInt(1000)).toArray(),
DoubleStream.generate(ESTestCase::randomDouble).limit(randomInt(1000)).toArray() }
);
var dots = new HashMap<>(
Map.of(
"foo.bar.baz",
"fizzbuzz",
"dotted.integers",
new HashMap<>(Map.of("a", 1, "b.c", 2, "d.e.f", 3, "g.h.i.j", 4, "k.l.m.n.o", 5)),
"inaccessible",
new HashMap<>(Map.of("a", new HashMap<>(Map.of("b", new HashMap<>(Map.of("c", "visible")))), "a.b.c", "inaccessible")),
"arrays",
new HashMap<>(
Map.of(
"dotted.strings",
new ArrayList<>(List.of("a", "b", "c", "d")),
"dotted.objects",
new ArrayList<>(List.of(new HashMap<>(Map.of("foo", "bar")), new HashMap<>(Map.of("baz", "qux")))),
"dotted.other",
new ArrayList<>() {
{
add(null);
add("");
}
}
)
),
"single_fieldname",
new HashMap<>(
Map.of(
"multiple.fieldnames",
new HashMap<>(Map.of("single_fieldname_again", new HashMap<>(Map.of("multiple.fieldnames.again", "result"))))
)
)
)
);
dots.put("foo.bar.null", null);
document.put("dots", dots);
document.put("dotted.bar.baz", true);
document.put("dotted.foo.bar.baz", new HashMap<>(Map.of("qux.quux", true)));
document.put("dotted.bar.baz_null", null);
this.document = new IngestDocument("index", "id", 1, null, null, document);
}
/**
* Executes an action against an ingest document using the provided access pattern. A synthetic pipeline instance with the provided
* access pattern is created and executed against the ingest document, thus updating its internal access pattern.
* @param accessPattern The access pattern to use when executing the block of code
* @param action A consumer which takes the updated ingest document and performs an action with it
* @throws Exception Any exception thrown from the provided consumer
*/
private void doWithAccessPattern(IngestPipelineFieldAccessPattern accessPattern, Consumer<IngestDocument> action) throws Exception {
IngestPipelineTestUtils.doWithAccessPattern(accessPattern, document, action);
}
/**
* Executes an action against an ingest document using a randomly selected access pattern. A synthetic pipeline instance with the
* selected access pattern is created and executed against the ingest document, thus updating its internal access pattern.
* @param action A consumer which takes the updated ingest document and performs an action with it
* @throws Exception Any exception thrown from the provided consumer
*/
private void doWithRandomAccessPattern(Consumer<IngestDocument> action) throws Exception {
IngestPipelineTestUtils.doWithRandomAccessPattern(document, action);
}
private void assertPathValid(IngestDocument doc, String path) {
// The fields being checked do not exist, so they all return false when running hasField
assertFalse(doc.hasField(path));
}
private void assertPathInvalid(IngestDocument doc, String path, String errorMessage) {
IllegalArgumentException expected = expectThrows(IllegalArgumentException.class, () -> doc.hasField(path));
assertThat(expected.getMessage(), equalTo(errorMessage));
}
public void testPathParsingLogic() throws Exception {
// Force a blank document for this test
document = new IngestDocument("index", "id", 1, null, null, new HashMap<>());
doWithRandomAccessPattern((doc) -> {
assertPathInvalid(doc, null, "path cannot be null nor empty");
assertPathInvalid(doc, "", "path cannot be null nor empty");
assertPathValid(doc, "a");
assertPathValid(doc, "ab");
assertPathValid(doc, "abc");
assertPathValid(doc, "a.b");
assertPathValid(doc, "a.b.c");
// Trailing empty strings are trimmed by field path parsing logic
assertPathValid(doc, "a.");
assertPathValid(doc, "a..");
assertPathValid(doc, "a...");
// Empty field names are not allowed in the beginning or middle of the path though
assertPathInvalid(doc, ".a.b", "path [.a.b] is not valid");
assertPathInvalid(doc, "a..b", "path [a..b] is not valid");
});
doWithAccessPattern(CLASSIC, (doc) -> {
// Classic allows number fields because they are treated as either field names or array indices depending on context
assertPathValid(doc, "a.0");
// Classic allows square brackets because it is not part of it's syntax
assertPathValid(doc, "a[0]");
assertPathValid(doc, "a[]");
assertPathValid(doc, "a][");
assertPathValid(doc, "[");
assertPathValid(doc, "a[");
assertPathValid(doc, "[a");
assertPathValid(doc, "]");
assertPathValid(doc, "a]");
assertPathValid(doc, "]a");
assertPathValid(doc, "[]");
assertPathValid(doc, "][");
assertPathValid(doc, "[a]");
assertPathValid(doc, "]a[");
assertPathValid(doc, "[]a");
assertPathValid(doc, "][a");
});
doWithAccessPattern(FLEXIBLE, (doc) -> {
// Flexible has specific handling of square brackets
assertPathInvalid(doc, "a[0]", "path [a[0]] is not valid");
assertPathInvalid(doc, "a[]", "path [a[]] is not valid");
assertPathInvalid(doc, "a][", "path [a][] is not valid");
assertPathInvalid(doc, "[", "path [[] is not valid");
assertPathInvalid(doc, "a[", "path [a[] is not valid");
assertPathInvalid(doc, "[a", "path [[a] is not valid");
assertPathInvalid(doc, "]", "path []] is not valid");
assertPathInvalid(doc, "a]", "path [a]] is not valid");
assertPathInvalid(doc, "]a", "path []a] is not valid");
assertPathInvalid(doc, "[]", "path [[]] is not valid");
assertPathInvalid(doc, "][", "path [][] is not valid");
assertPathInvalid(doc, "[a]", "path [[a]] is not valid");
assertPathInvalid(doc, "]a[", "path []a[] is not valid");
assertPathInvalid(doc, "[]a", "path [[]a] is not valid");
assertPathInvalid(doc, "][a", "path [][a] is not valid");
assertPathInvalid(doc, "a[0].b", "path [a[0].b] is not valid");
assertPathInvalid(doc, "a[0].b[1]", "path [a[0].b[1]] is not valid");
assertPathInvalid(doc, "a[0].b[1].c", "path [a[0].b[1].c] is not valid");
assertPathInvalid(doc, "a[0].b[1].c[2]", "path [a[0].b[1].c[2]] is not valid");
assertPathInvalid(doc, "a[0][1].c[2]", "path [a[0][1].c[2]] is not valid");
assertPathInvalid(doc, "a[0].b[1][2]", "path [a[0].b[1][2]] is not valid");
assertPathInvalid(doc, "a[0][1][2]", "path [a[0][1][2]] is not valid");
assertPathInvalid(doc, "a[0][", "path [a[0][] is not valid");
assertPathInvalid(doc, "a[0]]", "path [a[0]]] is not valid");
assertPathInvalid(doc, "a[0]blahblah", "path [a[0]blahblah] is not valid");
});
}
public void testSimpleGetFieldValue() throws Exception {
doWithRandomAccessPattern((doc) -> {
assertThat(doc.getFieldValue("foo", String.class), equalTo("bar"));
assertThat(doc.getFieldValue("int", Integer.class), equalTo(123));
assertThat(doc.getFieldValue("_source.foo", String.class), equalTo("bar"));
assertThat(doc.getFieldValue("_source.int", Integer.class), equalTo(123));
assertThat(doc.getFieldValue("_index", String.class), equalTo("index"));
assertThat(doc.getFieldValue("_id", String.class), equalTo("id"));
assertThat(
doc.getFieldValue("_ingest.timestamp", ZonedDateTime.class),
both(notNullValue()).and(not(equalTo(BOGUS_TIMESTAMP)))
);
assertThat(doc.getFieldValue("_source._ingest.timestamp", ZonedDateTime.class), equalTo(BOGUS_TIMESTAMP));
});
doWithAccessPattern(FLEXIBLE, (doc) -> {
assertThat(doc.getFieldValue("dots.foo.bar.baz", String.class), equalTo("fizzbuzz"));
assertThat(doc.getFieldValue("dotted.bar.baz", Boolean.class), equalTo(true));
});
}
public void testGetFieldValueIgnoreMissing() throws Exception {
doWithRandomAccessPattern((doc) -> {
assertThat(doc.getFieldValue("foo", String.class, randomBoolean()), equalTo("bar"));
assertThat(doc.getFieldValue("int", Integer.class, randomBoolean()), equalTo(123));
// if ignoreMissing is true, we just return nulls for values that aren't found
assertThat(doc.getFieldValue("nonsense", Integer.class, true), nullValue());
assertThat(doc.getFieldValue("some.nonsense", Integer.class, true), nullValue());
assertThat(doc.getFieldValue("fizz.some.nonsense", Integer.class, true), nullValue());
});
doWithAccessPattern(CLASSIC, (doc) -> {
// if ignoreMissing is false, we throw an exception for values that aren't found
IllegalArgumentException e;
e = expectThrows(IllegalArgumentException.class, () -> doc.getFieldValue("fizz.some.nonsense", Integer.class, false));
assertThat(e.getMessage(), is("field [some] not present as part of path [fizz.some.nonsense]"));
// if ignoreMissing is true, and the object is present-and-of-the-wrong-type, then we also throw an exception
e = expectThrows(IllegalArgumentException.class, () -> doc.getFieldValue("int", Boolean.class, true));
assertThat(e.getMessage(), is("field [int] of type [java.lang.Integer] cannot be cast to [java.lang.Boolean]"));
});
doWithAccessPattern(FLEXIBLE, (doc -> {
// if ignoreMissing is false, we throw an exception for values that aren't found
IllegalArgumentException e;
e = expectThrows(IllegalArgumentException.class, () -> doc.getFieldValue("fizz.some.nonsense", Integer.class, false));
assertThat(e.getMessage(), is("field [some.nonsense] not present as part of path [fizz.some.nonsense]"));
// if ignoreMissing is true, and the object is present-and-of-the-wrong-type, then we also throw an exception
e = expectThrows(IllegalArgumentException.class, () -> doc.getFieldValue("int", Boolean.class, true));
assertThat(e.getMessage(), is("field [int] of type [java.lang.Integer] cannot be cast to [java.lang.Boolean]"));
}));
}
public void testGetSourceObject() throws Exception {
try {
doWithRandomAccessPattern((doc) -> doc.getFieldValue("_source", Object.class));
fail("get field value should have failed");
} catch (IllegalArgumentException e) {
assertThat(e.getMessage(), equalTo("field [_source] not present as part of path [_source]"));
}
}
public void testGetIngestObject() throws Exception {
doWithRandomAccessPattern((doc) -> assertThat(doc.getFieldValue("_ingest", Map.class), notNullValue()));
}
public void testGetEmptyPathAfterStrippingOutPrefix() throws Exception {
try {
doWithRandomAccessPattern((doc) -> doc.getFieldValue("_source.", Object.class));
fail("get field value should have failed");
} catch (IllegalArgumentException e) {
assertThat(e.getMessage(), equalTo("path [_source.] is not valid"));
}
try {
doWithRandomAccessPattern((doc) -> doc.getFieldValue("_ingest.", Object.class));
fail("get field value should have failed");
} catch (IllegalArgumentException e) {
assertThat(e.getMessage(), equalTo("path [_ingest.] is not valid"));
}
}
public void testGetFieldValueNullValue() throws Exception {
doWithRandomAccessPattern((doc) -> assertThat(doc.getFieldValue("fizz.foo_null", Object.class), nullValue()));
}
public void testSimpleGetFieldValueTypeMismatch() throws Exception {
try {
doWithRandomAccessPattern((doc) -> doc.getFieldValue("int", String.class));
fail("getFieldValue should have failed");
} catch (IllegalArgumentException e) {
assertThat(e.getMessage(), equalTo("field [int] of type [java.lang.Integer] cannot be cast to [java.lang.String]"));
}
try {
doWithRandomAccessPattern((doc) -> doc.getFieldValue("foo", Integer.class));
fail("getFieldValue should have failed");
} catch (IllegalArgumentException e) {
assertThat(e.getMessage(), equalTo("field [foo] of type [java.lang.String] cannot be cast to [java.lang.Integer]"));
}
}
public void testSimpleGetFieldValueIgnoreMissingAndTypeMismatch() throws Exception {
try {
doWithRandomAccessPattern((doc) -> doc.getFieldValue("int", String.class, randomBoolean()));
fail("getFieldValue should have failed");
} catch (IllegalArgumentException e) {
assertThat(e.getMessage(), equalTo("field [int] of type [java.lang.Integer] cannot be cast to [java.lang.String]"));
}
try {
doWithRandomAccessPattern((doc) -> doc.getFieldValue("foo", Integer.class, randomBoolean()));
fail("getFieldValue should have failed");
} catch (IllegalArgumentException e) {
assertThat(e.getMessage(), equalTo("field [foo] of type [java.lang.String] cannot be cast to [java.lang.Integer]"));
}
}
    /**
     * Dotted-path retrieval of nested values. Both patterns resolve nested map children;
     * FLEXIBLE additionally resolves field names that literally contain dots, preferring the
     * fully-nested object form when both representations exist, and can return whole lists.
     */
    public void testNestedGetFieldValue() throws Exception {
        doWithRandomAccessPattern((doc) -> {
            assertThat(doc.getFieldValue("fizz.buzz", String.class), equalTo("hello world"));
            assertThat(doc.getFieldValue("fizz.1", String.class), equalTo("bar"));
        });
        doWithAccessPattern(FLEXIBLE, (doc) -> {
            // Several layers of dotted field names dots -> dotted.integers -> [a - k.l.m.n.o]
            assertThat(doc.getFieldValue("dots.dotted.integers.a", Integer.class), equalTo(1));
            assertThat(doc.getFieldValue("dots.dotted.integers.b.c", Integer.class), equalTo(2));
            assertThat(doc.getFieldValue("dots.dotted.integers.d.e.f", Integer.class), equalTo(3));
            assertThat(doc.getFieldValue("dots.dotted.integers.g.h.i.j", Integer.class), equalTo(4));
            assertThat(doc.getFieldValue("dots.dotted.integers.k.l.m.n.o", Integer.class), equalTo(5));
            // The dotted field {dots: {inaccessible: {a.b.c: "inaccessible"}}} is inaccessible because
            // the field {dots: {inaccessible: {a: {b: {c: "visible"}}}}} exists
            assertThat(doc.getFieldValue("dots.inaccessible.a.b.c", String.class), equalTo("visible"));
            // Mixing multiple single tokens with dotted tokens
            assertThat(
                doc.getFieldValue(
                    "dots.single_fieldname.multiple.fieldnames.single_fieldname_again.multiple.fieldnames.again",
                    String.class
                ),
                equalTo("result")
            );
            // Flexible can retrieve list objects
            assertThat(doc.getFieldValue("dots.arrays.dotted.strings", List.class), equalTo(new ArrayList<>(List.of("a", "b", "c", "d"))));
            assertThat(
                doc.getFieldValue("dots.arrays.dotted.objects", List.class),
                equalTo(new ArrayList<>(List.of(new HashMap<>(Map.of("foo", "bar")), new HashMap<>(Map.of("baz", "qux")))))
            );
        });
    }
public void testNestedGetFieldValueTypeMismatch() throws Exception {
try {
doWithRandomAccessPattern((doc) -> doc.getFieldValue("foo.foo.bar", String.class));
} catch (IllegalArgumentException e) {
assertThat(
e.getMessage(),
equalTo("cannot resolve [foo] from object of type [java.lang.String] as part of path [foo.foo.bar]")
);
}
}
    /**
     * Numeric list indexing in paths: CLASSIC resolves "list.0.field"; FLEXIBLE currently
     * rejects any list traversal outright (see TODO below).
     */
    public void testListGetFieldValue() throws Exception {
        doWithAccessPattern(CLASSIC, (doc) -> assertThat(doc.getFieldValue("list.0.field", String.class), equalTo("value")));
        doWithAccessPattern(FLEXIBLE, (doc) -> {
            // TODO: Flexible will have a new notation for list indexing - For now it does not support traversing lists
            var illegalArgument = expectThrows(IllegalArgumentException.class, () -> doc.getFieldValue("list.0.field", String.class));
            assertThat(illegalArgument.getMessage(), equalTo("path [list.0.field] is not valid"));
            illegalArgument = expectThrows(
                IllegalArgumentException.class,
                () -> doc.getFieldValue("dots.arrays.dotted.objects.0.foo", String.class)
            );
            assertThat(illegalArgument.getMessage(), equalTo("path [dots.arrays.dotted.objects.0.foo] is not valid"));
        });
    }
    /**
     * Reading a null list element: CLASSIC returns the null at index 1; FLEXIBLE rejects the
     * indexed path entirely (see TODO below).
     */
    public void testListGetFieldValueNull() throws Exception {
        doWithAccessPattern(CLASSIC, (doc) -> assertThat(doc.getFieldValue("list.1", String.class), nullValue()));
        doWithAccessPattern(FLEXIBLE, (doc) -> {
            // TODO: Flexible will have a new notation for list indexing - For now it does not support traversing lists
            var illegalArgument = expectThrows(IllegalArgumentException.class, () -> doc.getFieldValue("list.1", String.class));
            assertThat(illegalArgument.getMessage(), equalTo("path [list.1] is not valid"));
            illegalArgument = expectThrows(
                IllegalArgumentException.class,
                () -> doc.getFieldValue("dots.arrays.dotted.other.0", String.class)
            );
            assertThat(illegalArgument.getMessage(), equalTo("path [dots.arrays.dotted.other.0] is not valid"));
        });
    }
public void testListGetFieldValueIndexNotNumeric() throws Exception {
try {
doWithAccessPattern(CLASSIC, (doc) -> doc.getFieldValue("list.test.field", String.class));
} catch (IllegalArgumentException e) {
assertThat(e.getMessage(), equalTo("[test] is not an integer, cannot be used as an index as part of path [list.test.field]"));
}
try {
// TODO: Flexible will have a new notation for list indexing - For now it does not support traversing lists
doWithAccessPattern(FLEXIBLE, (doc) -> doc.getFieldValue("list.test.field", String.class));
} catch (IllegalArgumentException e) {
assertThat(e.getMessage(), equalTo("path [list.test.field] is not valid"));
}
try {
// TODO: Flexible will have a new notation for list indexing - For now it does not support traversing lists
doWithAccessPattern(FLEXIBLE, (doc) -> doc.getFieldValue("dots.arrays.dotted.strings.test.field", String.class));
} catch (IllegalArgumentException e) {
assertThat(e.getMessage(), equalTo("path [dots.arrays.dotted.strings.test.field] is not valid"));
}
}
public void testListGetFieldValueIndexOutOfBounds() throws Exception {
try {
doWithAccessPattern(CLASSIC, (doc) -> doc.getFieldValue("list.10.field", String.class));
} catch (IllegalArgumentException e) {
assertThat(e.getMessage(), equalTo("[10] is out of bounds for array with length [2] as part of path [list.10.field]"));
}
try {
// TODO: Flexible will have a new notation for list indexing - For now it does not support traversing lists
doWithAccessPattern(FLEXIBLE, (doc) -> doc.getFieldValue("list.10.field", String.class));
} catch (IllegalArgumentException e) {
assertThat(e.getMessage(), equalTo("path [list.10.field] is not valid"));
}
try {
// TODO: Flexible will have a new notation for list indexing - For now it does not support traversing lists
doWithAccessPattern(FLEXIBLE, (doc) -> doc.getFieldValue("dots.arrays.dotted.strings.10", String.class));
} catch (IllegalArgumentException e) {
assertThat(e.getMessage(), equalTo("path [dots.arrays.dotted.strings.10] is not valid"));
}
}
public void testGetFieldValueNotFound() throws Exception {
try {
doWithAccessPattern(CLASSIC, (doc) -> doc.getFieldValue("not.here", String.class));
fail("get field value should have failed");
} catch (IllegalArgumentException e) {
assertThat(e.getMessage(), equalTo("field [not] not present as part of path [not.here]"));
}
try {
doWithAccessPattern(FLEXIBLE, (doc) -> doc.getFieldValue("not.here", String.class));
fail("get field value should have failed");
} catch (IllegalArgumentException e) {
assertThat(e.getMessage(), equalTo("field [not.here] not present as part of path [not.here]"));
}
}
public void testGetFieldValueNotFoundNullParent() throws Exception {
try {
doWithRandomAccessPattern((doc) -> doc.getFieldValue("fizz.foo_null.not_there", String.class));
fail("get field value should have failed");
} catch (IllegalArgumentException e) {
assertThat(e.getMessage(), equalTo("cannot resolve [not_there] from null as part of path [fizz.foo_null.not_there]"));
}
}
public void testGetFieldValueNull() throws Exception {
try {
doWithRandomAccessPattern((doc) -> doc.getFieldValue(null, String.class));
fail("get field value should have failed");
} catch (IllegalArgumentException e) {
assertThat(e.getMessage(), equalTo("path cannot be null nor empty"));
}
}
public void testGetFieldValueEmpty() throws Exception {
try {
doWithRandomAccessPattern((doc) -> doc.getFieldValue("", String.class));
fail("get field value should have failed");
} catch (IllegalArgumentException e) {
assertThat(e.getMessage(), equalTo("path cannot be null nor empty"));
}
}
public void testHasField() throws Exception {
doWithRandomAccessPattern((doc) -> {
assertTrue(doc.hasField("fizz"));
assertTrue(doc.hasField("_index"));
assertTrue(doc.hasField("_id"));
assertTrue(doc.hasField("_source.fizz"));
assertTrue(doc.hasField("_ingest.timestamp"));
});
doWithAccessPattern(FLEXIBLE, (doc) -> assertTrue(doc.hasField("dotted.bar.baz")));
}
    /**
     * hasField over nested and dotted structures. For FLEXIBLE, a path matches only at the
     * exact dotted key that exists in the fixture: intermediate prefixes of a dotted key
     * (e.g. "dots.foo" when only "dots.{foo.bar.baz}" exists) are reported as absent.
     */
    public void testHasFieldNested() throws Exception {
        doWithRandomAccessPattern((doc) -> {
            assertTrue(doc.hasField("fizz.buzz"));
            assertTrue(doc.hasField("_source._ingest.timestamp"));
        });
        doWithAccessPattern(FLEXIBLE, (doc) -> {
            assertTrue(doc.hasField("dots"));
            {
                assertFalse(doc.hasField("dots.foo"));
                assertFalse(doc.hasField("dots.foo.bar"));
                assertTrue(doc.hasField("dots.foo.bar.baz"));
            }
            assertFalse(doc.hasField("dots.dotted"));
            assertTrue(doc.hasField("dots.dotted.integers"));
            {
                assertTrue(doc.hasField("dots.dotted.integers.a"));
                assertFalse(doc.hasField("dots.dotted.integers.b"));
                assertTrue(doc.hasField("dots.dotted.integers.b.c"));
                assertFalse(doc.hasField("dots.dotted.integers.d"));
                assertFalse(doc.hasField("dots.dotted.integers.d.e"));
                assertTrue(doc.hasField("dots.dotted.integers.d.e.f"));
                assertFalse(doc.hasField("dots.dotted.integers.g"));
                assertFalse(doc.hasField("dots.dotted.integers.g.h"));
                assertFalse(doc.hasField("dots.dotted.integers.g.h.i"));
                assertTrue(doc.hasField("dots.dotted.integers.g.h.i.j"));
                assertFalse(doc.hasField("dots.dotted.integers.k"));
                assertFalse(doc.hasField("dots.dotted.integers.k.l"));
                assertFalse(doc.hasField("dots.dotted.integers.k.l.m"));
                assertFalse(doc.hasField("dots.dotted.integers.k.l.m.n"));
                assertTrue(doc.hasField("dots.dotted.integers.k.l.m.n.o"));
            }
            assertTrue(doc.hasField("dots.inaccessible"));
            {
                assertTrue(doc.hasField("dots.inaccessible.a"));
                assertTrue(doc.hasField("dots.inaccessible.a.b"));
                assertTrue(doc.hasField("dots.inaccessible.a.b.c"));
            }
            assertTrue(doc.hasField("dots.arrays"));
            {
                assertTrue(doc.hasField("dots.arrays.dotted.strings"));
                assertTrue(doc.hasField("dots.arrays.dotted.objects"));
            }
            assertTrue(doc.hasField("dots.single_fieldname"));
            {
                assertFalse(doc.hasField("dots.single_fieldname.multiple"));
                assertTrue(doc.hasField("dots.single_fieldname.multiple.fieldnames"));
                assertTrue(doc.hasField("dots.single_fieldname.multiple.fieldnames.single_fieldname_again"));
                assertFalse(doc.hasField("dots.single_fieldname.multiple.fieldnames.single_fieldname_again.multiple"));
                assertFalse(doc.hasField("dots.single_fieldname.multiple.fieldnames.single_fieldname_again.multiple.fieldnames"));
                assertTrue(doc.hasField("dots.single_fieldname.multiple.fieldnames.single_fieldname_again.multiple.fieldnames.again"));
            }
            assertFalse(doc.hasField("dotted.foo.bar.baz.qux"));
            assertTrue(doc.hasField("dotted.foo.bar.baz.qux.quux"));
        });
    }
public void testListHasField() throws Exception {
assertTrue(document.hasField("list.0.field"));
doWithAccessPattern(FLEXIBLE, (doc) -> {
// TODO: Flexible will have a new notation for list indexing - For now it does not support traversing lists
// Until then, traversing arrays in the hasFields method returns false
assertFalse(doc.hasField("dots.arrays.dotted.strings.0"));
assertFalse(doc.hasField("dots.arrays.dotted.objects.0"));
assertFalse(doc.hasField("dots.arrays.dotted.objects.0.foo"));
});
}
public void testListHasFieldNull() throws Exception {
doWithAccessPattern(CLASSIC, (doc) -> assertTrue(doc.hasField("list.1")));
// TODO: Flexible will have a new notation for list indexing - For now it does not locate indexed fields
doWithAccessPattern(FLEXIBLE, (doc) -> assertFalse(doc.hasField("list.1")));
doWithAccessPattern(FLEXIBLE, (doc) -> assertFalse(doc.hasField("dots.arrays.dotted.other.0")));
}
public void testListHasFieldIndexOutOfBounds() throws Exception {
doWithAccessPattern(CLASSIC, (doc) -> assertFalse(doc.hasField("list.10")));
// TODO: Flexible will have a new notation for list indexing - For now it does not locate indexed fields
doWithAccessPattern(FLEXIBLE, (doc) -> assertFalse(doc.hasField("list.10")));
doWithAccessPattern(FLEXIBLE, (doc) -> assertFalse(doc.hasField("dots.arrays.dotted.strings.10")));
}
    /**
     * The strict hasField overload (failOutOfRange = true): CLASSIC throws for indices past
     * the end of the list, while FLEXIBLE fails fast to false without any bounds check.
     */
    public void testListHasFieldIndexOutOfBounds_fail() throws Exception {
        doWithAccessPattern(CLASSIC, doc -> {
            assertTrue(doc.hasField("list.0", true));
            assertTrue(doc.hasField("list.1", true));
            Exception e = expectThrows(IllegalArgumentException.class, () -> doc.hasField("list.2", true));
            assertThat(e.getMessage(), equalTo("[2] is out of bounds for array with length [2] as part of path [list.2]"));
            e = expectThrows(IllegalArgumentException.class, () -> doc.hasField("list.10", true));
            assertThat(e.getMessage(), equalTo("[10] is out of bounds for array with length [2] as part of path [list.10]"));
        });
        doWithAccessPattern(FLEXIBLE, doc -> {
            assertFalse(doc.hasField("list.0", true));
            assertFalse(doc.hasField("list.1", true));
            // TODO: Flexible will have a new notation for list indexing - we fail fast, and currently don't check the bounds
            assertFalse(doc.hasField("list.2", true));
            assertFalse(doc.hasField("list.10", true));
        });
    }
public void testListHasFieldIndexNotNumeric() throws Exception {
doWithRandomAccessPattern((doc) -> assertFalse(doc.hasField("list.test")));
}
public void testNestedHasFieldTypeMismatch() throws Exception {
doWithRandomAccessPattern((doc) -> assertFalse(doc.hasField("foo.foo.bar")));
}
public void testHasFieldNotFound() throws Exception {
doWithRandomAccessPattern((doc) -> assertFalse(doc.hasField("not.here")));
}
public void testHasFieldNotFoundNullParent() throws Exception {
doWithRandomAccessPattern((doc) -> assertFalse(doc.hasField("fizz.foo_null.not_there")));
}
public void testHasFieldNestedNotFound() throws Exception {
doWithRandomAccessPattern((doc) -> assertFalse(doc.hasField("fizz.doesnotexist")));
}
public void testHasFieldNull() throws Exception {
try {
doWithRandomAccessPattern((doc) -> doc.hasField(null));
fail("has field should have failed");
} catch (IllegalArgumentException e) {
assertThat(e.getMessage(), equalTo("path cannot be null nor empty"));
}
}
public void testHasFieldNullValue() throws Exception {
doWithRandomAccessPattern((doc) -> assertTrue(doc.hasField("fizz.foo_null")));
doWithAccessPattern(FLEXIBLE, (doc) -> assertTrue(doc.hasField("dotted.bar.baz_null")));
}
public void testHasFieldEmpty() throws Exception {
try {
doWithRandomAccessPattern((doc) -> doc.hasField(""));
fail("has field should have failed");
} catch (IllegalArgumentException e) {
assertThat(e.getMessage(), equalTo("path cannot be null nor empty"));
}
}
public void testHasFieldSourceObject() throws Exception {
doWithRandomAccessPattern((doc) -> assertThat(doc.hasField("_source"), equalTo(false)));
}
public void testHasFieldIngestObject() throws Exception {
doWithRandomAccessPattern((doc) -> assertThat(doc.hasField("_ingest"), equalTo(true)));
}
public void testHasFieldEmptyPathAfterStrippingOutPrefix() throws Exception {
try {
doWithRandomAccessPattern((doc) -> doc.hasField("_source."));
fail("has field value should have failed");
} catch (IllegalArgumentException e) {
assertThat(e.getMessage(), equalTo("path [_source.] is not valid"));
}
try {
doWithRandomAccessPattern((doc) -> doc.hasField("_ingest."));
fail("has field value should have failed");
} catch (IllegalArgumentException e) {
assertThat(e.getMessage(), equalTo("path [_ingest.] is not valid"));
}
}
    /**
     * Simple writes into source and ingest metadata. The _source prefix is stripped before
     * writing, _ingest routes into the ingest metadata map, and under FLEXIBLE a dotted path
     * is stored as a single literal dotted key.
     */
    public void testSimpleSetFieldValue() throws Exception {
        doWithRandomAccessPattern((doc) -> {
            doc.setFieldValue("new_field", "foo");
            assertThat(doc.getSourceAndMetadata().get("new_field"), equalTo("foo"));
            doc.setFieldValue("_ttl", "ttl");
            assertThat(doc.getSourceAndMetadata().get("_ttl"), equalTo("ttl"));
            doc.setFieldValue("_source.another_field", "bar");
            assertThat(doc.getSourceAndMetadata().get("another_field"), equalTo("bar"));
            doc.setFieldValue("_ingest.new_field", "new_value");
            // Metadata contains timestamp, the new_field added above, and the pipeline that is synthesized from doWithRandomAccessPattern
            assertThat(doc.getIngestMetadata().size(), equalTo(3));
            assertThat(doc.getIngestMetadata().get("new_field"), equalTo("new_value"));
            doc.setFieldValue("_ingest.timestamp", "timestamp");
            assertThat(doc.getIngestMetadata().get("timestamp"), equalTo("timestamp"));
        });
        doWithAccessPattern(FLEXIBLE, (doc) -> {
            doc.setFieldValue("dotted.bar.buzz", "fizz");
            assertThat(doc.getSourceAndMetadata().get("dotted.bar.buzz"), equalTo("fizz"));
            doc.setFieldValue("_source.dotted.another.buzz", "fizz");
            assertThat(doc.getSourceAndMetadata().get("dotted.another.buzz"), equalTo("fizz"));
            doc.setFieldValue("_ingest.dotted.bar.buzz", "fizz");
            // Metadata contains timestamp, both fields added above, and the pipeline that is synthesized from doWithRandomAccessPattern
            assertThat(doc.getIngestMetadata().size(), equalTo(4));
            assertThat(doc.getIngestMetadata().get("dotted.bar.buzz"), equalTo("fizz"));
            doc.setFieldValue("dotted.foo", "foo");
            assertThat(doc.getSourceAndMetadata().get("dotted.foo"), instanceOf(String.class));
            assertThat(doc.getSourceAndMetadata().get("dotted.foo"), equalTo("foo"));
        });
    }
    /**
     * Setting an explicit null creates the key with a null value (present, not absent).
     * The (Object) cast disambiguates the setFieldValue overload.
     */
    public void testSetFieldValueNullValue() throws Exception {
        doWithRandomAccessPattern((doc) -> {
            doc.setFieldValue("new_field", (Object) null);
            assertThat(doc.getSourceAndMetadata().containsKey("new_field"), equalTo(true));
            assertThat(doc.getSourceAndMetadata().get("new_field"), nullValue());
        });
        doWithAccessPattern(FLEXIBLE, (doc) -> {
            doc.setFieldValue("dotted.new.field", (Object) null);
            assertThat(doc.getSourceAndMetadata().containsKey("dotted.new.field"), equalTo(true));
            assertThat(doc.getSourceAndMetadata().get("dotted.new.field"), nullValue());
        });
    }
@SuppressWarnings("unchecked")
public void testNestedSetFieldValue() throws Exception {
doWithAccessPattern(CLASSIC, (doc) -> {
doc.setFieldValue("a.b.c.d", "foo");
assertThat(doc.getSourceAndMetadata().get("a"), instanceOf(Map.class));
Map<String, Object> a = (Map<String, Object>) doc.getSourceAndMetadata().get("a");
assertThat(a.get("b"), instanceOf(Map.class));
Map<String, Object> b = (Map<String, Object>) a.get("b");
assertThat(b.get("c"), instanceOf(Map.class));
Map<String, Object> c = (Map<String, Object>) b.get("c");
assertThat(c.get("d"), instanceOf(String.class));
String d = (String) c.get("d");
assertThat(d, equalTo("foo"));
});
doWithAccessPattern(FLEXIBLE, (doc) -> {
doc.setFieldValue("dotted.a.b.c.d", "foo");
assertThat(doc.getSourceAndMetadata().get("dotted.a.b.c.d"), instanceOf(String.class));
assertThat(doc.getSourceAndMetadata().get("dotted.a.b.c.d"), equalTo("foo"));
doc.setFieldValue("dotted.foo.bar.baz.blank", "foo");
assertThat(doc.getSourceAndMetadata().get("dotted.foo.bar.baz"), instanceOf(Map.class));
Map<String, Object> dottedFooBarBaz = (Map<String, Object>) doc.getSourceAndMetadata().get("dotted.foo.bar.baz");
assertThat(dottedFooBarBaz.get("blank"), instanceOf(String.class));
assertThat(dottedFooBarBaz.get("blank"), equalTo("foo"));
});
}
public void testSetFieldValueOnExistingField() throws Exception {
doWithRandomAccessPattern((doc) -> {
doc.setFieldValue("foo", "newbar");
assertThat(doc.getSourceAndMetadata().get("foo"), equalTo("newbar"));
});
doWithAccessPattern(FLEXIBLE, (doc) -> {
doc.setFieldValue("dotted.bar.baz", "newbaz");
assertThat(doc.getSourceAndMetadata().get("dotted.bar.baz"), equalTo("newbaz"));
});
}
@SuppressWarnings("unchecked")
public void testSetFieldValueOnExistingParent() throws Exception {
doWithRandomAccessPattern((doc) -> {
doc.setFieldValue("fizz.new", "bar");
assertThat(doc.getSourceAndMetadata().get("fizz"), instanceOf(Map.class));
Map<String, Object> innerMap = (Map<String, Object>) doc.getSourceAndMetadata().get("fizz");
assertThat(innerMap.get("new"), instanceOf(String.class));
String value = (String) innerMap.get("new");
assertThat(value, equalTo("bar"));
});
doWithAccessPattern(FLEXIBLE, (doc) -> {
doc.setFieldValue("dots.dotted.integers.new", "qux");
assertThat(doc.getSourceAndMetadata().get("dots"), instanceOf(Map.class));
Map<String, Object> innerMap = (Map<String, Object>) doc.getSourceAndMetadata().get("dots");
assertThat(innerMap.get("dotted.integers"), instanceOf(Map.class));
Map<String, Object> innermost = (Map<String, Object>) innerMap.get("dotted.integers");
assertThat(innermost.get("new"), instanceOf(String.class));
assertThat(innermost.get("new"), equalTo("qux"));
});
}
public void testSetFieldValueOnExistingParentTypeMismatch() throws Exception {
try {
doWithRandomAccessPattern((doc) -> doc.setFieldValue("fizz.buzz.new", "bar"));
fail("add field should have failed");
} catch (IllegalArgumentException e) {
assertThat(
e.getMessage(),
equalTo("cannot set [new] with parent object of type [java.lang.String] as part of path [fizz.buzz.new]")
);
}
}
public void testSetFieldValueOnExistingNullParent() throws Exception {
try {
doWithRandomAccessPattern((doc) -> doc.setFieldValue("fizz.foo_null.test", "bar"));
fail("add field should have failed");
} catch (IllegalArgumentException e) {
assertThat(e.getMessage(), equalTo("cannot set [test] with null parent as part of path [fizz.foo_null.test]"));
}
}
public void testSetFieldValueNullName() throws Exception {
try {
doWithRandomAccessPattern((doc) -> doc.setFieldValue(null, "bar"));
fail("add field should have failed");
} catch (IllegalArgumentException e) {
assertThat(e.getMessage(), equalTo("path cannot be null nor empty"));
}
}
public void testSetSourceObject() throws Exception {
doWithRandomAccessPattern((doc) -> {
doc.setFieldValue("_source", "value");
assertThat(doc.getSourceAndMetadata().get("_source"), equalTo("value"));
});
}
public void testSetIngestObject() throws Exception {
doWithRandomAccessPattern((doc) -> {
doc.setFieldValue("_ingest", "value");
assertThat(doc.getSourceAndMetadata().get("_ingest"), equalTo("value"));
});
}
public void testSetIngestSourceObject() throws Exception {
doWithRandomAccessPattern((doc) -> {
// test that we don't strip out the _source prefix when _ingest is used
doc.setFieldValue("_ingest._source", "value");
assertThat(doc.getIngestMetadata().get("_source"), equalTo("value"));
});
}
public void testSetEmptyPathAfterStrippingOutPrefix() throws Exception {
try {
doWithRandomAccessPattern((doc) -> doc.setFieldValue("_source.", "value"));
fail("set field value should have failed");
} catch (IllegalArgumentException e) {
assertThat(e.getMessage(), equalTo("path [_source.] is not valid"));
}
try {
doWithRandomAccessPattern((doc) -> doc.setFieldValue("_ingest.", "_value"));
fail("set field value should have failed");
} catch (IllegalArgumentException e) {
assertThat(e.getMessage(), equalTo("path [_ingest.] is not valid"));
}
}
    /**
     * Setting a path that ends at a list without an index replaces the whole list with the
     * scalar value; under FLEXIBLE the replacement happens at the existing dotted key.
     */
    public void testListSetFieldValueNoIndexProvided() throws Exception {
        doWithRandomAccessPattern((doc) -> {
            doc.setFieldValue("list", "value");
            Object object = doc.getSourceAndMetadata().get("list");
            assertThat(object, instanceOf(String.class));
            assertThat(object, equalTo("value"));
        });
        doWithAccessPattern(FLEXIBLE, (doc) -> {
            doc.setFieldValue("dots.arrays.dotted.strings", "value");
            Object object = doc.getSourceAndMetadata().get("dots");
            assertThat(object, instanceOf(Map.class));
            @SuppressWarnings("unchecked")
            Object arraysField = ((Map<String, Object>) object).get("arrays");
            assertThat(arraysField, instanceOf(Map.class));
            @SuppressWarnings("unchecked")
            Object dottedStringsField = ((Map<String, Object>) arraysField).get("dotted.strings");
            assertThat(dottedStringsField, instanceOf(String.class));
            assertThat(dottedStringsField, equalTo("value"));
        });
    }
    /**
     * appendFieldValue on an existing list adds the value at the end, preserving existing
     * elements (including nulls); FLEXIBLE appends at the existing dotted key.
     */
    public void testListAppendFieldValue() throws Exception {
        doWithRandomAccessPattern((doc) -> {
            doc.appendFieldValue("list", "new_value");
            Object object = doc.getSourceAndMetadata().get("list");
            assertThat(object, instanceOf(List.class));
            @SuppressWarnings("unchecked")
            List<Object> list = (List<Object>) object;
            assertThat(list.size(), equalTo(3));
            assertThat(list.get(0), equalTo(Map.of("field", "value")));
            assertThat(list.get(1), nullValue());
            assertThat(list.get(2), equalTo("new_value"));
        });
        doWithAccessPattern(FLEXIBLE, (doc) -> {
            doc.appendFieldValue("dots.arrays.dotted.strings", "value");
            Object object = doc.getSourceAndMetadata().get("dots");
            assertThat(object, instanceOf(Map.class));
            @SuppressWarnings("unchecked")
            Object arraysField = ((Map<String, Object>) object).get("arrays");
            assertThat(arraysField, instanceOf(Map.class));
            @SuppressWarnings("unchecked")
            Object dottedStringsField = ((Map<String, Object>) arraysField).get("dotted.strings");
            assertThat(dottedStringsField, instanceOf(List.class));
            assertThat(dottedStringsField, equalTo(List.of("a", "b", "c", "d", "value")));
        });
    }
    /**
     * appendFieldValue with allowDuplicates = false: appending a value that is already
     * present leaves the list unchanged.
     */
    public void testListAppendFieldValueWithDuplicate() throws Exception {
        doWithRandomAccessPattern((doc) -> {
            doc.appendFieldValue("list2", "foo", false);
            Object object = doc.getSourceAndMetadata().get("list2");
            assertThat(object, instanceOf(List.class));
            @SuppressWarnings("unchecked")
            List<Object> list = (List<Object>) object;
            assertThat(list.size(), equalTo(3));
            assertThat(list, equalTo(List.of("foo", "bar", "baz")));
        });
        doWithAccessPattern(FLEXIBLE, (doc) -> {
            doc.appendFieldValue("dots.arrays.dotted.strings", "a", false);
            Object object = doc.getSourceAndMetadata().get("dots");
            assertThat(object, instanceOf(Map.class));
            @SuppressWarnings("unchecked")
            Object arraysField = ((Map<String, Object>) object).get("arrays");
            assertThat(arraysField, instanceOf(Map.class));
            @SuppressWarnings("unchecked")
            Object dottedStringsField = ((Map<String, Object>) arraysField).get("dotted.strings");
            assertThat(dottedStringsField, instanceOf(List.class));
            assertThat(dottedStringsField, equalTo(List.of("a", "b", "c", "d")));
        });
    }
    /**
     * appendFieldValue with allowDuplicates = false: a value not yet in the list is
     * appended normally.
     */
    public void testListAppendFieldValueWithoutDuplicate() throws Exception {
        doWithRandomAccessPattern((doc) -> {
            doc.appendFieldValue("list2", "foo2", false);
            Object object = doc.getSourceAndMetadata().get("list2");
            assertThat(object, instanceOf(List.class));
            @SuppressWarnings("unchecked")
            List<Object> list = (List<Object>) object;
            assertThat(list.size(), equalTo(4));
            assertThat(list, equalTo(List.of("foo", "bar", "baz", "foo2")));
        });
        doWithAccessPattern(FLEXIBLE, (doc) -> {
            doc.appendFieldValue("dots.arrays.dotted.strings", "e", false);
            Object object = doc.getSourceAndMetadata().get("dots");
            assertThat(object, instanceOf(Map.class));
            @SuppressWarnings("unchecked")
            Object arraysField = ((Map<String, Object>) object).get("arrays");
            assertThat(arraysField, instanceOf(Map.class));
            @SuppressWarnings("unchecked")
            Object dottedStringsField = ((Map<String, Object>) arraysField).get("dotted.strings");
            assertThat(dottedStringsField, instanceOf(List.class));
            assertThat(dottedStringsField, equalTo(List.of("a", "b", "c", "d", "e")));
        });
    }
    /**
     * Appending a List value appends each of its elements individually rather than nesting
     * the list as a single element.
     */
    public void testListAppendFieldValues() throws Exception {
        doWithRandomAccessPattern((doc) -> {
            doc.appendFieldValue("list", List.of("item1", "item2", "item3"));
            Object object = doc.getSourceAndMetadata().get("list");
            assertThat(object, instanceOf(List.class));
            @SuppressWarnings("unchecked")
            List<Object> list = (List<Object>) object;
            assertThat(list.size(), equalTo(5));
            assertThat(list.get(0), equalTo(Map.of("field", "value")));
            assertThat(list.get(1), nullValue());
            assertThat(list.get(2), equalTo("item1"));
            assertThat(list.get(3), equalTo("item2"));
            assertThat(list.get(4), equalTo("item3"));
        });
        doWithAccessPattern(FLEXIBLE, (doc) -> {
            doc.appendFieldValue("dots.arrays.dotted.strings", List.of("e", "f", "g"));
            Object object = doc.getSourceAndMetadata().get("dots");
            assertThat(object, instanceOf(Map.class));
            @SuppressWarnings("unchecked")
            Object arraysField = ((Map<String, Object>) object).get("arrays");
            assertThat(arraysField, instanceOf(Map.class));
            @SuppressWarnings("unchecked")
            Object dottedStringsField = ((Map<String, Object>) arraysField).get("dotted.strings");
            assertThat(dottedStringsField, instanceOf(List.class));
            assertThat(dottedStringsField, equalTo(List.of("a", "b", "c", "d", "e", "f", "g")));
        });
    }
    /**
     * Appending multiple values with allowDuplicates = false: only the values not already
     * present are added.
     */
    public void testListAppendFieldValuesWithoutDuplicates() throws Exception {
        doWithRandomAccessPattern((doc) -> {
            doc.appendFieldValue("list2", List.of("foo", "bar", "baz", "foo2"), false);
            Object object = doc.getSourceAndMetadata().get("list2");
            assertThat(object, instanceOf(List.class));
            @SuppressWarnings("unchecked")
            List<Object> list = (List<Object>) object;
            assertThat(list.size(), equalTo(4));
            assertThat(list.get(0), equalTo("foo"));
            assertThat(list.get(1), equalTo("bar"));
            assertThat(list.get(2), equalTo("baz"));
            assertThat(list.get(3), equalTo("foo2"));
        });
        doWithAccessPattern(FLEXIBLE, (doc) -> {
            doc.appendFieldValue("dots.arrays.dotted.strings", List.of("a", "b", "c", "d", "e"), false);
            Object object = doc.getSourceAndMetadata().get("dots");
            assertThat(object, instanceOf(Map.class));
            @SuppressWarnings("unchecked")
            Object arraysField = ((Map<String, Object>) object).get("arrays");
            assertThat(arraysField, instanceOf(Map.class));
            @SuppressWarnings("unchecked")
            Object dottedStringsField = ((Map<String, Object>) arraysField).get("dotted.strings");
            assertThat(dottedStringsField, instanceOf(List.class));
            assertThat(dottedStringsField, equalTo(List.of("a", "b", "c", "d", "e")));
        });
    }
    /**
     * Appending to a path that does not exist creates a new single-element list there;
     * FLEXIBLE creates it under the existing dotted parent.
     */
    public void testAppendFieldValueToNonExistingList() throws Exception {
        doWithRandomAccessPattern((doc) -> {
            doc.appendFieldValue("non_existing_list", "new_value");
            Object object = doc.getSourceAndMetadata().get("non_existing_list");
            assertThat(object, instanceOf(List.class));
            @SuppressWarnings("unchecked")
            List<Object> list = (List<Object>) object;
            assertThat(list.size(), equalTo(1));
            assertThat(list.get(0), equalTo("new_value"));
        });
        doWithAccessPattern(FLEXIBLE, (doc) -> {
            doc.appendFieldValue("dots.arrays.dotted.missing", "a");
            Object object = doc.getSourceAndMetadata().get("dots");
            assertThat(object, instanceOf(Map.class));
            @SuppressWarnings("unchecked")
            Object arraysField = ((Map<String, Object>) object).get("arrays");
            assertThat(arraysField, instanceOf(Map.class));
            @SuppressWarnings("unchecked")
            Object dottedStringsField = ((Map<String, Object>) arraysField).get("dotted.missing");
            assertThat(dottedStringsField, instanceOf(List.class));
            assertThat(dottedStringsField, equalTo(List.of("a")));
        });
    }
    /**
     * Appending multiple values to a non-existent path creates a new list containing all of
     * them, in order.
     */
    public void testAppendFieldValuesToNonExistingList() throws Exception {
        doWithRandomAccessPattern((doc) -> {
            doc.appendFieldValue("non_existing_list", List.of("item1", "item2", "item3"));
            Object object = doc.getSourceAndMetadata().get("non_existing_list");
            assertThat(object, instanceOf(List.class));
            @SuppressWarnings("unchecked")
            List<Object> list = (List<Object>) object;
            assertThat(list.size(), equalTo(3));
            assertThat(list.get(0), equalTo("item1"));
            assertThat(list.get(1), equalTo("item2"));
            assertThat(list.get(2), equalTo("item3"));
        });
        doWithAccessPattern(FLEXIBLE, (doc) -> {
            doc.appendFieldValue("dots.arrays.dotted.missing", List.of("a", "b", "c"));
            Object object = doc.getSourceAndMetadata().get("dots");
            assertThat(object, instanceOf(Map.class));
            @SuppressWarnings("unchecked")
            Object arraysField = ((Map<String, Object>) object).get("arrays");
            assertThat(arraysField, instanceOf(Map.class));
            @SuppressWarnings("unchecked")
            Object dottedStringsField = ((Map<String, Object>) arraysField).get("dotted.missing");
            assertThat(dottedStringsField, instanceOf(List.class));
            assertThat(dottedStringsField, equalTo(List.of("a", "b", "c")));
        });
    }
    // Appending to an existing scalar string converts the field into a list [old, new].
    public void testAppendFieldValueConvertStringToList() throws Exception {
        doWithRandomAccessPattern((doc) -> {
            doc.appendFieldValue("fizz.buzz", "new_value");
            Object object = doc.getSourceAndMetadata().get("fizz");
            assertThat(object, instanceOf(Map.class));
            @SuppressWarnings("unchecked")
            Map<String, Object> map = (Map<String, Object>) object;
            object = map.get("buzz");
            assertThat(object, instanceOf(List.class));
            @SuppressWarnings("unchecked")
            List<Object> list = (List<Object>) object;
            assertThat(list.size(), equalTo(2));
            assertThat(list.get(0), equalTo("hello world"));
            assertThat(list.get(1), equalTo("new_value"));
        });
        doWithAccessPattern(FLEXIBLE, (doc) -> {
            // "foo.bar.baz" is a single dotted key under "dots" whose original value is "fizzbuzz".
            doc.appendFieldValue("dots.foo.bar.baz", "new_value");
            Object object = doc.getSourceAndMetadata().get("dots");
            assertThat(object, instanceOf(Map.class));
            @SuppressWarnings("unchecked")
            Object foobarbaz = ((Map<String, Object>) object).get("foo.bar.baz");
            assertThat(foobarbaz, instanceOf(List.class));
            assertThat(foobarbaz, equalTo(List.of("fizzbuzz", "new_value")));
        });
    }
    // Appending multiple values to an existing scalar string converts it into [old, new...].
    public void testAppendFieldValuesConvertStringToList() throws Exception {
        doWithRandomAccessPattern((doc) -> {
            doc.appendFieldValue("fizz.buzz", List.of("item1", "item2", "item3"));
            Object object = doc.getSourceAndMetadata().get("fizz");
            assertThat(object, instanceOf(Map.class));
            @SuppressWarnings("unchecked")
            Map<String, Object> map = (Map<String, Object>) object;
            object = map.get("buzz");
            assertThat(object, instanceOf(List.class));
            @SuppressWarnings("unchecked")
            List<Object> list = (List<Object>) object;
            assertThat(list.size(), equalTo(4));
            assertThat(list.get(0), equalTo("hello world"));
            assertThat(list.get(1), equalTo("item1"));
            assertThat(list.get(2), equalTo("item2"));
            assertThat(list.get(3), equalTo("item3"));
        });
        doWithAccessPattern(FLEXIBLE, (doc) -> {
            doc.appendFieldValue("dots.foo.bar.baz", List.of("fizz", "buzz", "quack"));
            Object object = doc.getSourceAndMetadata().get("dots");
            assertThat(object, instanceOf(Map.class));
            @SuppressWarnings("unchecked")
            Object foobarbaz = ((Map<String, Object>) object).get("foo.bar.baz");
            assertThat(foobarbaz, instanceOf(List.class));
            assertThat(foobarbaz, equalTo(List.of("fizzbuzz", "fizz", "buzz", "quack")));
        });
    }
public void testAppendFieldValueConvertIntegerToList() throws Exception {
doWithRandomAccessPattern((doc) -> {
document.appendFieldValue("int", 456);
Object object = document.getSourceAndMetadata().get("int");
assertThat(object, instanceOf(List.class));
@SuppressWarnings("unchecked")
List<Object> list = (List<Object>) object;
assertThat(list.size(), equalTo(2));
assertThat(list.get(0), equalTo(123));
assertThat(list.get(1), equalTo(456));
});
doWithAccessPattern(FLEXIBLE, (doc) -> {
doc.appendFieldValue("dots.dotted.integers.a", 2);
Object dots = doc.getSourceAndMetadata().get("dots");
assertThat(dots, instanceOf(Map.class));
@SuppressWarnings("unchecked")
Object dottedIntegers = ((Map<String, Object>) dots).get("dotted.integers");
assertThat(dottedIntegers, instanceOf(Map.class));
@SuppressWarnings("unchecked")
Object a = ((Map<String, Object>) dottedIntegers).get("a");
assertThat(a, instanceOf(List.class));
assertThat(a, equalTo(List.of(1, 2)));
});
}
    // Appending multiple integers to an existing scalar integer converts it into [old, new...].
    public void testAppendFieldValuesConvertIntegerToList() throws Exception {
        doWithRandomAccessPattern((doc) -> {
            doc.appendFieldValue("int", List.of(456, 789));
            Object object = doc.getSourceAndMetadata().get("int");
            assertThat(object, instanceOf(List.class));
            @SuppressWarnings("unchecked")
            List<Object> list = (List<Object>) object;
            assertThat(list.size(), equalTo(3));
            assertThat(list.get(0), equalTo(123));
            assertThat(list.get(1), equalTo(456));
            assertThat(list.get(2), equalTo(789));
        });
        doWithAccessPattern(FLEXIBLE, (doc) -> {
            doc.appendFieldValue("dots.dotted.integers.a", List.of(2, 3));
            Object dots = doc.getSourceAndMetadata().get("dots");
            assertThat(dots, instanceOf(Map.class));
            @SuppressWarnings("unchecked")
            Object dottedIntegers = ((Map<String, Object>) dots).get("dotted.integers");
            assertThat(dottedIntegers, instanceOf(Map.class));
            @SuppressWarnings("unchecked")
            Object a = ((Map<String, Object>) dottedIntegers).get("a");
            assertThat(a, instanceOf(List.class));
            assertThat(a, equalTo(List.of(1, 2, 3)));
        });
    }
    // Appending a map value to a field that currently holds a map wraps both into a two-element list.
    public void testAppendFieldValueConvertMapToList() throws Exception {
        doWithRandomAccessPattern((doc) -> {
            doc.appendFieldValue("fizz", Map.of("field", "value"));
            Object object = doc.getSourceAndMetadata().get("fizz");
            assertThat(object, instanceOf(List.class));
            List<?> list = (List<?>) object;
            assertThat(list.size(), equalTo(2));
            assertThat(list.get(0), instanceOf(Map.class));
            @SuppressWarnings("unchecked")
            Map<String, Object> map = (Map<String, Object>) list.get(0);
            assertThat(map.size(), equalTo(4));
            assertThat(list.get(1), equalTo(Map.of("field", "value")));
        });
        doWithAccessPattern(FLEXIBLE, (doc) -> {
            doc.appendFieldValue("dots.dotted.integers", Map.of("x", "y"));
            Object dots = doc.getSourceAndMetadata().get("dots");
            assertThat(dots, instanceOf(Map.class));
            @SuppressWarnings("unchecked")
            Object dottedIntegers = ((Map<String, Object>) dots).get("dotted.integers");
            assertThat(dottedIntegers, instanceOf(List.class));
            List<?> dottedIntegersList = (List<?>) dottedIntegers;
            assertThat(dottedIntegersList.size(), equalTo(2));
            assertThat(dottedIntegersList.get(0), instanceOf(Map.class));
            @SuppressWarnings("unchecked")
            Map<String, Object> originalMap = (Map<String, Object>) dottedIntegersList.get(0);
            assertThat(originalMap.size(), equalTo(5)); // 5 entries in the original map
            assertThat(dottedIntegersList.get(1), equalTo(Map.of("x", "y")));
        });
    }
    // Appending to a field whose current value is null produces a list [null, new_value];
    // the null is preserved rather than replaced.
    public void testAppendFieldValueToNull() throws Exception {
        doWithRandomAccessPattern((doc) -> {
            doc.appendFieldValue("fizz.foo_null", "new_value");
            Object object = doc.getSourceAndMetadata().get("fizz");
            assertThat(object, instanceOf(Map.class));
            @SuppressWarnings("unchecked")
            Map<String, Object> map = (Map<String, Object>) object;
            object = map.get("foo_null");
            assertThat(object, instanceOf(List.class));
            List<?> list = (List<?>) object;
            assertThat(list.size(), equalTo(2));
            assertThat(list.get(0), nullValue());
            assertThat(list.get(1), equalTo("new_value"));
        });
        doWithAccessPattern(FLEXIBLE, (doc) -> {
            // Top-level dotted key "dotted.bar.baz_null" holds null in the fixture document.
            doc.appendFieldValue("dotted.bar.baz_null", "new_value");
            Object object = doc.getSourceAndMetadata().get("dotted.bar.baz_null");
            assertThat(object, instanceOf(List.class));
            List<?> list = (List<?>) object;
            assertThat(list.size(), equalTo(2));
            assertThat(list.get(0), nullValue());
            assertThat(list.get(1), equalTo("new_value"));
        });
    }
    // Classic mode: a numeric path segment indexes into a list, so the append targets the inner
    // list element. Flexible mode does not support numeric list traversal and rejects the path.
    public void testAppendFieldValueToListElement() throws Exception {
        doWithAccessPattern(CLASSIC, (doc) -> {
            doc.appendFieldValue("fizz.list.0", "item2");
            Object object = doc.getSourceAndMetadata().get("fizz");
            assertThat(object, instanceOf(Map.class));
            @SuppressWarnings("unchecked")
            Map<String, Object> map = (Map<String, Object>) object;
            object = map.get("list");
            assertThat(object, instanceOf(List.class));
            @SuppressWarnings("unchecked")
            List<Object> list = (List<Object>) object;
            assertThat(list.size(), equalTo(1));
            object = list.get(0);
            assertThat(object, instanceOf(List.class));
            @SuppressWarnings("unchecked")
            List<String> innerList = (List<String>) object;
            assertThat(innerList.size(), equalTo(2));
            assertThat(innerList.get(0), equalTo("item1"));
            assertThat(innerList.get(1), equalTo("item2"));
        });
        // TODO: Flexible will have a new notation for list indexing - For now it does not support traversing lists
        doWithAccessPattern(FLEXIBLE, (doc) -> {
            var illegalArgument = expectThrows(
                IllegalArgumentException.class,
                () -> doc.appendFieldValue("dots.arrays.dotted.strings.0", "a1")
            );
            assertThat(illegalArgument.getMessage(), equalTo("path [dots.arrays.dotted.strings.0] is not valid"));
        });
    }
    // Same as testAppendFieldValueToListElement but appending several values at once.
    public void testAppendFieldValuesToListElement() throws Exception {
        doWithAccessPattern(CLASSIC, (doc) -> {
            doc.appendFieldValue("fizz.list.0", List.of("item2", "item3", "item4"));
            Object object = doc.getSourceAndMetadata().get("fizz");
            assertThat(object, instanceOf(Map.class));
            @SuppressWarnings("unchecked")
            Map<String, Object> map = (Map<String, Object>) object;
            object = map.get("list");
            assertThat(object, instanceOf(List.class));
            @SuppressWarnings("unchecked")
            List<Object> list = (List<Object>) object;
            assertThat(list.size(), equalTo(1));
            object = list.get(0);
            assertThat(object, instanceOf(List.class));
            @SuppressWarnings("unchecked")
            List<String> innerList = (List<String>) object;
            assertThat(innerList.size(), equalTo(4));
            assertThat(innerList.get(0), equalTo("item1"));
            assertThat(innerList.get(1), equalTo("item2"));
            assertThat(innerList.get(2), equalTo("item3"));
            assertThat(innerList.get(3), equalTo("item4"));
        });
        // TODO: Flexible will have a new notation for list indexing - For now it does not support traversing lists
        doWithAccessPattern(FLEXIBLE, (doc) -> {
            var illegalArgument = expectThrows(
                IllegalArgumentException.class,
                () -> doc.appendFieldValue("dots.arrays.dotted.strings.0", List.of("a1", "a2", "a3"))
            );
            assertThat(illegalArgument.getMessage(), equalTo("path [dots.arrays.dotted.strings.0] is not valid"));
        });
    }
    // Classic mode: appending to a scalar element inside a nested list converts that element
    // into a list [old, new]. Flexible mode rejects numeric list traversal.
    public void testAppendFieldValueConvertStringListElementToList() throws Exception {
        doWithAccessPattern(CLASSIC, (doc) -> {
            doc.appendFieldValue("fizz.list.0.0", "new_value");
            Object object = doc.getSourceAndMetadata().get("fizz");
            assertThat(object, instanceOf(Map.class));
            @SuppressWarnings("unchecked")
            Map<String, Object> map = (Map<String, Object>) object;
            object = map.get("list");
            assertThat(object, instanceOf(List.class));
            @SuppressWarnings("unchecked")
            List<Object> list = (List<Object>) object;
            assertThat(list.size(), equalTo(1));
            object = list.get(0);
            assertThat(object, instanceOf(List.class));
            @SuppressWarnings("unchecked")
            List<Object> innerList = (List<Object>) object;
            object = innerList.get(0);
            assertThat(object, instanceOf(List.class));
            @SuppressWarnings("unchecked")
            List<String> innerInnerList = (List<String>) object;
            assertThat(innerInnerList.size(), equalTo(2));
            assertThat(innerInnerList.get(0), equalTo("item1"));
            assertThat(innerInnerList.get(1), equalTo("new_value"));
        });
        // TODO: Flexible will have a new notation for list indexing - For now it does not support traversing lists
        doWithAccessPattern(FLEXIBLE, (doc) -> {
            var illegalArgument = expectThrows(IllegalArgumentException.class, () -> doc.appendFieldValue("fizz.list.0.0", "new_value"));
            assertThat(illegalArgument.getMessage(), equalTo("path [fizz.list.0.0] is not valid"));
        });
    }
    // Same as testAppendFieldValueConvertStringListElementToList but appending several values.
    public void testAppendFieldValuesConvertStringListElementToList() throws Exception {
        doWithAccessPattern(CLASSIC, (doc) -> {
            doc.appendFieldValue("fizz.list.0.0", List.of("item2", "item3", "item4"));
            Object object = doc.getSourceAndMetadata().get("fizz");
            assertThat(object, instanceOf(Map.class));
            @SuppressWarnings("unchecked")
            Map<String, Object> map = (Map<String, Object>) object;
            object = map.get("list");
            assertThat(object, instanceOf(List.class));
            @SuppressWarnings("unchecked")
            List<Object> list = (List<Object>) object;
            assertThat(list.size(), equalTo(1));
            object = list.get(0);
            assertThat(object, instanceOf(List.class));
            @SuppressWarnings("unchecked")
            List<Object> innerList = (List<Object>) object;
            object = innerList.get(0);
            assertThat(object, instanceOf(List.class));
            @SuppressWarnings("unchecked")
            List<String> innerInnerList = (List<String>) object;
            assertThat(innerInnerList.size(), equalTo(4));
            assertThat(innerInnerList.get(0), equalTo("item1"));
            assertThat(innerInnerList.get(1), equalTo("item2"));
            assertThat(innerInnerList.get(2), equalTo("item3"));
            assertThat(innerInnerList.get(3), equalTo("item4"));
        });
        // TODO: Flexible will have a new notation for list indexing - For now it does not support traversing lists
        doWithAccessPattern(FLEXIBLE, (doc) -> {
            var illegalArgument = expectThrows(
                IllegalArgumentException.class,
                () -> doc.appendFieldValue("fizz.list.0.0", List.of("item2", "item3", "item4"))
            );
            assertThat(illegalArgument.getMessage(), equalTo("path [fizz.list.0.0] is not valid"));
        });
    }
    // Classic mode: appending a map to a list element that holds a map wraps both into a list;
    // the sibling null element at index 1 is left untouched. Flexible mode rejects the path.
    public void testAppendFieldValueListElementConvertMapToList() throws Exception {
        doWithAccessPattern(CLASSIC, (doc) -> {
            doc.appendFieldValue("list.0", Map.of("item2", "value2"));
            Object object = doc.getSourceAndMetadata().get("list");
            assertThat(object, instanceOf(List.class));
            List<?> list = (List<?>) object;
            assertThat(list.size(), equalTo(2));
            assertThat(list.get(0), instanceOf(List.class));
            assertThat(list.get(1), nullValue());
            list = (List<?>) list.get(0);
            assertThat(list.size(), equalTo(2));
            assertThat(list.get(0), equalTo(Map.of("field", "value")));
            assertThat(list.get(1), equalTo(Map.of("item2", "value2")));
        });
        // TODO: Flexible will have a new notation for list indexing - For now it does not support traversing lists
        doWithAccessPattern(FLEXIBLE, (doc) -> {
            var illegalArgument = expectThrows(
                IllegalArgumentException.class,
                () -> doc.appendFieldValue("list.0", Map.of("item2", "value2"))
            );
            assertThat(illegalArgument.getMessage(), equalTo("path [list.0] is not valid"));
        });
    }
    // Classic mode: appending to a null list element converts it into [null, new_value].
    // Flexible mode rejects numeric list traversal.
    public void testAppendFieldValueToNullListElement() throws Exception {
        doWithAccessPattern(CLASSIC, (doc) -> {
            doc.appendFieldValue("list.1", "new_value");
            Object object = doc.getSourceAndMetadata().get("list");
            assertThat(object, instanceOf(List.class));
            List<?> list = (List<?>) object;
            assertThat(list.get(1), instanceOf(List.class));
            list = (List<?>) list.get(1);
            assertThat(list.size(), equalTo(2));
            assertThat(list.get(0), nullValue());
            assertThat(list.get(1), equalTo("new_value"));
        });
        // TODO: Flexible will have a new notation for list indexing - For now it does not support traversing lists
        doWithAccessPattern(FLEXIBLE, (doc) -> {
            var illegalArgument = expectThrows(IllegalArgumentException.class, () -> doc.appendFieldValue("list.1", "new_value"));
            assertThat(illegalArgument.getMessage(), equalTo("path [list.1] is not valid"));
        });
    }
    // Appending a map to a field that is already a list simply adds it as a new element.
    public void testAppendFieldValueToListOfMaps() throws Exception {
        doWithRandomAccessPattern((doc) -> {
            doc.appendFieldValue("list", Map.of("item2", "value2"));
            Object object = doc.getSourceAndMetadata().get("list");
            assertThat(object, instanceOf(List.class));
            @SuppressWarnings("unchecked")
            List<Object> list = (List<Object>) object;
            assertThat(list.size(), equalTo(3));
            assertThat(list.get(0), equalTo(Map.of("field", "value")));
            assertThat(list.get(1), nullValue());
            assertThat(list.get(2), equalTo(Map.of("item2", "value2")));
        });
        doWithAccessPattern(FLEXIBLE, (doc) -> {
            doc.appendFieldValue("dots.arrays.dotted.objects", Map.of("item2", "value2"));
            Object object = doc.getSourceAndMetadata().get("dots");
            assertThat(object, instanceOf(Map.class));
            @SuppressWarnings("unchecked")
            Object arrays = ((Map<String, Object>) object).get("arrays");
            assertThat(arrays, instanceOf(Map.class));
            @SuppressWarnings("unchecked")
            Object dottedObjects = ((Map<String, Object>) arrays).get("dotted.objects");
            assertThat(dottedObjects, instanceOf(List.class));
            @SuppressWarnings("unchecked")
            List<Object> list = (List<Object>) dottedObjects;
            assertThat(list.size(), equalTo(3));
            assertThat(list.get(0), equalTo(Map.of("foo", "bar")));
            assertThat(list.get(1), equalTo(Map.of("baz", "qux")));
            assertThat(list.get(2), equalTo(Map.of("item2", "value2")));
        });
    }
    // Classic mode: setFieldValue with a numeric index replaces that list element in place.
    // Flexible mode rejects numeric list traversal.
    public void testListSetFieldValueIndexProvided() throws Exception {
        doWithAccessPattern(CLASSIC, (doc) -> {
            doc.setFieldValue("list.1", "value");
            Object object = doc.getSourceAndMetadata().get("list");
            assertThat(object, instanceOf(List.class));
            @SuppressWarnings("unchecked")
            List<Object> list = (List<Object>) object;
            assertThat(list.size(), equalTo(2));
            assertThat(list.get(0), equalTo(Map.of("field", "value")));
            assertThat(list.get(1), equalTo("value"));
        });
        // TODO: Flexible will have a new notation for list indexing - For now it does not support traversing lists
        doWithAccessPattern(FLEXIBLE, (doc) -> {
            var illegalArgument = expectThrows(IllegalArgumentException.class, () -> doc.setFieldValue("list.1", "value"));
            assertThat(illegalArgument.getMessage(), equalTo("path [list.1] is not valid"));
        });
    }
    // Classic mode: a list index may appear mid-path; the write lands inside the indexed element.
    // Flexible mode rejects numeric list traversal.
    public void testSetFieldValueListAsPartOfPath() throws Exception {
        doWithAccessPattern(CLASSIC, (doc) -> {
            doc.setFieldValue("list.0.field", "new_value");
            Object object = doc.getSourceAndMetadata().get("list");
            assertThat(object, instanceOf(List.class));
            @SuppressWarnings("unchecked")
            List<Object> list = (List<Object>) object;
            assertThat(list.size(), equalTo(2));
            assertThat(list.get(0), equalTo(Map.of("field", "new_value")));
            assertThat(list.get(1), nullValue());
        });
        // TODO: Flexible will have a new notation for list indexing - For now it does not support traversing lists
        doWithAccessPattern(FLEXIBLE, (doc) -> {
            var illegalArgument = expectThrows(IllegalArgumentException.class, () -> doc.setFieldValue("list.0.field", "new_value"));
            assertThat(illegalArgument.getMessage(), equalTo("path [list.0.field] is not valid"));
        });
    }
public void testListSetFieldValueIndexNotNumeric() throws Exception {
try {
doWithAccessPattern(CLASSIC, (doc) -> doc.setFieldValue("list.test", "value"));
} catch (IllegalArgumentException e) {
assertThat(e.getMessage(), equalTo("[test] is not an integer, cannot be used as an index as part of path [list.test]"));
}
try {
doWithAccessPattern(CLASSIC, (doc) -> doc.setFieldValue("list.test.field", "new_value"));
} catch (IllegalArgumentException e) {
assertThat(e.getMessage(), equalTo("[test] is not an integer, cannot be used as an index as part of path [list.test.field]"));
}
try {
doWithAccessPattern(FLEXIBLE, (doc) -> doc.setFieldValue("list.test", "value"));
} catch (IllegalArgumentException e) {
assertThat(e.getMessage(), equalTo("path [list.test] is not valid"));
}
try {
doWithAccessPattern(FLEXIBLE, (doc) -> doc.setFieldValue("list.test.field", "new_value"));
} catch (IllegalArgumentException e) {
assertThat(e.getMessage(), equalTo("path [list.test.field] is not valid"));
}
}
public void testListSetFieldValueIndexOutOfBounds() throws Exception {
try {
doWithAccessPattern(CLASSIC, (doc) -> doc.setFieldValue("list.10", "value"));
} catch (IllegalArgumentException e) {
assertThat(e.getMessage(), equalTo("[10] is out of bounds for array with length [2] as part of path [list.10]"));
}
try {
doWithAccessPattern(CLASSIC, (doc) -> doc.setFieldValue("list.10.field", "value"));
} catch (IllegalArgumentException e) {
assertThat(e.getMessage(), equalTo("[10] is out of bounds for array with length [2] as part of path [list.10.field]"));
}
try {
doWithAccessPattern(FLEXIBLE, (doc) -> doc.setFieldValue("list.10", "value"));
} catch (IllegalArgumentException e) {
assertThat(e.getMessage(), equalTo("path [list.10] is not valid"));
}
try {
doWithAccessPattern(FLEXIBLE, (doc) -> doc.setFieldValue("list.10.field", "value"));
} catch (IllegalArgumentException e) {
assertThat(e.getMessage(), equalTo("path [list.10.field] is not valid"));
}
}
public void testSetFieldValueEmptyName() throws Exception {
try {
doWithRandomAccessPattern((doc) -> doc.setFieldValue("", "bar"));
fail("add field should have failed");
} catch (IllegalArgumentException e) {
assertThat(e.getMessage(), equalTo("path cannot be null nor empty"));
}
}
    // removeField works for source fields, metadata fields, explicit _source.-prefixed paths,
    // and _ingest metadata; each removal shrinks the corresponding map by one entry.
    public void testRemoveField() throws Exception {
        doWithRandomAccessPattern((doc) -> {
            doc.removeField("foo");
            assertThat(doc.getSourceAndMetadata().size(), equalTo(14));
            assertThat(doc.getSourceAndMetadata().containsKey("foo"), equalTo(false));
            doc.removeField("_index");
            assertThat(doc.getSourceAndMetadata().size(), equalTo(13));
            assertThat(doc.getSourceAndMetadata().containsKey("_index"), equalTo(false));
            doc.removeField("_source.fizz");
            assertThat(doc.getSourceAndMetadata().size(), equalTo(12));
            assertThat(doc.getSourceAndMetadata().containsKey("fizz"), equalTo(false));
            assertThat(doc.getIngestMetadata().size(), equalTo(2));
            doc.removeField("_ingest.timestamp");
            // Removing ingest metadata shrinks only the ingest map, not source-and-metadata.
            assertThat(doc.getSourceAndMetadata().size(), equalTo(12));
            assertThat(doc.getIngestMetadata().size(), equalTo(1));
            doc.removeField("_ingest.pipeline");
            assertThat(doc.getSourceAndMetadata().size(), equalTo(12));
            assertThat(doc.getIngestMetadata().size(), equalTo(0));
        });
        doWithAccessPattern(FLEXIBLE, (doc) -> {
            doc.removeField("dotted.bar.baz");
            assertThat(doc.getSourceAndMetadata().size(), equalTo(11));
            assertThat(doc.getSourceAndMetadata().containsKey("dotted.bar.baz"), equalTo(false));
        });
    }
    // removeField with ignoreMissing: existing fields are removed regardless of the flag; for
    // missing fields, ignoreMissing=false throws (with a pattern-specific message) while
    // ignoreMissing=true is a no-op. One of three error scenarios is picked at random per run.
    public void testRemoveFieldIgnoreMissing() throws Exception {
        doWithRandomAccessPattern((doc) -> {
            doc.removeField("foo", randomBoolean());
            assertThat(doc.getSourceAndMetadata().size(), equalTo(14));
            assertThat(doc.getSourceAndMetadata().containsKey("foo"), equalTo(false));
            doc.removeField("_index", randomBoolean());
            assertThat(doc.getSourceAndMetadata().size(), equalTo(13));
            assertThat(doc.getSourceAndMetadata().containsKey("_index"), equalTo(false));
        });
        // if ignoreMissing is false, we throw an exception for values that aren't found
        switch (randomIntBetween(0, 2)) {
            case 0 -> doWithRandomAccessPattern((doc) -> {
                doc.setFieldValue("fizz.some", (Object) null);
                IllegalArgumentException e = expectThrows(
                    IllegalArgumentException.class,
                    () -> doc.removeField("fizz.some.nonsense", false)
                );
                assertThat(e.getMessage(), is("cannot remove [nonsense] from null as part of path [fizz.some.nonsense]"));
            });
            case 1 -> {
                // Different error messages for each access pattern when trying to remove an element from a list incorrectly
                doWithAccessPattern(CLASSIC, (doc) -> {
                    doc.setFieldValue("fizz.some", List.of("foo", "bar"));
                    IllegalArgumentException e = expectThrows(
                        IllegalArgumentException.class,
                        () -> doc.removeField("fizz.some.nonsense", false)
                    );
                    assertThat(
                        e.getMessage(),
                        is("[nonsense] is not an integer, cannot be used as an index as part of path [fizz.some.nonsense]")
                    );
                });
                doWithAccessPattern(FLEXIBLE, (doc) -> {
                    doc.setFieldValue("fizz.other", List.of("foo", "bar"));
                    IllegalArgumentException e = expectThrows(
                        IllegalArgumentException.class,
                        () -> doc.removeField("fizz.other.nonsense", false)
                    );
                    assertThat(e.getMessage(), is("path [fizz.other.nonsense] is not valid"));
                });
            }
            case 2 -> {
                // Different error messages when removing a nested field that does not exist
                doWithAccessPattern(CLASSIC, (doc) -> {
                    IllegalArgumentException e = expectThrows(
                        IllegalArgumentException.class,
                        () -> doc.removeField("fizz.some.nonsense", false)
                    );
                    assertThat(e.getMessage(), is("field [some] not present as part of path [fizz.some.nonsense]"));
                });
                doWithAccessPattern(FLEXIBLE, (doc) -> {
                    IllegalArgumentException e = expectThrows(
                        IllegalArgumentException.class,
                        () -> doc.removeField("fizz.some.nonsense", false)
                    );
                    assertThat(e.getMessage(), is("field [some.nonsense] not present as part of path [fizz.some.nonsense]"));
                });
            }
            default -> throw new AssertionError("failure, got illegal switch case");
        }
        // but no exception is thrown if ignoreMissing is true
        doWithRandomAccessPattern((doc) -> doc.removeField("fizz.some.nonsense", true));
    }
    // Removing nested fields shrinks only the inner container; the parent entry and the overall
    // source-and-metadata size remain stable throughout.
    public void testRemoveInnerField() throws Exception {
        doWithRandomAccessPattern((doc) -> {
            doc.removeField("fizz.buzz");
            assertThat(doc.getSourceAndMetadata().size(), equalTo(15));
            assertThat(doc.getSourceAndMetadata().get("fizz"), instanceOf(Map.class));
            @SuppressWarnings("unchecked")
            Map<String, Object> map = (Map<String, Object>) doc.getSourceAndMetadata().get("fizz");
            assertThat(map.size(), equalTo(3));
            assertThat(map.containsKey("buzz"), equalTo(false));
            doc.removeField("fizz.foo_null");
            assertThat(map.size(), equalTo(2));
            assertThat(doc.getSourceAndMetadata().size(), equalTo(15));
            assertThat(doc.getSourceAndMetadata().containsKey("fizz"), equalTo(true));
            // "1" here is a literal map key inside fizz, not a list index.
            doc.removeField("fizz.1");
            assertThat(map.size(), equalTo(1));
            assertThat(doc.getSourceAndMetadata().size(), equalTo(15));
            assertThat(doc.getSourceAndMetadata().containsKey("fizz"), equalTo(true));
            doc.removeField("fizz.list");
            assertThat(map.size(), equalTo(0));
            assertThat(doc.getSourceAndMetadata().size(), equalTo(15));
            assertThat(doc.getSourceAndMetadata().containsKey("fizz"), equalTo(true));
        });
        doWithAccessPattern(FLEXIBLE, (doc) -> {
            doc.removeField("dots.foo.bar.baz");
            assertThat(doc.getSourceAndMetadata().size(), equalTo(15));
            assertThat(doc.getSourceAndMetadata().get("dots"), instanceOf(Map.class));
            @SuppressWarnings("unchecked")
            Map<String, Object> dots = (Map<String, Object>) doc.getSourceAndMetadata().get("dots");
            assertThat(dots.size(), equalTo(5));
            assertThat(dots.containsKey("foo.bar.baz"), equalTo(false));
            doc.removeField("dots.foo.bar.null");
            assertThat(dots.size(), equalTo(4));
            assertThat(dots.containsKey("foo.bar.null"), equalTo(false));
            assertThat(doc.getSourceAndMetadata().size(), equalTo(15));
            assertThat(doc.getSourceAndMetadata().containsKey("dots"), equalTo(true));
            doc.removeField("dots.arrays.dotted.strings");
            @SuppressWarnings("unchecked")
            Map<String, Object> arrays = (Map<String, Object>) dots.get("arrays");
            assertThat(dots.size(), equalTo(4));
            assertThat(arrays.size(), equalTo(2));
            assertThat(arrays.containsKey("dotted.strings"), equalTo(false));
            assertThat(doc.getSourceAndMetadata().size(), equalTo(15));
            assertThat(doc.getSourceAndMetadata().containsKey("dots"), equalTo(true));
        });
    }
    // Removing a missing field throws; the reported path segment differs per access pattern
    // (classic names the first missing leaf, flexible names the full dotted key).
    public void testRemoveNonExistingField() throws Exception {
        try {
            doWithRandomAccessPattern((doc) -> doc.removeField("does_not_exist"));
            fail("remove field should have failed");
        } catch (IllegalArgumentException e) {
            assertThat(e.getMessage(), equalTo("field [does_not_exist] not present as part of path [does_not_exist]"));
        }
        try {
            doWithAccessPattern(CLASSIC, (doc) -> doc.removeField("does.not.exist"));
            fail("remove field should have failed");
        } catch (IllegalArgumentException e) {
            assertThat(e.getMessage(), equalTo("field [does] not present as part of path [does.not.exist]"));
        }
        try {
            doWithAccessPattern(FLEXIBLE, (doc) -> doc.removeField("does.not.exist"));
            fail("remove field should have failed");
        } catch (IllegalArgumentException e) {
            assertThat(e.getMessage(), equalTo("field [does.not.exist] not present as part of path [does.not.exist]"));
        }
    }
    // Removing through a parent that is a scalar (not a container) reports the type mismatch.
    public void testRemoveExistingParentTypeMismatch() throws Exception {
        try {
            doWithRandomAccessPattern((doc) -> doc.removeField("foo.foo.bar"));
            fail("remove field should have failed");
        } catch (IllegalArgumentException e) {
            assertThat(
                e.getMessage(),
                equalTo("cannot resolve [foo] from object of type [java.lang.String] as part of path [foo.foo.bar]")
            );
        }
        try {
            doWithAccessPattern(FLEXIBLE, (doc) -> doc.removeField("dots.foo.bar.baz.qux.quux"));
            fail("remove field should have failed");
        } catch (IllegalArgumentException e) {
            assertThat(
                e.getMessage(),
                equalTo("cannot resolve [qux] from object of type [java.lang.String] as part of path [dots.foo.bar.baz.qux.quux]")
            );
        }
    }
public void testRemoveSourceObject() throws Exception {
try {
doWithRandomAccessPattern((doc) -> doc.removeField("_source"));
fail("remove field should have failed");
} catch (IllegalArgumentException e) {
assertThat(e.getMessage(), equalTo("field [_source] not present as part of path [_source]"));
}
}
public void testRemoveIngestObject() throws Exception {
doWithRandomAccessPattern((doc) -> {
doc.removeField("_ingest");
assertThat(doc.getSourceAndMetadata().size(), equalTo(14));
assertThat(doc.getSourceAndMetadata().containsKey("_ingest"), equalTo(false));
});
}
public void testRemoveEmptyPathAfterStrippingOutPrefix() throws Exception {
try {
doWithRandomAccessPattern((doc) -> doc.removeField("_source."));
fail("set field value should have failed");
} catch (IllegalArgumentException e) {
assertThat(e.getMessage(), equalTo("path [_source.] is not valid"));
}
try {
doWithRandomAccessPattern((doc) -> doc.removeField("_ingest."));
fail("set field value should have failed");
} catch (IllegalArgumentException e) {
assertThat(e.getMessage(), equalTo("path [_ingest.] is not valid"));
}
}
public void testListRemoveField() throws Exception {
doWithAccessPattern(CLASSIC, (doc) -> {
doc.removeField("list.0.field");
assertThat(doc.getSourceAndMetadata().size(), equalTo(15));
assertThat(doc.getSourceAndMetadata().containsKey("list"), equalTo(true));
Object object = doc.getSourceAndMetadata().get("list");
assertThat(object, instanceOf(List.class));
@SuppressWarnings("unchecked")
List<Object> list = (List<Object>) object;
assertThat(list.size(), equalTo(2));
object = list.get(0);
assertThat(object, instanceOf(Map.class));
@SuppressWarnings("unchecked")
Map<String, Object> map = (Map<String, Object>) object;
assertThat(map.size(), equalTo(0));
document.removeField("list.0");
assertThat(list.size(), equalTo(1));
assertThat(list.get(0), nullValue());
});
// TODO: Flexible will have a new notation for list indexing - For now it does not support traversing lists
doWithAccessPattern(FLEXIBLE, (doc) -> {
var illegalArgument = expectThrows(IllegalArgumentException.class, () -> doc.removeField("list.0.field"));
assertThat(illegalArgument.getMessage(), equalTo("path [list.0.field] is not valid"));
});
}
public void testRemoveFieldValueNotFoundNullParent() throws Exception {
try {
doWithRandomAccessPattern((doc) -> doc.removeField("fizz.foo_null.not_there"));
fail("get field value should have failed");
} catch (IllegalArgumentException e) {
assertThat(e.getMessage(), equalTo("cannot remove [not_there] from null as part of path [fizz.foo_null.not_there]"));
}
try {
doWithAccessPattern(FLEXIBLE, (doc) -> doc.removeField("dots.foo.bar.null.not_there"));
fail("get field value should have failed");
} catch (IllegalArgumentException e) {
assertThat(e.getMessage(), equalTo("cannot remove [not_there] from null as part of path [dots.foo.bar.null.not_there]"));
}
}
public void testNestedRemoveFieldTypeMismatch() throws Exception {
try {
doWithRandomAccessPattern((doc) -> doc.removeField("fizz.1.bar"));
} catch (IllegalArgumentException e) {
assertThat(e.getMessage(), equalTo("cannot remove [bar] from object of type [java.lang.String] as part of path [fizz.1.bar]"));
}
try {
doWithAccessPattern(FLEXIBLE, (doc) -> doc.removeField("dots.dotted.integers.a.bar.baz"));
} catch (IllegalArgumentException e) {
assertThat(
e.getMessage(),
equalTo("cannot resolve [bar] from object of type [java.lang.Integer] as part of path [dots.dotted.integers.a.bar.baz]")
);
}
}
public void testListRemoveFieldIndexNotNumeric() throws Exception {
try {
doWithAccessPattern(CLASSIC, (doc) -> doc.removeField("list.test"));
} catch (IllegalArgumentException e) {
assertThat(e.getMessage(), equalTo("[test] is not an integer, cannot be used as an index as part of path [list.test]"));
}
// Flexible mode does not allow for interactions with arrays yet
try {
doWithAccessPattern(FLEXIBLE, (doc) -> doc.removeField("list.test"));
} catch (IllegalArgumentException e) {
assertThat(e.getMessage(), equalTo("path [list.test] is not valid"));
}
}
public void testListRemoveFieldIndexOutOfBounds() throws Exception {
try {
doWithAccessPattern(CLASSIC, (doc) -> doc.removeField("list.10"));
} catch (IllegalArgumentException e) {
assertThat(e.getMessage(), equalTo("[10] is out of bounds for array with length [2] as part of path [list.10]"));
}
// Flexible mode does not allow for interactions with arrays yet
try {
doWithAccessPattern(FLEXIBLE, (doc) -> doc.removeField("list.10"));
} catch (IllegalArgumentException e) {
assertThat(e.getMessage(), equalTo("path [list.10] is not valid"));
}
}
public void testRemoveNullField() throws Exception {
try {
doWithRandomAccessPattern((doc) -> doc.removeField(null));
fail("remove field should have failed");
} catch (IllegalArgumentException e) {
assertThat(e.getMessage(), equalTo("path cannot be null nor empty"));
}
}
public void testRemoveEmptyField() throws Exception {
try {
doWithRandomAccessPattern((doc) -> doc.removeField(""));
fail("remove field should have failed");
} catch (IllegalArgumentException e) {
assertThat(e.getMessage(), equalTo("path cannot be null nor empty"));
}
}
public void testIngestMetadataTimestamp() {
long before = System.currentTimeMillis();
IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random());
long after = System.currentTimeMillis();
ZonedDateTime timestamp = (ZonedDateTime) ingestDocument.getIngestMetadata().get(IngestDocument.TIMESTAMP);
long actualMillis = timestamp.toInstant().toEpochMilli();
assertThat(timestamp, notNullValue());
assertThat(actualMillis, greaterThanOrEqualTo(before));
assertThat(actualMillis, lessThanOrEqualTo(after));
}
public void testCopyConstructor() {
{
// generic test with a random document and copy
IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random());
IngestDocument copy = new IngestDocument(ingestDocument);
// these fields should not be the same instance
assertThat(ingestDocument.getSourceAndMetadata(), not(sameInstance(copy.getSourceAndMetadata())));
assertThat(ingestDocument.getCtxMap(), not(sameInstance(copy.getCtxMap())));
assertThat(ingestDocument.getCtxMap().getMetadata(), not(sameInstance(copy.getCtxMap().getMetadata())));
// but the two objects should be very much equal to each other
assertIngestDocument(ingestDocument, copy);
}
{
// manually punch in a few values
IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random());
ingestDocument.setFieldValue("_index", "foo1");
ingestDocument.setFieldValue("_id", "bar1");
ingestDocument.setFieldValue("hello", "world1");
IngestDocument copy = new IngestDocument(ingestDocument);
// make sure the copy matches
assertIngestDocument(ingestDocument, copy);
// change the copy
copy.setFieldValue("_index", "foo2");
copy.setFieldValue("_id", "bar2");
copy.setFieldValue("hello", "world2");
// the original shouldn't have changed
assertThat(ingestDocument.getFieldValue("_index", String.class), equalTo("foo1"));
assertThat(ingestDocument.getFieldValue("_id", String.class), equalTo("bar1"));
assertThat(ingestDocument.getFieldValue("hello", String.class), equalTo("world1"));
}
{
// the copy constructor rejects self-references
IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random());
List<Object> someList = new ArrayList<>();
someList.add("some string");
someList.add(someList); // the list contains itself
ingestDocument.setFieldValue("someList", someList);
Exception e = expectThrows(IllegalArgumentException.class, () -> new IngestDocument(ingestDocument));
assertThat(e.getMessage(), equalTo("Iterable object is self-referencing itself"));
}
}
public void testCopyConstructorWithExecutedPipelines() {
/*
* This is similar to the first part of testCopyConstructor, except that we're executing a pipeilne, and running the
* assertions inside the processor so that we can test that executedPipelines is correct.
*/
IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random());
TestProcessor processor = new TestProcessor(ingestDocument1 -> {
assertThat(ingestDocument1.getPipelineStack().size(), equalTo(1));
IngestDocument copy = new IngestDocument(ingestDocument1);
assertThat(ingestDocument1.getSourceAndMetadata(), not(sameInstance(copy.getSourceAndMetadata())));
assertThat(ingestDocument1.getCtxMap(), not(sameInstance(copy.getCtxMap())));
assertThat(ingestDocument1.getCtxMap().getMetadata(), not(sameInstance(copy.getCtxMap().getMetadata())));
assertIngestDocument(ingestDocument1, copy);
assertThat(copy.getPipelineStack(), equalTo(ingestDocument1.getPipelineStack()));
});
Pipeline pipeline = new Pipeline("pipeline1", "test pipeline", 1, Map.of(), new CompoundProcessor(processor));
ingestDocument.executePipeline(pipeline, (ingestDocument1, exception) -> {
assertNotNull(ingestDocument1);
assertNull(exception);
});
assertThat(processor.getInvokedCounter(), equalTo(1));
}
public void testCopyConstructorWithZonedDateTime() {
ZoneId timezone = ZoneId.of("Europe/London");
Map<String, Object> sourceAndMetadata = new HashMap<>();
sourceAndMetadata.put("beforeClockChange", ZonedDateTime.ofInstant(Instant.ofEpochSecond(1509237000), timezone));
sourceAndMetadata.put("afterClockChange", ZonedDateTime.ofInstant(Instant.ofEpochSecond(1509240600), timezone));
IngestDocument original = TestIngestDocument.withDefaultVersion(sourceAndMetadata);
IngestDocument copy = new IngestDocument(original);
assertThat(copy.getSourceAndMetadata().get("beforeClockChange"), equalTo(original.getSourceAndMetadata().get("beforeClockChange")));
assertThat(copy.getSourceAndMetadata().get("afterClockChange"), equalTo(original.getSourceAndMetadata().get("afterClockChange")));
}
public void testSetInvalidSourceField() {
Map<String, Object> document = new HashMap<>();
Object randomObject = randomFrom(new ArrayList<>(), new HashMap<>(), 12, 12.34);
document.put("source_field", randomObject);
IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document);
try {
ingestDocument.getFieldValueAsBytes("source_field");
fail("Expected an exception due to invalid source field, but did not happen");
} catch (IllegalArgumentException e) {
String expectedClassName = randomObject.getClass().getName();
assertThat(
e.getMessage(),
containsString("field [source_field] of unknown type [" + expectedClassName + "], must be string or byte array")
);
}
}
public void testDeepCopy() {
IngestDocument copiedDoc = new IngestDocument(
IngestDocument.deepCopyMap(document.getSourceAndMetadata()),
IngestDocument.deepCopyMap(document.getIngestMetadata())
);
assertArrayEquals(
copiedDoc.getFieldValue(DOUBLE_ARRAY_FIELD, double[].class),
document.getFieldValue(DOUBLE_ARRAY_FIELD, double[].class),
1e-10
);
assertArrayEquals(
copiedDoc.getFieldValue(DOUBLE_DOUBLE_ARRAY_FIELD, double[][].class),
document.getFieldValue(DOUBLE_DOUBLE_ARRAY_FIELD, double[][].class)
);
}
public void testGetAllFields() {
Map<String, Object> address = new HashMap<>();
address.put("street", "Ipiranga Street");
address.put("number", 123);
Map<String, Object> source = new HashMap<>();
source.put("_id", "a123");
source.put("name", "eric clapton");
source.put("address", address);
source.put("name.display", "Eric Clapton");
Set<String> result = IngestDocument.getAllFields(source);
assertThat(result, containsInAnyOrder("_id", "name", "address", "address.street", "address.number", "name.display"));
}
public void testIsMetadata() {
assertTrue(IngestDocument.Metadata.isMetadata("_type"));
assertTrue(IngestDocument.Metadata.isMetadata("_index"));
assertTrue(IngestDocument.Metadata.isMetadata("_version"));
assertFalse(IngestDocument.Metadata.isMetadata("name"));
assertFalse(IngestDocument.Metadata.isMetadata("address"));
}
public void testIndexHistory() {
// the index history contains the original index
String index1 = document.getFieldValue("_index", String.class);
assertThat(index1, equalTo("index"));
assertThat(document.getIndexHistory(), Matchers.contains(index1));
// it can be updated to include another index
String index2 = "another_index";
assertTrue(document.updateIndexHistory(index2));
assertThat(document.getIndexHistory(), Matchers.contains(index1, index2));
// an index cycle cannot be introduced, however
assertFalse(document.updateIndexHistory(index1));
assertThat(document.getIndexHistory(), Matchers.contains(index1, index2));
}
public void testSourceHashMapIsNotCopied() {
// an ingest document's ctxMap will, as an optimization, just use the passed-in map reference
{
Map<String, Object> source = new HashMap<>(Map.of("foo", 1));
IngestDocument document = new IngestDocument("index", "id", 1, null, null, source);
assertThat(document.getSource(), sameInstance(source));
assertThat(document.getCtxMap().getSource(), sameInstance(source));
}
{
Map<String, Object> source = XContentHelper.convertToMap(new BytesArray("{ \"foo\": 1 }"), false, XContentType.JSON).v2();
IngestDocument document = new IngestDocument("index", "id", 1, null, null, source);
assertThat(document.getSource(), sameInstance(source));
assertThat(document.getCtxMap().getSource(), sameInstance(source));
}
{
Map<String, Object> source = Map.of("foo", 1);
IngestDocument document = new IngestDocument("index", "id", 1, null, null, source);
assertThat(document.getSource(), sameInstance(source));
assertThat(document.getCtxMap().getSource(), sameInstance(source));
}
// a cloned ingest document will copy the map, though
{
Map<String, Object> source = Map.of("foo", 1);
IngestDocument document1 = new IngestDocument("index", "id", 1, null, null, source);
document1.getIngestMetadata().put("bar", 2);
IngestDocument document2 = new IngestDocument(document1);
assertThat(document2.getCtxMap().getMetadata(), equalTo(document1.getCtxMap().getMetadata()));
assertThat(document2.getSource(), not(sameInstance(source)));
assertThat(document2.getCtxMap().getMetadata(), equalTo(document1.getCtxMap().getMetadata()));
assertThat(document2.getCtxMap().getSource(), not(sameInstance(source)));
// it also copies these other nearby maps
assertThat(document2.getIngestMetadata(), equalTo(document1.getIngestMetadata()));
assertThat(document2.getIngestMetadata(), not(sameInstance(document1.getIngestMetadata())));
assertThat(document2.getCtxMap().getMetadata(), not(sameInstance(document1.getCtxMap().getMetadata())));
assertThat(document2.getCtxMap().getMetadata(), not(sameInstance(document1.getCtxMap().getMetadata())));
}
}
/**
* When executing nested pipelines on an ingest document, the document should keep track of each pipeline's access pattern for the
* lifetime of each pipeline execution. When a pipeline execution concludes, it should clear access pattern from the document and
* restore the previous pipeline's access pattern.
*/
public void testNestedAccessPatternPropagation() {
Map<String, Object> source = new HashMap<>(Map.of("foo", 1));
IngestDocument document = new IngestDocument("index", "id", 1, null, null, source);
// 1-3 nested calls
doTestNestedAccessPatternPropagation(0, randomIntBetween(1, 5), document);
// At the end of the test, there should be neither pipeline ids nor access patterns left in the stack.
assertThat(document.getPipelineStack(), is(empty()));
assertThat(document.getCurrentAccessPattern().isEmpty(), is(true));
}
/**
* Recursively execute some number of pipelines at various call depths to simulate a robust chain of pipelines being called on a
* document.
* @param level The current call depth. This is how many pipelines deep into the nesting we are.
* @param maxCallDepth How much further in the call depth we should go in the test. If this is greater than the current level, we will
* recurse in at least one of the pipelines executed at this level. If the current level is equal to the max call
* depth we will run some pipelines but recurse no further before returning.
* @param document The document to repeatedly use and verify against.
*/
void doTestNestedAccessPatternPropagation(int level, int maxCallDepth, IngestDocument document) {
// 1-5 pipelines to be run at any given level
logger.debug("LEVEL {}/{}: BEGIN", level, maxCallDepth);
int pipelinesAtThisLevel = randomIntBetween(1, 7);
logger.debug("Run pipelines: {}", pipelinesAtThisLevel);
boolean recursed = false;
if (level >= maxCallDepth) {
// If we're at max call depth, do no recursions
recursed = true;
logger.debug("No more recursions");
}
for (int pipelineIdx = 0; pipelineIdx < pipelinesAtThisLevel; pipelineIdx++) {
String expectedPipelineId = randomAlphaOfLength(20);
IngestPipelineFieldAccessPattern expectedAccessPattern = randomFrom(IngestPipelineFieldAccessPattern.values());
// We mock the pipeline because it's easier to verify calls and doesn't
// need us to force a stall in the execution logic to half apply it.
Pipeline mockPipeline = mock(Pipeline.class);
when(mockPipeline.getId()).thenReturn(expectedPipelineId);
when(mockPipeline.getProcessors()).thenReturn(List.of(new TestProcessor((doc) -> {})));
when(mockPipeline.getFieldAccessPattern()).thenReturn(expectedAccessPattern);
@SuppressWarnings("unchecked")
BiConsumer<IngestDocument, Exception> mockHandler = mock(BiConsumer.class);
// Execute pipeline
logger.debug("LEVEL {}/{}: Executing {}/{}", level, maxCallDepth, pipelineIdx, pipelinesAtThisLevel);
document.executePipeline(mockPipeline, mockHandler);
// Verify pipeline was called, capture completion handler
ArgumentCaptor<BiConsumer<IngestDocument, Exception>> argumentCaptor = ArgumentCaptor.captor();
verify(mockPipeline).execute(eq(document), argumentCaptor.capture());
// Assert expected state
assertThat(document.getPipelineStack().getFirst(), is(expectedPipelineId));
assertThat(document.getCurrentAccessPattern().isPresent(), is(true));
assertThat(document.getCurrentAccessPattern().get(), is(expectedAccessPattern));
// Randomly recurse: We recurse only one time per level to avoid hogging test time, but we randomize which
// pipeline to recurse on, eventually requiring a recursion on the last pipeline run if one hasn't happened yet.
if (recursed == false && (randomBoolean() || pipelineIdx == pipelinesAtThisLevel - 1)) {
logger.debug("Recursed on pipeline {}", pipelineIdx);
doTestNestedAccessPatternPropagation(level + 1, maxCallDepth, document);
recursed = true;
}
// Pull up the captured completion handler to conclude the pipeline run
argumentCaptor.getValue().accept(document, null);
// Assert expected state
assertThat(document.getPipelineStack().size(), is(equalTo(level)));
if (level == 0) {
// Top level means access pattern should be empty
assertThat(document.getCurrentAccessPattern().isEmpty(), is(true));
} else {
// If we're nested below the top level we should still have an access
// pattern on the document for the pipeline above us
assertThat(document.getCurrentAccessPattern().isPresent(), is(true));
}
}
logger.debug("LEVEL {}/{}: COMPLETE", level, maxCallDepth);
}
@SuppressWarnings("unchecked")
public void testGetUnmodifiableSourceAndMetadata() {
assertMutatingThrows(ctx -> ctx.remove("foo"));
assertMutatingThrows(ctx -> ctx.put("foo", "bar"));
assertMutatingThrows(ctx -> ((List<Object>) ctx.get("listField")).add("bar"));
assertMutatingThrows(ctx -> ((List<Object>) ctx.get("listField")).remove("bar"));
assertMutatingThrows(ctx -> ((Set<Object>) ctx.get("setField")).add("bar"));
assertMutatingThrows(ctx -> ((Set<Object>) ctx.get("setField")).remove("bar"));
assertMutatingThrows(ctx -> ((Map<String, Object>) ctx.get("mapField")).put("bar", "baz"));
assertMutatingThrows(ctx -> ((Map<?, ?>) ctx.get("mapField")).remove("bar"));
assertMutatingThrows(ctx -> ((List<Object>) ((Set<Object>) ctx.get("setField")).iterator().next()).add("bar"));
assertMutatingThrows(
ctx -> ((List<Object>) ((List<Object>) ((Set<Object>) ctx.get("setField")).iterator().next()).iterator().next()).add("bar")
);
/*
* The source can also have a byte array. But we do not throw an UnsupportedOperationException when a byte array is changed --
* we just ignore the change.
*/
Map<String, Object> document = new HashMap<>();
IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document);
ingestDocument.setFieldValue("byteArrayField", randomByteArrayOfLength(10));
Map<String, Object> unmodifiableDocument = ingestDocument.getUnmodifiableSourceAndMetadata();
byte originalByteValue = ((byte[]) unmodifiableDocument.get("byteArrayField"))[0];
((byte[]) unmodifiableDocument.get("byteArrayField"))[0] = (byte) (originalByteValue + 1);
assertThat(((byte[]) unmodifiableDocument.get("byteArrayField"))[0], equalTo(originalByteValue));
}
@SuppressWarnings("unchecked")
public void assertMutatingThrows(Consumer<Map<String, Object>> mutation) {
Map<String, Object> document = new HashMap<>();
IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document);
ingestDocument.setFieldValue("listField", new ArrayList<>());
ingestDocument.setFieldValue("mapField", new HashMap<>());
ingestDocument.setFieldValue("setField", new HashSet<>());
List<Object> listWithinSet = new ArrayList<>();
listWithinSet.add(new ArrayList<>());
ingestDocument.getFieldValue("setField", Set.class).add(listWithinSet);
Map<String, Object> unmodifiableDocument = ingestDocument.getUnmodifiableSourceAndMetadata();
assertThrows(UnsupportedOperationException.class, () -> mutation.accept(unmodifiableDocument));
mutation.accept(ingestDocument.getSourceAndMetadata()); // no exception expected
}
}
|
IngestDocumentTests
|
java
|
spring-projects__spring-framework
|
spring-context/src/test/java/org/springframework/context/annotation/AnnotationBeanNameGeneratorTests.java
|
{
"start": 8013,
"end": 8127
}
|
class ____ {
}
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.TYPE)
@Component
@
|
ComponentFromStringMeta
|
java
|
apache__camel
|
components/camel-netty/src/main/java/org/apache/camel/component/netty/codec/ObjectEncoder.java
|
{
"start": 1049,
"end": 1290
}
|
class ____ extends io.netty.handler.codec.serialization.ObjectEncoder {
@Override
public void encode(ChannelHandlerContext ctx, Serializable msg, ByteBuf out) throws Exception {
super.encode(ctx, msg, out);
}
}
|
ObjectEncoder
|
java
|
reactor__reactor-core
|
reactor-core/src/main/java/reactor/core/publisher/FluxDefaultIfEmpty.java
|
{
"start": 1062,
"end": 1602
}
|
class ____<T> extends InternalFluxOperator<T, T> {
final T value;
FluxDefaultIfEmpty(Flux<? extends T> source, T value) {
super(source);
this.value = Objects.requireNonNull(value, "value");
}
@Override
public CoreSubscriber<? super T> subscribeOrReturn(CoreSubscriber<? super T> actual) {
return new DefaultIfEmptySubscriber<>(actual, value);
}
@Override
public @Nullable Object scanUnsafe(Attr key) {
if (key == Attr.RUN_STYLE) return Attr.RunStyle.SYNC;
return super.scanUnsafe(key);
}
static final
|
FluxDefaultIfEmpty
|
java
|
google__guava
|
android/guava/src/com/google/common/cache/LocalCache.java
|
{
"start": 42449,
"end": 44584
}
|
class ____<K, V> extends WeakEntry<K, V> {
WeakAccessWriteEntry(
ReferenceQueue<K> queue, K key, int hash, @Nullable ReferenceEntry<K, V> next) {
super(queue, key, hash, next);
}
// The code below is exactly the same for each access entry type.
volatile long accessTime = Long.MAX_VALUE;
@Override
public long getAccessTime() {
return accessTime;
}
@Override
public void setAccessTime(long time) {
this.accessTime = time;
}
// Guarded By Segment.this
@Weak ReferenceEntry<K, V> nextAccess = nullEntry();
@Override
public ReferenceEntry<K, V> getNextInAccessQueue() {
return nextAccess;
}
@Override
public void setNextInAccessQueue(ReferenceEntry<K, V> next) {
this.nextAccess = next;
}
// Guarded By Segment.this
@Weak ReferenceEntry<K, V> previousAccess = nullEntry();
@Override
public ReferenceEntry<K, V> getPreviousInAccessQueue() {
return previousAccess;
}
@Override
public void setPreviousInAccessQueue(ReferenceEntry<K, V> previous) {
this.previousAccess = previous;
}
// The code below is exactly the same for each write entry type.
volatile long writeTime = Long.MAX_VALUE;
@Override
public long getWriteTime() {
return writeTime;
}
@Override
public void setWriteTime(long time) {
this.writeTime = time;
}
// Guarded By Segment.this
@Weak ReferenceEntry<K, V> nextWrite = nullEntry();
@Override
public ReferenceEntry<K, V> getNextInWriteQueue() {
return nextWrite;
}
@Override
public void setNextInWriteQueue(ReferenceEntry<K, V> next) {
this.nextWrite = next;
}
// Guarded By Segment.this
@Weak ReferenceEntry<K, V> previousWrite = nullEntry();
@Override
public ReferenceEntry<K, V> getPreviousInWriteQueue() {
return previousWrite;
}
@Override
public void setPreviousInWriteQueue(ReferenceEntry<K, V> previous) {
this.previousWrite = previous;
}
}
/** References a weak value. */
private static
|
WeakAccessWriteEntry
|
java
|
quarkusio__quarkus
|
integration-tests/amazon-lambda/src/test/java/io/quarkus/it/amazon/lambda/AmazonLambdaWithProfileSimpleTestCase.java
|
{
"start": 379,
"end": 902
}
|
class ____ {
@Test
public void testSimpleLambdaSuccess() throws Exception {
InputObject in = new InputObject();
in.setGreeting("Hello");
in.setName("Stu");
given()
.contentType("application/json")
.accept("application/json")
.body(in)
.when()
.post()
.then()
.statusCode(200)
.body(containsString("Hello Stu"));
}
}
|
AmazonLambdaWithProfileSimpleTestCase
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/jpa/association/AssociationTest.java
|
{
"start": 488,
"end": 1601
}
|
class ____ {
@AfterEach
public void tearDown(EntityManagerFactoryScope scope) {
scope.getEntityManagerFactory().getSchemaManager().truncate();
}
@Test
public void testBidirOneToOne(EntityManagerFactoryScope scope) {
final String id = "10";
scope.inTransaction(
entityManager -> {
Incident i = entityManager.find( Incident.class, id );
if ( i == null ) {
i = new Incident( id );
IncidentStatus ist = new IncidentStatus( id );
i.setIncidentStatus( ist );
ist.setIncident( i );
entityManager.persist( i );
}
} );
}
@Test
public void testMergeAndBidirOneToOne(EntityManagerFactoryScope scope) {
final Oven persistedOven = scope.fromTransaction(
entityManager -> {
Oven oven = new Oven();
Kitchen kitchen = new Kitchen();
entityManager.persist( oven );
entityManager.persist( kitchen );
kitchen.setOven( oven );
oven.setKitchen( kitchen );
return oven;
} );
Oven mergedOven = scope.fromTransaction(
entityManager -> {
return entityManager.merge( persistedOven );
}
);
}
}
|
AssociationTest
|
java
|
apache__dubbo
|
dubbo-remoting/dubbo-remoting-netty4/src/main/java/org/apache/dubbo/remoting/transport/netty4/NettySslContextOperator.java
|
{
"start": 962,
"end": 1104
}
|
class ____ implements ContextOperator {
@Override
public SslContext buildContext() {
return null;
}
}
|
NettySslContextOperator
|
java
|
spring-projects__spring-framework
|
spring-test/src/main/java/org/springframework/test/context/aot/AotTestContextInitializers.java
|
{
"start": 2308,
"end": 3755
}
|
class ____ an AOT-optimized application context
* initializer.
* <p>If this method returns {@code true}, {@link #getContextInitializer(Class)}
* should not return {@code null}.
*/
public boolean isSupportedTestClass(Class<?> testClass) {
return this.contextInitializers.containsKey(testClass.getName());
}
/**
* Get the AOT {@link ApplicationContextInitializer} for the specified test class.
* @return the AOT context initializer, or {@code null} if there is no AOT context
* initializer for the specified test class
* @see #isSupportedTestClass(Class)
* @see #getContextInitializerClass(Class)
*/
public @Nullable ApplicationContextInitializer<ConfigurableApplicationContext> getContextInitializer(Class<?> testClass) {
Supplier<ApplicationContextInitializer<ConfigurableApplicationContext>> supplier =
this.contextInitializers.get(testClass.getName());
return (supplier != null ? supplier.get() : null);
}
/**
* Get the AOT {@link ApplicationContextInitializer} {@link Class} for the
* specified test class.
* @return the AOT context initializer class, or {@code null} if there is no
* AOT context initializer for the specified test class
* @see #isSupportedTestClass(Class)
* @see #getContextInitializer(Class)
*/
public @Nullable Class<ApplicationContextInitializer<?>> getContextInitializerClass(Class<?> testClass) {
return this.contextInitializerClasses.get(testClass.getName());
}
}
|
has
|
java
|
mybatis__mybatis-3
|
src/test/java/org/apache/ibatis/reflection/wrapper/CollectionWrapperUnitTest.java
|
{
"start": 1452,
"end": 4028
}
|
class ____ extends ObjectWrapperBase {
@Mock
private Collection<Object> collection;
@Mock
private PropertyTokenizer tokenizer;
private ObjectWrapper wrapper;
@BeforeEach
void setup() {
MetaObject metaObject = SystemMetaObject.forObject(collection);
this.wrapper = new CollectionWrapper(metaObject, collection);
}
@Test
@Override
void shouldGet() {
assertThatExceptionOfType(UnsupportedOperationException.class).isThrownBy(() -> wrapper.get(tokenizer));
}
@Test
@Override
void shouldSet() {
assertThatExceptionOfType(UnsupportedOperationException.class).isThrownBy(() -> wrapper.set(tokenizer, null));
}
@Test
@Override
void shouldFindProperty() {
assertThatExceptionOfType(UnsupportedOperationException.class).isThrownBy(() -> wrapper.findProperty("abc", true));
}
@Test
@Override
void shouldGetGetterNames() {
assertThatExceptionOfType(UnsupportedOperationException.class).isThrownBy(() -> wrapper.getGetterNames());
}
@Test
@Override
void shouldGetSetterNames() {
assertThatExceptionOfType(UnsupportedOperationException.class).isThrownBy(() -> wrapper.getSetterNames());
}
@Test
@Override
void shouldGetGetterType() {
assertThatExceptionOfType(UnsupportedOperationException.class).isThrownBy(() -> wrapper.getGetterType("abc"));
}
@Test
@Override
void shouldGetSetterType() {
assertThatExceptionOfType(UnsupportedOperationException.class).isThrownBy(() -> wrapper.getSetterType("abc"));
}
@Test
@Override
void shouldHasGetter() {
assertThatExceptionOfType(UnsupportedOperationException.class).isThrownBy(() -> wrapper.hasGetter("abc"));
}
@Test
@Override
void shouldHasSetter() {
assertThatExceptionOfType(UnsupportedOperationException.class).isThrownBy(() -> wrapper.hasSetter("abc"));
}
@Test
@Override
void shouldIsCollection() {
assertTrue(wrapper.isCollection());
}
@Test
@Override
void shouldInstantiatePropertyValue() {
assertThatExceptionOfType(UnsupportedOperationException.class)
.isThrownBy(() -> wrapper.instantiatePropertyValue("abc", tokenizer, null));
}
@Test
@Override
void shouldAddElement() {
wrapper.add("bdc");
verify(collection).add("bdc");
}
@Test
@Override
void shouldAddAll() {
List<Object> list = new ArrayList<>() {
private static final long serialVersionUID = 1L;
{
add("1");
add("2");
add("3");
}
};
wrapper.addAll(list);
verify(collection).addAll(list);
}
}
|
CollectionWrapperUnitTest
|
java
|
apache__avro
|
lang/java/avro/src/main/java/org/apache/avro/file/SnappyCodec.java
|
{
"start": 1046,
"end": 1133
}
|
class ____ extends Codec {
private final CRC32 crc32 = new CRC32();
static
|
SnappyCodec
|
java
|
spring-projects__spring-framework
|
spring-webflux/src/main/java/org/springframework/web/reactive/function/server/RouterFunctions.java
|
{
"start": 52547,
"end": 53354
}
|
class ____<T extends ServerResponse, S extends ServerResponse>
implements RouterFunction<S> {
private final RouterFunction<T> routerFunction;
private final HandlerFilterFunction<T, S> filterFunction;
public FilteredRouterFunction(
RouterFunction<T> routerFunction,
HandlerFilterFunction<T, S> filterFunction) {
this.routerFunction = routerFunction;
this.filterFunction = filterFunction;
}
@Override
public Mono<HandlerFunction<S>> route(ServerRequest request) {
return this.routerFunction.route(request).map(this.filterFunction::apply);
}
@Override
public void accept(Visitor visitor) {
this.routerFunction.accept(visitor);
}
@Override
public String toString() {
return this.routerFunction.toString();
}
}
private static final
|
FilteredRouterFunction
|
java
|
netty__netty
|
transport/src/test/java/io/netty/channel/socket/nio/NioServerDomainSocketChannelTest.java
|
{
"start": 1710,
"end": 5343
}
|
class ____ extends AbstractNioDomainChannelTest<NioServerDomainSocketChannel> {
@Test
public void testCloseOnError() throws Exception {
ServerSocketChannel jdkChannel = NioServerDomainSocketChannel.newChannel(SelectorProvider.provider());
NioServerDomainSocketChannel serverSocketChannel = new NioServerDomainSocketChannel(jdkChannel);
EventLoopGroup group = new MultiThreadIoEventLoopGroup(1, NioIoHandler.newFactory());
File file = newRandomTmpFile();
try {
group.register(serverSocketChannel).syncUninterruptibly();
serverSocketChannel.bind(newUnixDomainSocketAddress(file.getAbsolutePath()))
.syncUninterruptibly();
assertFalse(serverSocketChannel.closeOnReadError(new IOException()));
serverSocketChannel.close().syncUninterruptibly();
} finally {
group.shutdownGracefully();
file.delete();
}
}
@Test
public void testIsActiveFalseAfterClose() throws Exception {
NioServerDomainSocketChannel serverSocketChannel = new NioServerDomainSocketChannel();
EventLoopGroup group = new MultiThreadIoEventLoopGroup(1, NioIoHandler.newFactory());
File file = newRandomTmpFile();
try {
group.register(serverSocketChannel).syncUninterruptibly();
Channel channel = serverSocketChannel.bind(
newUnixDomainSocketAddress(file.getAbsolutePath()))
.syncUninterruptibly().channel();
assertTrue(channel.isActive());
assertTrue(channel.isOpen());
channel.close().syncUninterruptibly();
assertFalse(channel.isOpen());
assertFalse(channel.isActive());
} finally {
group.shutdownGracefully();
file.delete();
}
}
@ParameterizedTest
@ValueSource(booleans = { false, true })
public void testCreateChannelFromJdkChannel(boolean bindJdkChannel) throws Exception {
File file = newRandomTmpFile();
EventLoopGroup group = new MultiThreadIoEventLoopGroup(1, NioIoHandler.newFactory());
try {
SocketAddress localAddress = newUnixDomainSocketAddress(file.getAbsolutePath());
ServerSocketChannel jdkChannel = NioServerDomainSocketChannel.newChannel(SelectorProvider.provider());
if (bindJdkChannel) {
jdkChannel.bind(localAddress);
}
NioServerDomainSocketChannel serverSocketChannel = new NioServerDomainSocketChannel(jdkChannel);
group.register(serverSocketChannel).syncUninterruptibly();
assertTrue(serverSocketChannel.isOpen());
assertEquals(bindJdkChannel, serverSocketChannel.isActive());
serverSocketChannel.close().syncUninterruptibly();
assertFalse(serverSocketChannel.isOpen());
assertFalse(serverSocketChannel.isActive());
} finally {
group.shutdownGracefully();
file.delete();
}
}
@Override
protected NioServerDomainSocketChannel newNioChannel() {
return new NioServerDomainSocketChannel();
}
@Override
protected NetworkChannel jdkChannel(NioServerDomainSocketChannel channel) {
return channel.javaChannel();
}
@Override
protected SocketOption<?> newInvalidOption() {
return StandardSocketOptions.IP_MULTICAST_IF;
}
private static File newRandomTmpFile() {
return new File(System.getProperty("java.io.tmpdir"), UUID.randomUUID().toString());
}
}
|
NioServerDomainSocketChannelTest
|
java
|
playframework__playframework
|
core/play/src/main/java/play/mvc/Http.java
|
{
"start": 77393,
"end": 78245
}
|
interface ____ {
/** Content-Type of text. */
String TEXT = "text/plain";
/** Content-Type of html. */
String HTML = "text/html";
/** Content-Type of json. */
String JSON = "application/json";
/** Content-Type of xml. */
String XML = "application/xml";
/** Content-Type of xhtml. */
String XHTML = "application/xhtml+xml";
/** Content-Type of css. */
String CSS = "text/css";
/** Content-Type of javascript. */
String JAVASCRIPT = "application/javascript";
/** Content-Type of form-urlencoded. */
String FORM = "application/x-www-form-urlencoded";
/** Content-Type of server sent events. */
String EVENT_STREAM = "text/event-stream";
/** Content-Type of binary data. */
String BINARY = "application/octet-stream";
}
/** Standard HTTP Verbs */
public
|
MimeTypes
|
java
|
google__guava
|
guava-testlib/test/com/google/common/testing/NullPointerTesterTest.java
|
{
"start": 32409,
"end": 32466
}
|
enum ____ {
MALE,
FEMALE
}
private static
|
Gender
|
java
|
apache__flink
|
flink-end-to-end-tests/flink-stream-state-ttl-test/src/main/java/org/apache/flink/streaming/tests/verify/AbstractTtlStateVerifier.java
|
{
"start": 1382,
"end": 4211
}
|
class ____<
D extends StateDescriptor<S, SV>, S extends State, SV, UV, GV>
implements TtlStateVerifier<UV, GV> {
static final Random RANDOM = new Random();
@Nonnull final D stateDesc;
AbstractTtlStateVerifier(@Nonnull D stateDesc) {
this.stateDesc = stateDesc;
}
@Nonnull
static String randomString() {
return StringUtils.getRandomString(RANDOM, 2, 20);
}
@SuppressWarnings("unchecked")
@Override
@Nonnull
public State createState(
@Nonnull FunctionInitializationContext context, @Nonnull StateTtlConfig ttlConfig) {
stateDesc.enableTimeToLive(ttlConfig);
return createState(context);
}
abstract State createState(FunctionInitializationContext context);
@SuppressWarnings("unchecked")
@Override
@Nonnull
public TypeSerializer<UV> getUpdateSerializer() {
return (TypeSerializer<UV>) stateDesc.getSerializer();
}
@SuppressWarnings("unchecked")
@Override
public GV get(@Nonnull State state) throws Exception {
return getInternal((S) state);
}
abstract GV getInternal(@Nonnull S state) throws Exception;
@SuppressWarnings("unchecked")
@Override
public void update(@Nonnull State state, Object update) throws Exception {
updateInternal((S) state, (UV) update);
}
abstract void updateInternal(@Nonnull S state, UV update) throws Exception;
@SuppressWarnings("unchecked")
@Override
public boolean verify(@Nonnull TtlVerificationContext<?, ?> verificationContextRaw) {
TtlVerificationContext<UV, GV> verificationContext =
(TtlVerificationContext<UV, GV>) verificationContextRaw;
long currentTimestamp = verificationContext.getUpdateContext().getTimestamp();
GV valueBeforeUpdate = verificationContext.getUpdateContext().getValueBeforeUpdate();
List<ValueWithTs<UV>> updates = new ArrayList<>(verificationContext.getPrevUpdates());
GV expectedValueBeforeUpdate = expected(updates, currentTimestamp);
GV valueAfterUpdate = verificationContext.getUpdateContext().getValueAfterUpdate();
ValueWithTs<UV> update = verificationContext.getUpdateContext().getUpdateWithTs();
updates.add(update);
GV expectedValueAfterUpdate = expected(updates, currentTimestamp);
return Objects.equals(valueBeforeUpdate, expectedValueBeforeUpdate)
&& Objects.equals(valueAfterUpdate, expectedValueAfterUpdate);
}
abstract GV expected(@Nonnull List<ValueWithTs<UV>> updates, long currentTimestamp);
boolean expired(long lastTimestamp, long currentTimestamp) {
return lastTimestamp + stateDesc.getTtlConfig().getTimeToLive().toMillis()
<= currentTimestamp;
}
}
|
AbstractTtlStateVerifier
|
java
|
lettuce-io__lettuce-core
|
src/test/java/io/lettuce/core/output/SuggestionParserUnitTests.java
|
{
"start": 606,
"end": 6065
}
|
class ____ {
@Test
void shouldParseBasicSuggestions() {
SuggestionParser<String> parser = new SuggestionParser<>(false, false);
ArrayComplexData data = new ArrayComplexData(3);
data.store("suggestion1");
data.store("suggestion2");
data.store("suggestion3");
List<Suggestion<String>> suggestions = parser.parse(data);
assertThat(suggestions).hasSize(3);
assertThat(suggestions.get(0).getValue()).isEqualTo("suggestion1");
assertThat(suggestions.get(0).hasScore()).isFalse();
assertThat(suggestions.get(0).hasPayload()).isFalse();
assertThat(suggestions.get(1).getValue()).isEqualTo("suggestion2");
assertThat(suggestions.get(2).getValue()).isEqualTo("suggestion3");
}
@Test
void shouldParseSuggestionsWithScores() {
SuggestionParser<String> parser = new SuggestionParser<>(true, false);
ArrayComplexData data = new ArrayComplexData(4);
data.store("suggestion1");
data.store(1.5);
data.store("suggestion2");
data.store(2.0);
List<Suggestion<String>> suggestions = parser.parse(data);
assertThat(suggestions).hasSize(2);
assertThat(suggestions.get(0).getValue()).isEqualTo("suggestion1");
assertThat(suggestions.get(0).hasScore()).isTrue();
assertThat(suggestions.get(0).getScore()).isEqualTo(1.5);
assertThat(suggestions.get(0).hasPayload()).isFalse();
assertThat(suggestions.get(1).getValue()).isEqualTo("suggestion2");
assertThat(suggestions.get(1).getScore()).isEqualTo(2.0);
}
@Test
void shouldParseSuggestionsWithPayloads() {
SuggestionParser<String> parser = new SuggestionParser<>(false, true);
ArrayComplexData data = new ArrayComplexData(4);
data.store("suggestion1");
data.store("payload1");
data.store("suggestion2");
data.store("payload2");
List<Suggestion<String>> suggestions = parser.parse(data);
assertThat(suggestions).hasSize(2);
assertThat(suggestions.get(0).getValue()).isEqualTo("suggestion1");
assertThat(suggestions.get(0).hasScore()).isFalse();
assertThat(suggestions.get(0).hasPayload()).isTrue();
assertThat(suggestions.get(0).getPayload()).isEqualTo("payload1");
assertThat(suggestions.get(1).getValue()).isEqualTo("suggestion2");
assertThat(suggestions.get(1).getPayload()).isEqualTo("payload2");
}
@Test
void shouldParseSuggestionsWithScoresAndPayloads() {
SuggestionParser<String> parser = new SuggestionParser<>(true, true);
ArrayComplexData data = new ArrayComplexData(6);
data.store("suggestion1");
data.store(1.5);
data.store("payload1");
data.store("suggestion2");
data.store(2.0);
data.store("payload2");
List<Suggestion<String>> suggestions = parser.parse(data);
assertThat(suggestions).hasSize(2);
assertThat(suggestions.get(0).getValue()).isEqualTo("suggestion1");
assertThat(suggestions.get(0).hasScore()).isTrue();
assertThat(suggestions.get(0).getScore()).isEqualTo(1.5);
assertThat(suggestions.get(0).hasPayload()).isTrue();
assertThat(suggestions.get(0).getPayload()).isEqualTo("payload1");
assertThat(suggestions.get(1).getValue()).isEqualTo("suggestion2");
assertThat(suggestions.get(1).getScore()).isEqualTo(2.0);
assertThat(suggestions.get(1).getPayload()).isEqualTo("payload2");
}
@Test
void shouldHandleEmptyList() {
SuggestionParser<String> parser = new SuggestionParser<>(false, false);
ArrayComplexData data = new ArrayComplexData(0);
List<Suggestion<String>> suggestions = parser.parse(data);
assertThat(suggestions).isEmpty();
}
@Test
void shouldThrowExceptionForNullData() {
SuggestionParser<String> parser = new SuggestionParser<>(false, false);
List<Suggestion<String>> suggestions = parser.parse(null);
assertThat(suggestions).isEmpty();
}
@Test
void shouldThrowExceptionForInvalidScoreFormat() {
SuggestionParser<String> parser = new SuggestionParser<>(true, false);
ArrayComplexData data = new ArrayComplexData(3);
data.store("suggestion1");
data.store("suggestion2");
data.store("suggestion3");
List<Suggestion<String>> suggestions = parser.parse(data);
assertThat(suggestions).hasSize(0);
}
@Test
void shouldThrowExceptionForInvalidPayloadFormat() {
SuggestionParser<String> parser = new SuggestionParser<>(false, true);
ArrayComplexData data = new ArrayComplexData(3);
data.store("suggestion1");
data.store("payload1");
data.store("suggestion2");
List<Suggestion<String>> suggestions = parser.parse(data);
assertThat(suggestions).hasSize(0);
}
@Test
void shouldThrowExceptionForInvalidScoreAndPayloadFormat() {
SuggestionParser<String> parser = new SuggestionParser<>(true, true);
ArrayComplexData data = new ArrayComplexData(5);
data.store("suggestion1");
data.store(1.5);
data.store("payload1");
data.store("suggestion2");
data.store(2.0);
List<Suggestion<String>> suggestions = parser.parse(data);
assertThat(suggestions).hasSize(0);
}
}
|
SuggestionParserUnitTests
|
java
|
apache__camel
|
core/camel-support/src/main/java/org/apache/camel/support/cache/ProducerServicePool.java
|
{
"start": 1069,
"end": 1375
}
|
class ____ extends ServicePool<AsyncProducer> {
public ProducerServicePool(ThrowingFunction<Endpoint, AsyncProducer, Exception> creator,
Function<AsyncProducer, Endpoint> getEndpoint, int capacity) {
super(creator, getEndpoint, capacity);
}
}
|
ProducerServicePool
|
java
|
quarkusio__quarkus
|
extensions/hibernate-reactive/runtime/src/main/java/io/quarkus/hibernate/reactive/runtime/boot/registry/ReactiveHibernateInitiatorListProvider.java
|
{
"start": 3260,
"end": 3364
}
|
class ____ implements InitialInitiatorListProvider {
//N.B. this
|
ReactiveHibernateInitiatorListProvider
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/dialect/function/array/PostgreSQLArrayConstructorFunction.java
|
{
"start": 705,
"end": 1814
}
|
class ____ extends ArrayConstructorFunction {
public PostgreSQLArrayConstructorFunction(boolean list) {
super( list, true );
}
@Override
public void render(
SqlAppender sqlAppender,
List<? extends SqlAstNode> sqlAstArguments,
ReturnableType<?> returnType,
SqlAstTranslator<?> walker) {
String arrayTypeName = null;
if ( returnType instanceof BasicPluralType<?, ?> pluralType ) {
if ( needsArrayCasting( pluralType.getElementType() ) ) {
arrayTypeName = DdlTypeHelper.getCastTypeName(
returnType,
walker.getSessionFactory().getTypeConfiguration()
);
sqlAppender.append( "cast(" );
}
}
super.render( sqlAppender, sqlAstArguments, returnType, walker );
if ( arrayTypeName != null ) {
sqlAppender.appendSql( " as " );
sqlAppender.appendSql( arrayTypeName );
sqlAppender.appendSql( ')' );
}
}
private static boolean needsArrayCasting(BasicType<?> elementType) {
// PostgreSQL doesn't do implicit conversion between text[] and varchar[], so we need casting
return elementType.getJdbcType().isString();
}
}
|
PostgreSQLArrayConstructorFunction
|
java
|
apache__hadoop
|
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/TestCoderBase.java
|
{
"start": 1370,
"end": 15919
}
|
class ____ {
protected static Random RAND = new Random();
protected boolean allowDump = true;
private Configuration conf;
protected int numDataUnits;
protected int numParityUnits;
protected int baseChunkSize = 1024;
private int chunkSize = baseChunkSize;
private BufferAllocator allocator;
private byte[] zeroChunkBytes;
private boolean startBufferWithZero = true;
// Indexes of erased data units.
protected int[] erasedDataIndexes = new int[] {0};
// Indexes of erased parity units.
protected int[] erasedParityIndexes = new int[] {0};
// Data buffers are either direct or on-heap, for performance the two cases
// may go to different coding implementations.
protected boolean usingDirectBuffer = true;
protected boolean usingFixedData = true;
// Using this the generated data can be repeatable across multiple calls to
// encode(), in order for troubleshooting.
private static int FIXED_DATA_GENERATOR = 0;
protected byte[][] fixedData;
protected boolean allowChangeInputs;
protected void setChunkSize(int chunkSize) {
this.chunkSize = chunkSize;
this.zeroChunkBytes = new byte[chunkSize]; // With ZERO by default
}
protected byte[] getZeroChunkBytes() {
return zeroChunkBytes;
}
protected void prepareBufferAllocator(boolean usingSlicedBuffer) {
if (usingSlicedBuffer) {
int roughEstimationSpace =
chunkSize * (numDataUnits + numParityUnits) * 10;
allocator = new SlicedBufferAllocator(usingDirectBuffer,
roughEstimationSpace);
} else {
allocator = new SimpleBufferAllocator(usingDirectBuffer);
}
}
/**
* Prepare before running the case.
* @param conf
* @param numDataUnits
* @param numParityUnits
* @param erasedDataIndexes
* @param erasedParityIndexes
* @param usingFixedData Using fixed or pre-generated data to test instead of
* generating data
*/
protected void prepare(Configuration conf, int numDataUnits,
int numParityUnits, int[] erasedDataIndexes,
int[] erasedParityIndexes, boolean usingFixedData) {
this.conf = conf != null ? conf : new Configuration();
this.numDataUnits = numDataUnits;
this.numParityUnits = numParityUnits;
this.erasedDataIndexes = erasedDataIndexes != null ?
erasedDataIndexes : new int[] {0};
this.erasedParityIndexes = erasedParityIndexes != null ?
erasedParityIndexes : new int[] {0};
this.usingFixedData = usingFixedData;
if (usingFixedData) {
prepareFixedData();
}
}
/**
* Prepare before running the case.
* @param conf
* @param numDataUnits
* @param numParityUnits
* @param erasedDataIndexes
* @param erasedParityIndexes
*/
protected void prepare(Configuration conf, int numDataUnits,
int numParityUnits, int[] erasedDataIndexes,
int[] erasedParityIndexes) {
prepare(conf, numDataUnits, numParityUnits, erasedDataIndexes,
erasedParityIndexes, false);
}
/**
* Prepare before running the case.
* @param numDataUnits
* @param numParityUnits
* @param erasedDataIndexes
* @param erasedParityIndexes
*/
protected void prepare(int numDataUnits, int numParityUnits,
int[] erasedDataIndexes, int[] erasedParityIndexes) {
prepare(null, numDataUnits, numParityUnits, erasedDataIndexes,
erasedParityIndexes, false);
}
/**
* Get the conf the test.
* @return configuration
*/
protected Configuration getConf() {
return this.conf;
}
/**
* Compare and verify if erased chunks are equal to recovered chunks
* @param erasedChunks
* @param recoveredChunks
*/
protected void compareAndVerify(ECChunk[] erasedChunks,
ECChunk[] recoveredChunks) {
byte[][] erased = toArrays(erasedChunks);
byte[][] recovered = toArrays(recoveredChunks);
boolean result = Arrays.deepEquals(erased, recovered);
if (!result) {
assertTrue(result, "Decoding and comparing failed.");
}
}
/**
* Adjust and return erased indexes altogether, including erased data indexes
* and parity indexes.
* @return erased indexes altogether
*/
protected int[] getErasedIndexesForDecoding() {
int[] erasedIndexesForDecoding =
new int[erasedDataIndexes.length + erasedParityIndexes.length];
int idx = 0;
for (int i = 0; i < erasedDataIndexes.length; i++) {
erasedIndexesForDecoding[idx ++] = erasedDataIndexes[i];
}
for (int i = 0; i < erasedParityIndexes.length; i++) {
erasedIndexesForDecoding[idx ++] = erasedParityIndexes[i] + numDataUnits;
}
return erasedIndexesForDecoding;
}
/**
* Return input chunks for decoding, which is dataChunks + parityChunks.
* @param dataChunks
* @param parityChunks
* @return
*/
protected ECChunk[] prepareInputChunksForDecoding(ECChunk[] dataChunks,
ECChunk[] parityChunks) {
ECChunk[] inputChunks = new ECChunk[numDataUnits + numParityUnits];
int idx = 0;
for (int i = 0; i < numDataUnits; i++) {
inputChunks[idx ++] = dataChunks[i];
}
for (int i = 0; i < numParityUnits; i++) {
inputChunks[idx ++] = parityChunks[i];
}
return inputChunks;
}
/**
* Erase some data chunks to test the recovering of them. As they're erased,
* we don't need to read them and will not have the buffers at all, so just
* set them as null.
* @param dataChunks
* @param parityChunks
* @return clone of erased chunks
*/
protected ECChunk[] backupAndEraseChunks(ECChunk[] dataChunks,
ECChunk[] parityChunks) {
ECChunk[] toEraseChunks = new ECChunk[erasedDataIndexes.length +
erasedParityIndexes.length];
int idx = 0;
for (int i = 0; i < erasedDataIndexes.length; i++) {
toEraseChunks[idx ++] = dataChunks[erasedDataIndexes[i]];
dataChunks[erasedDataIndexes[i]] = null;
}
for (int i = 0; i < erasedParityIndexes.length; i++) {
toEraseChunks[idx ++] = parityChunks[erasedParityIndexes[i]];
parityChunks[erasedParityIndexes[i]] = null;
}
return toEraseChunks;
}
/**
* Erase data from the specified chunks, just setting them as null.
* @param chunks
*/
protected void eraseDataFromChunks(ECChunk[] chunks) {
for (int i = 0; i < chunks.length; i++) {
chunks[i] = null;
}
}
protected void markChunks(ECChunk[] chunks) {
for (int i = 0; i < chunks.length; i++) {
if (chunks[i] != null) {
chunks[i].getBuffer().mark();
}
}
}
protected void restoreChunksFromMark(ECChunk[] chunks) {
for (int i = 0; i < chunks.length; i++) {
if (chunks[i] != null) {
chunks[i].getBuffer().reset();
}
}
}
/**
* Clone chunks along with copying the associated data. It respects how the
* chunk buffer is allocated, direct or non-direct. It avoids affecting the
* original chunk buffers.
* @param chunks
* @return
*/
protected ECChunk[] cloneChunksWithData(ECChunk[] chunks) {
ECChunk[] results = new ECChunk[chunks.length];
for (int i = 0; i < chunks.length; i++) {
results[i] = cloneChunkWithData(chunks[i]);
}
return results;
}
/**
* Clone chunk along with copying the associated data. It respects how the
* chunk buffer is allocated, direct or non-direct. It avoids affecting the
* original chunk.
* @param chunk
* @return a new chunk
*/
protected ECChunk cloneChunkWithData(ECChunk chunk) {
if (chunk == null) {
return null;
}
ByteBuffer srcBuffer = chunk.getBuffer();
byte[] bytesArr = new byte[srcBuffer.remaining()];
srcBuffer.mark();
srcBuffer.get(bytesArr, 0, bytesArr.length);
srcBuffer.reset();
ByteBuffer destBuffer = allocateOutputBuffer(bytesArr.length);
int pos = destBuffer.position();
destBuffer.put(bytesArr);
destBuffer.flip();
destBuffer.position(pos);
return new ECChunk(destBuffer);
}
/**
* Allocate a chunk for output or writing.
* @return
*/
protected ECChunk allocateOutputChunk() {
ByteBuffer buffer = allocateOutputBuffer(chunkSize);
return new ECChunk(buffer);
}
/**
* Allocate a buffer for output or writing. It can prepare for two kinds of
* data buffers: one with position as 0, the other with position > 0
* @return a buffer ready to write chunkSize bytes from current position
*/
protected ByteBuffer allocateOutputBuffer(int bufferLen) {
/**
* When startBufferWithZero, will prepare a buffer as:---------------
* otherwise, the buffer will be like: ___TO--BE--WRITTEN___,
* and in the beginning, dummy data are prefixed, to simulate a buffer of
* position > 0.
*/
int startOffset = startBufferWithZero ? 0 : 11; // 11 is arbitrary
int allocLen = startOffset + bufferLen + startOffset;
ByteBuffer buffer = allocator.allocate(allocLen);
buffer.limit(startOffset + bufferLen);
fillDummyData(buffer, startOffset);
startBufferWithZero = ! startBufferWithZero;
return buffer;
}
/**
* Prepare data chunks for each data unit, by generating random data.
* @return
*/
protected ECChunk[] prepareDataChunksForEncoding() {
if (usingFixedData) {
ECChunk[] chunks = new ECChunk[numDataUnits];
for (int i = 0; i < chunks.length; i++) {
chunks[i] = makeChunkUsingData(fixedData[i]);
}
return chunks;
}
return generateDataChunks();
}
private ECChunk makeChunkUsingData(byte[] data) {
ECChunk chunk = allocateOutputChunk();
ByteBuffer buffer = chunk.getBuffer();
int pos = buffer.position();
buffer.put(data, 0, chunkSize);
buffer.flip();
buffer.position(pos);
return chunk;
}
private ECChunk[] generateDataChunks() {
ECChunk[] chunks = new ECChunk[numDataUnits];
for (int i = 0; i < chunks.length; i++) {
chunks[i] = generateDataChunk();
}
return chunks;
}
private void prepareFixedData() {
// We may load test data from a resource, or just generate randomly.
// The generated data will be used across subsequent encode/decode calls.
this.fixedData = new byte[numDataUnits][];
for (int i = 0; i < numDataUnits; i++) {
fixedData[i] = generateFixedData(baseChunkSize * 2);
}
}
/**
* Generate data chunk by making random data.
* @return
*/
protected ECChunk generateDataChunk() {
ByteBuffer buffer = allocateOutputBuffer(chunkSize);
int pos = buffer.position();
buffer.put(generateData(chunkSize));
buffer.flip();
buffer.position(pos);
return new ECChunk(buffer);
}
/**
* Fill len of dummy data in the buffer at the current position.
* @param buffer
* @param len
*/
protected void fillDummyData(ByteBuffer buffer, int len) {
byte[] dummy = new byte[len];
RAND.nextBytes(dummy);
buffer.put(dummy);
}
protected byte[] generateData(int len) {
byte[] buffer = new byte[len];
for (int i = 0; i < buffer.length; i++) {
buffer[i] = (byte) RAND.nextInt(256);
}
return buffer;
}
protected byte[] generateFixedData(int len) {
byte[] buffer = new byte[len];
for (int i = 0; i < buffer.length; i++) {
buffer[i] = (byte) FIXED_DATA_GENERATOR++;
if (FIXED_DATA_GENERATOR == 256) {
FIXED_DATA_GENERATOR = 0;
}
}
return buffer;
}
/**
* Prepare parity chunks for encoding, each chunk for each parity unit.
* @return
*/
protected ECChunk[] prepareParityChunksForEncoding() {
ECChunk[] chunks = new ECChunk[numParityUnits];
for (int i = 0; i < chunks.length; i++) {
chunks[i] = allocateOutputChunk();
}
return chunks;
}
/**
* Prepare output chunks for decoding, each output chunk for each erased
* chunk.
* @return
*/
protected ECChunk[] prepareOutputChunksForDecoding() {
ECChunk[] chunks = new ECChunk[erasedDataIndexes.length +
erasedParityIndexes.length];
for (int i = 0; i < chunks.length; i++) {
chunks[i] = allocateOutputChunk();
}
return chunks;
}
/**
* Convert an array of this chunks to an array of byte array.
* Note the chunk buffers are not affected.
* @param chunks
* @return an array of byte array
*/
protected byte[][] toArrays(ECChunk[] chunks) {
byte[][] bytesArr = new byte[chunks.length][];
for (int i = 0; i < chunks.length; i++) {
if (chunks[i] != null) {
bytesArr[i] = chunks[i].toBytesArray();
}
}
return bytesArr;
}
/**
* Dump all the settings used in the test case if isAllowingVerboseDump is enabled.
*/
protected void dumpSetting() {
if (allowDump) {
StringBuilder sb = new StringBuilder("Erasure coder test settings:\n");
sb.append(" numDataUnits=").append(numDataUnits);
sb.append(" numParityUnits=").append(numParityUnits);
sb.append(" chunkSize=").append(chunkSize).append("\n");
sb.append(" erasedDataIndexes=").
append(Arrays.toString(erasedDataIndexes));
sb.append(" erasedParityIndexes=").
append(Arrays.toString(erasedParityIndexes));
sb.append(" usingDirectBuffer=").append(usingDirectBuffer);
sb.append(" allowChangeInputs=").append(allowChangeInputs);
sb.append(" allowVerboseDump=").append(allowDump);
sb.append("\n");
System.out.println(sb.toString());
}
}
/**
* Dump chunks prefixed with a header if isAllowingVerboseDump is enabled.
* @param header
* @param chunks
*/
protected void dumpChunks(String header, ECChunk[] chunks) {
if (allowDump) {
DumpUtil.dumpChunks(header, chunks);
}
}
/**
* Make some chunk messy or not correct any more
* @param chunks
*/
protected void corruptSomeChunk(ECChunk[] chunks) {
int idx = new Random().nextInt(chunks.length);
ByteBuffer buffer = chunks[idx].getBuffer();
if (buffer.hasRemaining()) {
buffer.position(buffer.position() + 1);
}
}
/**
* Pollute some chunk.
* @param chunks
*/
protected void polluteSomeChunk(ECChunk[] chunks) {
int idx = new Random().nextInt(chunks.length);
ByteBuffer buffer = chunks[idx].getBuffer();
buffer.mark();
buffer.put((byte) ((buffer.get(buffer.position()) + 1)));
buffer.reset();
}
}
|
TestCoderBase
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/schemaupdate/tablecomment/TestEntity.java
|
{
"start": 684,
"end": 1564
}
|
class ____ {
@Id
private Long id;
@Column(name = "NAME_COLUMN")
private String name;
@Column(name = "SECOND_NAME", table = JpaTableCommentTest.SECONDARY_TABLE_NAME)
private String secondName;
@ManyToOne(fetch = FetchType.LAZY)
@JoinTable(
name = JpaTableCommentTest.JOIN_TABLE_NAME,
comment = JpaTableCommentTest.JOIN_TABLE_COMMENT
)
private TestEntity testEntity;
public Long getId() {
return id;
}
public void setId(Long id) {
this.id = id;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public String getSecondName() {
return secondName;
}
public void setSecondName(String secondName) {
this.secondName = secondName;
}
public TestEntity getTestEntity() {
return testEntity;
}
public void setTestEntity(TestEntity testEntity) {
this.testEntity = testEntity;
}
}
|
TestEntity
|
java
|
apache__dubbo
|
dubbo-rpc/dubbo-rpc-api/src/main/java/org/apache/dubbo/rpc/AsyncRpcResult.java
|
{
"start": 1823,
"end": 2857
}
|
class ____ an unfinished RPC call, it will hold some context information for this call, for example RpcContext and Invocation,
* so that when the call finishes and the result returns, it can guarantee all the contexts being recovered as the same as when the call was made
* before any callback is invoked.
* <p>
* TODO if it's reasonable or even right to keep a reference to Invocation?
* <p>
* As {@link Result} implements CompletionStage, {@link AsyncRpcResult} allows you to easily build a async filter chain whose status will be
* driven entirely by the state of the underlying RPC call.
* <p>
* AsyncRpcResult does not contain any concrete value (except the underlying value bring by CompletableFuture), consider it as a status transfer node.
* {@link #getValue()} and {@link #getException()} are all inherited from {@link Result} interface, implementing them are mainly
* for compatibility consideration. Because many legacy {@link Filter} implementation are most possibly to call getValue directly.
*/
public
|
represents
|
java
|
google__dagger
|
javatests/dagger/internal/codegen/ComponentBuilderTest.java
|
{
"start": 10107,
"end": 10517
}
|
interface ____");
});
}
@Test
public void testBindsInstanceNotAllowedOnBothSetterAndParameter() {
Source componentFile =
CompilerTests.javaSource(
"test.SimpleComponent",
"package test;",
"",
"import dagger.BindsInstance;",
"import dagger.Component;",
"",
"@Component",
"abstract
|
Builder
|
java
|
google__guava
|
android/guava/src/com/google/common/io/TempFileCreator.java
|
{
"start": 7777,
"end": 10228
}
|
class ____'t available under all environments that we support. We use it if
* available and fall back if not.
*/
String fromSystemProperty = requireNonNull(USER_NAME.value());
try {
Class<?> processHandleClass = Class.forName("java.lang.ProcessHandle");
Class<?> processHandleInfoClass = Class.forName("java.lang.ProcessHandle$Info");
Class<?> optionalClass = Class.forName("java.util.Optional");
/*
* We don't *need* to use reflection to access Optional: It's available on all JDKs we
* support, and Android code won't get this far, anyway, because ProcessHandle is
* unavailable. But given how much other reflection we're using, we might as well use it
* here, too, so that we don't need to also suppress an AndroidApiChecker error.
*/
Method currentMethod = processHandleClass.getMethod("current");
Method infoMethod = processHandleClass.getMethod("info");
Method userMethod = processHandleInfoClass.getMethod("user");
Method orElseMethod = optionalClass.getMethod("orElse", Object.class);
Object current = currentMethod.invoke(null);
Object info = infoMethod.invoke(current);
Object user = userMethod.invoke(info);
return (String) requireNonNull(orElseMethod.invoke(user, fromSystemProperty));
} catch (ClassNotFoundException runningUnderAndroidOrJava8) {
/*
* I'm not sure that we could actually get here for *Android*: I would expect us to enter
* the POSIX code path instead. And if we tried this code path, we'd have trouble unless we
* were running under a new enough version of Android to support NIO.
*
* So this is probably just the "Windows Java 8" case. In that case, if we wanted *another*
* layer of fallback before consulting the system property, we could try
* com.sun.security.auth.module.NTSystem.
*
* But for now, we use the value from the system property as our best guess.
*/
return fromSystemProperty;
} catch (InvocationTargetException e) {
throwIfUnchecked(e.getCause()); // in case it's an Error or something
return fromSystemProperty; // should be impossible
} catch (NoSuchMethodException | IllegalAccessException shouldBeImpossible) {
return fromSystemProperty;
}
}
}
private static final
|
isn
|
java
|
lettuce-io__lettuce-core
|
src/main/java/io/lettuce/core/output/ComplexOutput.java
|
{
"start": 1121,
"end": 4593
}
|
class ____<K, V, T> extends CommandOutput<K, V, T> {
private final Deque<ComplexData> dataStack;
private final ComplexDataParser<T> parser;
protected ComplexData data;
/**
* Constructs a new instance of the {@link ComplexOutput}
*
* @param codec the {@link RedisCodec} to be applied
*/
public ComplexOutput(RedisCodec<K, V> codec, ComplexDataParser<T> parser) {
super(codec, null);
dataStack = LettuceFactories.newSpScQueue();
this.parser = parser;
}
@Override
public T get() {
return parser.parse(data);
}
@Override
public void set(long integer) {
if (data == null) {
throw new RuntimeException("Invalid output received for dynamic aggregate output."
+ "Integer value should have been preceded by some sort of aggregation.");
}
data.store(integer);
}
@Override
public void set(double number) {
if (data == null) {
throw new RuntimeException("Invalid output received for dynamic aggregate output."
+ "Double value should have been preceded by some sort of aggregation.");
}
data.store(number);
}
@Override
public void set(boolean value) {
if (data == null) {
throw new RuntimeException("Invalid output received for dynamic aggregate output."
+ "Double value should have been preceded by some sort of aggregation.");
}
data.store(value);
}
@Override
public void set(ByteBuffer bytes) {
if (data == null) {
// Sometimes the server would return null for commands that otherwise would end up returning a complex data
// structure. In these cases, we should simply return null too.
return;
}
data.storeObject(bytes == null ? null : codec.decodeValue(bytes));
}
@Override
public void setSingle(ByteBuffer bytes) {
if (data == null) {
throw new RuntimeException("Invalid output received for dynamic aggregate output."
+ "String value should have been preceded by some sort of aggregation.");
}
data.store(bytes == null ? null : StringCodec.UTF8.decodeValue(bytes));
}
@Override
public void complete(int depth) {
if (!dataStack.isEmpty() && depth == dataStack.size()) {
data = dataStack.pop();
}
}
private void multi(ComplexData newData) {
// if there is no data set, then we are at the root object
if (data == null) {
data = newData;
return;
}
// otherwise we need to nest the provided structure
data.storeObject(newData);
dataStack.push(data);
data = newData;
}
@Override
public void multiSet(int count) {
if (count < 0) {
return;
}
SetComplexData dynamicData = new SetComplexData(count);
multi(dynamicData);
}
@Override
public void multiArray(int count) {
if (count < 0) {
return;
}
ArrayComplexData dynamicData = new ArrayComplexData(count);
multi(dynamicData);
}
@Override
public void multiMap(int count) {
if (count < 0) {
return;
}
MapComplexData dynamicData = new MapComplexData(count);
multi(dynamicData);
}
}
|
ComplexOutput
|
java
|
apache__flink
|
flink-runtime/src/main/java/org/apache/flink/runtime/rest/messages/checkpoints/CheckpointStatistics.java
|
{
"start": 3772,
"end": 18592
}
|
class ____ implements ResponseBody {
public static final String FIELD_NAME_ID = "id";
public static final String FIELD_NAME_STATUS = "status";
public static final String FIELD_NAME_IS_SAVEPOINT = "is_savepoint";
public static final String FIELD_NAME_SAVEPOINT_FORMAT = "savepointFormat";
public static final String FIELD_NAME_TRIGGER_TIMESTAMP = "trigger_timestamp";
public static final String FIELD_NAME_LATEST_ACK_TIMESTAMP = "latest_ack_timestamp";
public static final String FIELD_NAME_CHECKPOINTED_SIZE = "checkpointed_size";
/**
* The accurate name of this field should be 'checkpointed_data_size', keep it as before to not
* break backwards compatibility for old web UI.
*
* @see <a href="https://issues.apache.org/jira/browse/FLINK-13390">FLINK-13390</a>
*/
public static final String FIELD_NAME_STATE_SIZE = "state_size";
public static final String FIELD_NAME_DURATION = "end_to_end_duration";
public static final String FIELD_NAME_ALIGNMENT_BUFFERED = "alignment_buffered";
public static final String FIELD_NAME_PROCESSED_DATA = "processed_data";
public static final String FIELD_NAME_PERSISTED_DATA = "persisted_data";
public static final String FIELD_NAME_NUM_SUBTASKS = "num_subtasks";
public static final String FIELD_NAME_NUM_ACK_SUBTASKS = "num_acknowledged_subtasks";
public static final String FIELD_NAME_TASKS = "tasks";
public static final String FIELD_NAME_CHECKPOINT_TYPE = "checkpoint_type";
@JsonProperty(FIELD_NAME_ID)
private final long id;
@JsonProperty(FIELD_NAME_STATUS)
private final CheckpointStatsStatus status;
@JsonProperty(FIELD_NAME_IS_SAVEPOINT)
private final boolean savepoint;
@JsonProperty(FIELD_NAME_SAVEPOINT_FORMAT)
@Nullable
private final String savepointFormat;
@JsonProperty(FIELD_NAME_TRIGGER_TIMESTAMP)
private final long triggerTimestamp;
@JsonProperty(FIELD_NAME_LATEST_ACK_TIMESTAMP)
private final long latestAckTimestamp;
@JsonProperty(FIELD_NAME_CHECKPOINTED_SIZE)
private final long checkpointedSize;
@JsonProperty(FIELD_NAME_STATE_SIZE)
private final long stateSize;
@JsonProperty(FIELD_NAME_DURATION)
private final long duration;
@JsonProperty(FIELD_NAME_ALIGNMENT_BUFFERED)
private final long alignmentBuffered;
@JsonProperty(FIELD_NAME_PROCESSED_DATA)
private final long processedData;
@JsonProperty(FIELD_NAME_PERSISTED_DATA)
private final long persistedData;
@JsonProperty(FIELD_NAME_NUM_SUBTASKS)
private final int numSubtasks;
@JsonProperty(FIELD_NAME_NUM_ACK_SUBTASKS)
private final int numAckSubtasks;
@JsonProperty(FIELD_NAME_CHECKPOINT_TYPE)
private final RestAPICheckpointType checkpointType;
@JsonProperty(FIELD_NAME_TASKS)
@JsonSerialize(keyUsing = JobVertexIDKeySerializer.class)
private final Map<JobVertexID, TaskCheckpointStatistics> checkpointStatisticsPerTask;
@JsonCreator
private CheckpointStatistics(
@JsonProperty(FIELD_NAME_ID) long id,
@JsonProperty(FIELD_NAME_STATUS) CheckpointStatsStatus status,
@JsonProperty(FIELD_NAME_IS_SAVEPOINT) boolean savepoint,
@JsonProperty(FIELD_NAME_SAVEPOINT_FORMAT) String savepointFormat,
@JsonProperty(FIELD_NAME_TRIGGER_TIMESTAMP) long triggerTimestamp,
@JsonProperty(FIELD_NAME_LATEST_ACK_TIMESTAMP) long latestAckTimestamp,
@JsonProperty(FIELD_NAME_CHECKPOINTED_SIZE) long checkpointedSize,
@JsonProperty(FIELD_NAME_STATE_SIZE) long stateSize,
@JsonProperty(FIELD_NAME_DURATION) long duration,
@JsonProperty(FIELD_NAME_ALIGNMENT_BUFFERED) long alignmentBuffered,
@JsonProperty(FIELD_NAME_PROCESSED_DATA) long processedData,
@JsonProperty(FIELD_NAME_PERSISTED_DATA) long persistedData,
@JsonProperty(FIELD_NAME_NUM_SUBTASKS) int numSubtasks,
@JsonProperty(FIELD_NAME_NUM_ACK_SUBTASKS) int numAckSubtasks,
@JsonProperty(FIELD_NAME_CHECKPOINT_TYPE) RestAPICheckpointType checkpointType,
@JsonDeserialize(keyUsing = JobVertexIDKeyDeserializer.class)
@JsonProperty(FIELD_NAME_TASKS)
Map<JobVertexID, TaskCheckpointStatistics> checkpointStatisticsPerTask) {
this.id = id;
this.status = Preconditions.checkNotNull(status);
this.savepoint = savepoint;
this.savepointFormat = savepointFormat;
this.triggerTimestamp = triggerTimestamp;
this.latestAckTimestamp = latestAckTimestamp;
this.checkpointedSize = checkpointedSize;
this.stateSize = stateSize;
this.duration = duration;
this.alignmentBuffered = alignmentBuffered;
this.processedData = processedData;
this.persistedData = persistedData;
this.numSubtasks = numSubtasks;
this.numAckSubtasks = numAckSubtasks;
this.checkpointType = Preconditions.checkNotNull(checkpointType);
this.checkpointStatisticsPerTask = Preconditions.checkNotNull(checkpointStatisticsPerTask);
}
public long getId() {
return id;
}
public CheckpointStatsStatus getStatus() {
return status;
}
public boolean isSavepoint() {
return savepoint;
}
public long getTriggerTimestamp() {
return triggerTimestamp;
}
public long getLatestAckTimestamp() {
return latestAckTimestamp;
}
public long getCheckpointedSize() {
return checkpointedSize;
}
public long getStateSize() {
return stateSize;
}
public long getDuration() {
return duration;
}
public int getNumSubtasks() {
return numSubtasks;
}
public int getNumAckSubtasks() {
return numAckSubtasks;
}
public RestAPICheckpointType getCheckpointType() {
return checkpointType;
}
@Nullable
public Map<JobVertexID, TaskCheckpointStatistics> getCheckpointStatisticsPerTask() {
return checkpointStatisticsPerTask;
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
CheckpointStatistics that = (CheckpointStatistics) o;
return id == that.id
&& savepoint == that.savepoint
&& Objects.equals(savepointFormat, that.savepointFormat)
&& triggerTimestamp == that.triggerTimestamp
&& latestAckTimestamp == that.latestAckTimestamp
&& stateSize == that.stateSize
&& duration == that.duration
&& alignmentBuffered == that.alignmentBuffered
&& processedData == that.processedData
&& persistedData == that.persistedData
&& numSubtasks == that.numSubtasks
&& numAckSubtasks == that.numAckSubtasks
&& status == that.status
&& Objects.equals(checkpointType, that.checkpointType)
&& Objects.equals(checkpointStatisticsPerTask, that.checkpointStatisticsPerTask);
}
@Override
public int hashCode() {
return Objects.hash(
id,
status,
savepoint,
savepointFormat,
triggerTimestamp,
latestAckTimestamp,
stateSize,
duration,
alignmentBuffered,
processedData,
persistedData,
numSubtasks,
numAckSubtasks,
checkpointType,
checkpointStatisticsPerTask);
}
// -------------------------------------------------------------------------
// Static factory methods
// -------------------------------------------------------------------------
public static CheckpointStatistics generateCheckpointStatistics(
AbstractCheckpointStats checkpointStats, boolean includeTaskCheckpointStatistics) {
Preconditions.checkNotNull(checkpointStats);
Map<JobVertexID, TaskCheckpointStatistics> checkpointStatisticsPerTask;
if (includeTaskCheckpointStatistics) {
Collection<TaskStateStats> taskStateStats = checkpointStats.getAllTaskStateStats();
checkpointStatisticsPerTask =
CollectionUtil.newHashMapWithExpectedSize(taskStateStats.size());
for (TaskStateStats taskStateStat : taskStateStats) {
checkpointStatisticsPerTask.put(
taskStateStat.getJobVertexId(),
new TaskCheckpointStatistics(
checkpointStats.getCheckpointId(),
checkpointStats.getStatus(),
taskStateStat.getLatestAckTimestamp(),
taskStateStat.getCheckpointedSize(),
taskStateStat.getStateSize(),
taskStateStat.getEndToEndDuration(
checkpointStats.getTriggerTimestamp()),
0,
taskStateStat.getProcessedDataStats(),
taskStateStat.getPersistedDataStats(),
taskStateStat.getNumberOfSubtasks(),
taskStateStat.getNumberOfAcknowledgedSubtasks()));
}
} else {
checkpointStatisticsPerTask = Collections.emptyMap();
}
String savepointFormat = null;
SnapshotType snapshotType = checkpointStats.getProperties().getCheckpointType();
if (snapshotType instanceof SavepointType) {
savepointFormat = ((SavepointType) snapshotType).getFormatType().name();
}
if (checkpointStats instanceof CompletedCheckpointStats) {
final CompletedCheckpointStats completedCheckpointStats =
((CompletedCheckpointStats) checkpointStats);
return new CheckpointStatistics.CompletedCheckpointStatistics(
completedCheckpointStats.getCheckpointId(),
completedCheckpointStats.getStatus(),
snapshotType.isSavepoint(),
savepointFormat,
completedCheckpointStats.getTriggerTimestamp(),
completedCheckpointStats.getLatestAckTimestamp(),
completedCheckpointStats.getCheckpointedSize(),
completedCheckpointStats.getStateSize(),
completedCheckpointStats.getEndToEndDuration(),
0,
completedCheckpointStats.getProcessedData(),
completedCheckpointStats.getPersistedData(),
completedCheckpointStats.getNumberOfSubtasks(),
completedCheckpointStats.getNumberOfAcknowledgedSubtasks(),
RestAPICheckpointType.valueOf(
completedCheckpointStats.getProperties().getCheckpointType(),
completedCheckpointStats.isUnalignedCheckpoint()),
checkpointStatisticsPerTask,
completedCheckpointStats.getExternalPath(),
completedCheckpointStats.isDiscarded());
} else if (checkpointStats instanceof FailedCheckpointStats) {
final FailedCheckpointStats failedCheckpointStats =
((FailedCheckpointStats) checkpointStats);
return new CheckpointStatistics.FailedCheckpointStatistics(
failedCheckpointStats.getCheckpointId(),
failedCheckpointStats.getStatus(),
failedCheckpointStats.getProperties().isSavepoint(),
savepointFormat,
failedCheckpointStats.getTriggerTimestamp(),
failedCheckpointStats.getLatestAckTimestamp(),
failedCheckpointStats.getCheckpointedSize(),
failedCheckpointStats.getStateSize(),
failedCheckpointStats.getEndToEndDuration(),
0,
failedCheckpointStats.getProcessedData(),
failedCheckpointStats.getPersistedData(),
failedCheckpointStats.getNumberOfSubtasks(),
failedCheckpointStats.getNumberOfAcknowledgedSubtasks(),
RestAPICheckpointType.valueOf(
failedCheckpointStats.getProperties().getCheckpointType(),
failedCheckpointStats.isUnalignedCheckpoint()),
checkpointStatisticsPerTask,
failedCheckpointStats.getFailureTimestamp(),
failedCheckpointStats.getFailureMessage());
} else if (checkpointStats instanceof PendingCheckpointStats) {
final PendingCheckpointStats pendingCheckpointStats =
((PendingCheckpointStats) checkpointStats);
return new CheckpointStatistics.PendingCheckpointStatistics(
pendingCheckpointStats.getCheckpointId(),
pendingCheckpointStats.getStatus(),
pendingCheckpointStats.getProperties().isSavepoint(),
savepointFormat,
pendingCheckpointStats.getTriggerTimestamp(),
pendingCheckpointStats.getLatestAckTimestamp(),
pendingCheckpointStats.getCheckpointedSize(),
pendingCheckpointStats.getStateSize(),
pendingCheckpointStats.getEndToEndDuration(),
0,
pendingCheckpointStats.getProcessedData(),
pendingCheckpointStats.getPersistedData(),
pendingCheckpointStats.getNumberOfSubtasks(),
pendingCheckpointStats.getNumberOfAcknowledgedSubtasks(),
RestAPICheckpointType.valueOf(
pendingCheckpointStats.getProperties().getCheckpointType(),
pendingCheckpointStats.isUnalignedCheckpoint()),
checkpointStatisticsPerTask);
} else {
throw new IllegalArgumentException(
"Given checkpoint stats object of type "
+ checkpointStats.getClass().getName()
+ " cannot be converted.");
}
}
/**
* Backward compatibility layer between internal {@link CheckpointType} and a field used in
* {@link CheckpointStatistics}.
*/
|
CheckpointStatistics
|
java
|
apache__dubbo
|
dubbo-cluster/src/main/java/org/apache/dubbo/rpc/cluster/loadbalance/ShortestResponseLoadBalance.java
|
{
"start": 3062,
"end": 7455
}
|
class ____ {
private long succeededOffset;
private long succeededElapsedOffset;
private final RpcStatus rpcStatus;
public SlideWindowData(RpcStatus rpcStatus) {
this.rpcStatus = rpcStatus;
this.succeededOffset = 0;
this.succeededElapsedOffset = 0;
}
public void reset() {
this.succeededOffset = rpcStatus.getSucceeded();
this.succeededElapsedOffset = rpcStatus.getSucceededElapsed();
}
private long getSucceededAverageElapsed() {
long succeed = this.rpcStatus.getSucceeded() - this.succeededOffset;
if (succeed == 0) {
return 0;
}
return (this.rpcStatus.getSucceededElapsed() - this.succeededElapsedOffset) / succeed;
}
public long getEstimateResponse() {
int active = this.rpcStatus.getActive() + 1;
return getSucceededAverageElapsed() * active;
}
}
@Override
protected <T> Invoker<T> doSelect(List<Invoker<T>> invokers, URL url, Invocation invocation) {
// Number of invokers
int length = invokers.size();
// Estimated shortest response time of all invokers
long shortestResponse = Long.MAX_VALUE;
// The number of invokers having the same estimated shortest response time
int shortestCount = 0;
// The index of invokers having the same estimated shortest response time
int[] shortestIndexes = new int[length];
// the weight of every invokers
int[] weights = new int[length];
// The sum of the warmup weights of all the shortest response invokers
int totalWeight = 0;
// The weight of the first shortest response invokers
int firstWeight = 0;
// Every shortest response invoker has the same weight value?
boolean sameWeight = true;
// Filter out all the shortest response invokers
for (int i = 0; i < length; i++) {
Invoker<T> invoker = invokers.get(i);
RpcStatus rpcStatus = RpcStatus.getStatus(invoker.getUrl(), RpcUtils.getMethodName(invocation));
SlideWindowData slideWindowData =
ConcurrentHashMapUtils.computeIfAbsent(methodMap, rpcStatus, SlideWindowData::new);
// Calculate the estimated response time from the product of active connections and succeeded average
// elapsed time.
long estimateResponse = slideWindowData.getEstimateResponse();
int afterWarmup = getWeight(invoker, invocation);
weights[i] = afterWarmup;
// Same as LeastActiveLoadBalance
if (estimateResponse < shortestResponse) {
shortestResponse = estimateResponse;
shortestCount = 1;
shortestIndexes[0] = i;
totalWeight = afterWarmup;
firstWeight = afterWarmup;
sameWeight = true;
} else if (estimateResponse == shortestResponse) {
shortestIndexes[shortestCount++] = i;
totalWeight += afterWarmup;
if (sameWeight && i > 0 && afterWarmup != firstWeight) {
sameWeight = false;
}
}
}
if (System.currentTimeMillis() - lastUpdateTime > slidePeriod
&& onResetSlideWindow.compareAndSet(false, true)) {
// reset slideWindowData in async way
executorService.execute(() -> {
methodMap.values().forEach(SlideWindowData::reset);
lastUpdateTime = System.currentTimeMillis();
onResetSlideWindow.set(false);
});
}
if (shortestCount == 1) {
return invokers.get(shortestIndexes[0]);
}
if (!sameWeight && totalWeight > 0) {
int offsetWeight = ThreadLocalRandom.current().nextInt(totalWeight);
for (int i = 0; i < shortestCount; i++) {
int shortestIndex = shortestIndexes[i];
offsetWeight -= weights[shortestIndex];
if (offsetWeight < 0) {
return invokers.get(shortestIndex);
}
}
}
return invokers.get(shortestIndexes[ThreadLocalRandom.current().nextInt(shortestCount)]);
}
}
|
SlideWindowData
|
java
|
alibaba__druid
|
core/src/test/java/com/alibaba/druid/bvt/utils/JdbcUtilsTest1.java
|
{
"start": 162,
"end": 407
}
|
class ____ extends TestCase {
public void test_jdbc() throws Exception {
assertTrue(JdbcUtils.createDriver(MockDriver.class.getName()) instanceof MockDriver);
}
public void test_jdbc_1() throws Exception {
|
JdbcUtilsTest1
|
java
|
apache__hadoop
|
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/HHXORErasureDecodingStep.java
|
{
"start": 1482,
"end": 13494
}
|
class ____ extends HHErasureCodingStep {
private int pbIndex;
private int[] piggyBackIndex;
private int[] piggyBackFullIndex;
private int[] erasedIndexes;
private RawErasureDecoder rsRawDecoder;
private RawErasureEncoder xorRawEncoder;
/**
* The constructor with all the necessary info.
* @param inputBlocks inputBlocks.
* @param erasedIndexes the indexes of erased blocks in inputBlocks array
* @param outputBlocks outputBlocks.
* @param rawDecoder underlying RS decoder for hitchhiker decoding
* @param rawEncoder underlying XOR encoder for hitchhiker decoding
*/
public HHXORErasureDecodingStep(ECBlock[] inputBlocks, int[] erasedIndexes,
ECBlock[] outputBlocks, RawErasureDecoder rawDecoder,
RawErasureEncoder rawEncoder) {
super(inputBlocks, outputBlocks);
this.pbIndex = rawDecoder.getNumParityUnits() - 1;
this.erasedIndexes = erasedIndexes;
this.rsRawDecoder = rawDecoder;
this.xorRawEncoder = rawEncoder;
this.piggyBackIndex = HHUtil.initPiggyBackIndexWithoutPBVec(
rawDecoder.getNumDataUnits(), rawDecoder.getNumParityUnits());
this.piggyBackFullIndex = HHUtil.initPiggyBackFullIndexVec(
rawDecoder.getNumDataUnits(), piggyBackIndex);
}
@Override
public void performCoding(ECChunk[] inputChunks, ECChunk[] outputChunks)
throws IOException {
if (erasedIndexes.length == 0) {
return;
}
ByteBuffer[] inputBuffers = ECChunk.toBuffers(inputChunks);
ByteBuffer[] outputBuffers = ECChunk.toBuffers(outputChunks);
performCoding(inputBuffers, outputBuffers);
}
private void performCoding(ByteBuffer[] inputs, ByteBuffer[] outputs)
throws IOException {
final int numDataUnits = rsRawDecoder.getNumDataUnits();
final int numParityUnits = rsRawDecoder.getNumParityUnits();
final int numTotalUnits = numDataUnits + numParityUnits;
final int subPacketSize = getSubPacketSize();
ByteBuffer fisrtValidInput = HHUtil.findFirstValidInput(inputs);
final int bufSize = fisrtValidInput.remaining();
if (inputs.length != numTotalUnits * getSubPacketSize()) {
throw new IllegalArgumentException("Invalid inputs length");
}
if (outputs.length != erasedIndexes.length * getSubPacketSize()) {
throw new IllegalArgumentException("Invalid outputs length");
}
// notes:inputs length = numDataUnits * subPacketizationSize
// first numDataUnits length is first sub-stripe,
// second numDataUnits length is second sub-stripe
ByteBuffer[][] newIn = new ByteBuffer[subPacketSize][numTotalUnits];
for (int i = 0; i < subPacketSize; ++i) {
for (int j = 0; j < numTotalUnits; ++j) {
newIn[i][j] = inputs[i * numTotalUnits + j];
}
}
ByteBuffer[][] newOut = new ByteBuffer[subPacketSize][erasedIndexes.length];
for (int i = 0; i < subPacketSize; ++i) {
for (int j = 0; j < erasedIndexes.length; ++j) {
newOut[i][j] = outputs[i * erasedIndexes.length + j];
}
}
if (erasedIndexes.length == 1 && erasedIndexes[0] < numDataUnits) {
// Only reconstruct one data unit missing
doDecodeSingle(newIn, newOut, erasedIndexes[0], bufSize,
fisrtValidInput.isDirect());
} else {
doDecodeMultiAndParity(newIn, newOut, erasedIndexes, bufSize);
}
}
private void doDecodeSingle(ByteBuffer[][] inputs, ByteBuffer[][] outputs,
int erasedLocationToFix, int bufSize,
boolean isDirect) throws IOException {
final int numDataUnits = rsRawDecoder.getNumDataUnits();
final int numParityUnits = rsRawDecoder.getNumParityUnits();
final int subPacketSize = getSubPacketSize();
int[][] inputPositions = new int[subPacketSize][inputs[0].length];
for (int i = 0; i < subPacketSize; ++i) {
for (int j = 0; j < inputs[i].length; ++j) {
if (inputs[i][j] != null) {
inputPositions[i][j] = inputs[i][j].position();
}
}
}
ByteBuffer[] tempInputs = new ByteBuffer[numDataUnits + numParityUnits];
for (int i = 0; i < tempInputs.length; ++i) {
tempInputs[i] = inputs[1][i];
}
ByteBuffer[][] tmpOutputs = new ByteBuffer[subPacketSize][numParityUnits];
for (int i = 0; i < getSubPacketSize(); ++i) {
for (int j = 0; j < erasedIndexes.length; ++j) {
tmpOutputs[i][j] = outputs[i][j];
}
for (int m = erasedIndexes.length; m < numParityUnits; ++m) {
tmpOutputs[i][m] = HHUtil.allocateByteBuffer(isDirect, bufSize);
}
}
// First consider the second subPacket
int[] erasedLocation = new int[numParityUnits];
erasedLocation[0] = erasedLocationToFix;
// assign the erased locations based on the locations not read for
// second subPacket but from decoding
for (int i = 1; i < numParityUnits; i++) {
erasedLocation[i] = numDataUnits + i;
tempInputs[numDataUnits + i] = null;
}
rsRawDecoder.decode(tempInputs, erasedLocation, tmpOutputs[1]);
int piggyBackParityIndex = piggyBackFullIndex[erasedLocationToFix];
ByteBuffer piggyBack = HHUtil.getPiggyBackForDecode(inputs, tmpOutputs,
piggyBackParityIndex, numDataUnits, numParityUnits, pbIndex);
// Second consider the first subPacket.
// get the value of the piggyback associated with the erased location
if (isDirect) {
// decode the erased value in the first subPacket by using the piggyback
int idxToWrite = 0;
doDecodeByPiggyBack(inputs[0], tmpOutputs[0][idxToWrite], piggyBack,
erasedLocationToFix);
} else {
ByteBuffer buffer;
byte[][][] newInputs = new byte[getSubPacketSize()][inputs[0].length][];
int[][] inputOffsets = new int[getSubPacketSize()][inputs[0].length];
byte[][][] newOutputs = new byte[getSubPacketSize()][numParityUnits][];
int[][] outOffsets = new int[getSubPacketSize()][numParityUnits];
for (int i = 0; i < getSubPacketSize(); ++i) {
for (int j = 0; j < inputs[0].length; ++j) {
buffer = inputs[i][j];
if (buffer != null) {
inputOffsets[i][j] = buffer.arrayOffset() + buffer.position();
newInputs[i][j] = buffer.array();
}
}
}
for (int i = 0; i < getSubPacketSize(); ++i) {
for (int j = 0; j < numParityUnits; ++j) {
buffer = tmpOutputs[i][j];
if (buffer != null) {
outOffsets[i][j] = buffer.arrayOffset() + buffer.position();
newOutputs[i][j] = buffer.array();
}
}
}
byte[] newPiggyBack = piggyBack.array();
// decode the erased value in the first subPacket by using the piggyback
int idxToWrite = 0;
doDecodeByPiggyBack(newInputs[0], inputOffsets[0],
newOutputs[0][idxToWrite], outOffsets[0][idxToWrite],
newPiggyBack, erasedLocationToFix, bufSize);
}
for (int i = 0; i < subPacketSize; ++i) {
for (int j = 0; j < inputs[i].length; ++j) {
if (inputs[i][j] != null) {
inputs[i][j].position(inputPositions[i][j] + bufSize);
}
}
}
}
private void doDecodeByPiggyBack(ByteBuffer[] inputs,
ByteBuffer outputs,
ByteBuffer piggyBack,
int erasedLocationToFix) {
final int thisPiggyBackSetIdx = piggyBackFullIndex[erasedLocationToFix];
final int startIndex = piggyBackIndex[thisPiggyBackSetIdx - 1];
final int endIndex = piggyBackIndex[thisPiggyBackSetIdx];
// recover first sub-stripe data by XOR piggyback
int bufSize = piggyBack.remaining();
for (int i = piggyBack.position();
i < piggyBack.position() + bufSize; i++) {
for (int j = startIndex; j < endIndex; j++) {
if (inputs[j] != null) {
piggyBack.put(i, (byte)
(piggyBack.get(i) ^ inputs[j].get(inputs[j].position() + i)));
}
}
outputs.put(outputs.position() + i, piggyBack.get(i));
}
}
private void doDecodeByPiggyBack(byte[][] inputs, int[] inputOffsets,
byte[] outputs, int outOffset,
byte[] piggyBack, int erasedLocationToFix,
int bufSize) {
final int thisPiggyBackSetIdx = piggyBackFullIndex[erasedLocationToFix];
final int startIndex = piggyBackIndex[thisPiggyBackSetIdx - 1];
final int endIndex = piggyBackIndex[thisPiggyBackSetIdx];
// recover first sub-stripe data by XOR piggyback
for (int i = 0; i < bufSize; i++) {
for (int j = startIndex; j < endIndex; j++) {
if (inputs[j] != null) {
piggyBack[i] = (byte) (piggyBack[i] ^ inputs[j][i + inputOffsets[j]]);
}
}
outputs[i + outOffset] = piggyBack[i];
}
}
private void doDecodeMultiAndParity(ByteBuffer[][] inputs,
ByteBuffer[][] outputs,
int[] erasedLocationToFix, int bufSize)
throws IOException {
final int numDataUnits = rsRawDecoder.getNumDataUnits();
final int numParityUnits = rsRawDecoder.getNumParityUnits();
final int numTotalUnits = numDataUnits + numParityUnits;
int[] parityToFixFlag = new int[numTotalUnits];
for (int i = 0; i < erasedLocationToFix.length; ++i) {
if (erasedLocationToFix[i] >= numDataUnits) {
parityToFixFlag[erasedLocationToFix[i]] = 1;
}
}
int[] inputPositions = new int[inputs[0].length];
for (int i = 0; i < inputPositions.length; i++) {
if (inputs[0][i] != null) {
inputPositions[i] = inputs[0][i].position();
}
}
// decoded first sub-stripe
rsRawDecoder.decode(inputs[0], erasedLocationToFix, outputs[0]);
for (int i = 0; i < inputs[0].length; i++) {
if (inputs[0][i] != null) {
// dataLen bytes consumed
inputs[0][i].position(inputPositions[i]);
}
}
ByteBuffer[] tempInput = new ByteBuffer[numDataUnits];
for (int i = 0; i < numDataUnits; ++i) {
tempInput[i] = inputs[0][i];
//
// if (!isDirect && tempInput[i] != null) {
// tempInput[i].position(tempInput[i].position() - bufSize);
// }
}
for (int i = 0; i < erasedLocationToFix.length; ++i) {
if (erasedLocationToFix[i] < numDataUnits) {
tempInput[erasedLocationToFix[i]] = outputs[0][i];
}
}
ByteBuffer[] piggyBack = HHUtil.getPiggyBacksFromInput(tempInput,
piggyBackIndex, numParityUnits, 0, xorRawEncoder);
for (int j = numDataUnits + 1; j < numTotalUnits; ++j) {
if (parityToFixFlag[j] == 0 && inputs[1][j] != null) {
// f(b) + f(a1,a2,a3....)
for (int k = inputs[1][j].position(),
m = piggyBack[j - numDataUnits - 1].position();
k < inputs[1][j].limit(); ++k, ++m) {
inputs[1][j].put(k, (byte)
(inputs[1][j].get(k) ^
piggyBack[j - numDataUnits - 1].get(m)));
}
}
}
// decoded second sub-stripe
rsRawDecoder.decode(inputs[1], erasedLocationToFix, outputs[1]);
// parity index = 0, the data have no piggyBack
for (int j = 0; j < erasedLocationToFix.length; ++j) {
if (erasedLocationToFix[j] < numTotalUnits
&& erasedLocationToFix[j] > numDataUnits) {
int parityIndex = erasedLocationToFix[j] - numDataUnits - 1;
for (int k = outputs[1][j].position(),
m = piggyBack[parityIndex].position();
k < outputs[1][j].limit(); ++k, ++m) {
outputs[1][j].put(k, (byte)
(outputs[1][j].get(k) ^ piggyBack[parityIndex].get(m)));
}
}
}
for (int i = 0; i < inputs[0].length; i++) {
if (inputs[0][i] != null) {
// dataLen bytes consumed
inputs[0][i].position(inputPositions[i] + bufSize);
}
}
}
}
|
HHXORErasureDecodingStep
|
java
|
netty__netty
|
transport-native-epoll/src/test/java/io/netty/channel/epoll/EpollDatagramConnectNotExistsTest.java
|
{
"start": 869,
"end": 1126
}
|
class ____ extends DatagramConnectNotExistsTest {
@Override
protected List<TestsuitePermutation.BootstrapFactory<Bootstrap>> newFactories() {
return EpollSocketTestPermutation.INSTANCE.datagramSocket();
}
}
|
EpollDatagramConnectNotExistsTest
|
java
|
hibernate__hibernate-orm
|
hibernate-envers/src/test/java/org/hibernate/orm/test/envers/integration/accesstype/MixedAccessTypeTest.java
|
{
"start": 715,
"end": 2222
}
|
class ____ {
private Integer id1;
@BeforeClassTemplate
public void initData(EntityManagerFactoryScope scope) {
scope.inTransaction(
em -> {
MixedAccessTypeEntity mate = new MixedAccessTypeEntity( "data" );
em.persist( mate );
id1 = mate.readId();
em.getTransaction().commit();
em.getTransaction().begin();
mate = em.find( MixedAccessTypeEntity.class, id1 );
mate.writeData( "data2" );
}
);
}
@Test
public void testRevisionsCounts(EntityManagerFactoryScope scope) {
scope.inEntityManager(
em -> {
List<Number> revisions = AuditReaderFactory.get( em )
.getRevisions( MixedAccessTypeEntity.class, id1 );
assertThat( revisions ).isEqualTo( Arrays.asList( 1, 2 ) );
} );
}
@Test
public void testHistoryOfId1(EntityManagerFactoryScope scope) {
scope.inEntityManager(
entityManager -> {
MixedAccessTypeEntity ver1 = new MixedAccessTypeEntity( id1, "data" );
MixedAccessTypeEntity ver2 = new MixedAccessTypeEntity( id1, "data2" );
MixedAccessTypeEntity rev1 = AuditReaderFactory.get( entityManager )
.find( MixedAccessTypeEntity.class, id1, 1 );
MixedAccessTypeEntity rev2 = AuditReaderFactory.get( entityManager )
.find( MixedAccessTypeEntity.class, id1, 2 );
assertThat( rev1.isDataSet() ).isTrue();
assertThat( rev2.isDataSet() ).isTrue();
assertThat( rev1.equals( ver1 ) ).isTrue();
assertThat( rev2.equals( ver2 ) ).isTrue();
}
);
}
}
|
MixedAccessTypeTest
|
java
|
elastic__elasticsearch
|
x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ValueExtractorForBytesRef.java
|
{
"start": 686,
"end": 1692
}
|
class ____ implements ValueExtractor {
static ValueExtractorForBytesRef extractorFor(TopNEncoder encoder, boolean inKey, BytesRefBlock block) {
BytesRefVector vector = block.asVector();
if (vector != null) {
return new ValueExtractorForBytesRef.ForVector(encoder, inKey, vector);
}
return new ValueExtractorForBytesRef.ForBlock(encoder, inKey, block);
}
private final TopNEncoder encoder;
protected final BytesRef scratch = new BytesRef();
protected final boolean inKey;
ValueExtractorForBytesRef(TopNEncoder encoder, boolean inKey) {
this.encoder = encoder;
this.inKey = inKey;
}
protected final void writeCount(BreakingBytesRefBuilder values, int count) {
TopNEncoder.DEFAULT_UNSORTABLE.encodeVInt(count, values);
}
protected final void actualWriteValue(BreakingBytesRefBuilder values, BytesRef value) {
encoder.encodeBytesRef(value, values);
}
static
|
ValueExtractorForBytesRef
|
java
|
apache__camel
|
components/camel-json-patch/src/main/java/org/apache/camel/component/jsonpatch/JsonPatchComponent.java
|
{
"start": 1061,
"end": 2539
}
|
class ____ extends DefaultComponent {
@Metadata(defaultValue = "true", description = "Sets whether to use resource content cache or not")
private boolean contentCache = true;
@Metadata
private boolean allowTemplateFromHeader;
protected Endpoint createEndpoint(String uri, String remaining, Map<String, Object> parameters) throws Exception {
JsonPatchEndpoint endpoint = new JsonPatchEndpoint(uri, this, remaining);
endpoint.setContentCache(contentCache);
endpoint.setAllowTemplateFromHeader(allowTemplateFromHeader);
setProperties(endpoint, parameters);
return endpoint;
}
public boolean isContentCache() {
return contentCache;
}
/**
* Sets whether to use resource content cache or not
*/
public void setContentCache(boolean contentCache) {
this.contentCache = contentCache;
}
public boolean isAllowTemplateFromHeader() {
return allowTemplateFromHeader;
}
/**
* Whether to allow to use resource template from header or not (default false).
*
* Enabling this allows to specify dynamic templates via message header. However this can be seen as a potential
* security vulnerability if the header is coming from a malicious user, so use this with care.
*/
public void setAllowTemplateFromHeader(boolean allowTemplateFromHeader) {
this.allowTemplateFromHeader = allowTemplateFromHeader;
}
}
|
JsonPatchComponent
|
java
|
elastic__elasticsearch
|
modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/EstonianAnalyzerProvider.java
|
{
"start": 884,
"end": 1461
}
|
class ____ extends AbstractIndexAnalyzerProvider<EstonianAnalyzer> {
private final EstonianAnalyzer analyzer;
EstonianAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) {
super(name);
analyzer = new EstonianAnalyzer(
Analysis.parseStopWords(env, settings, EstonianAnalyzer.getDefaultStopSet()),
Analysis.parseStemExclusion(settings, CharArraySet.EMPTY_SET)
);
}
@Override
public EstonianAnalyzer get() {
return this.analyzer;
}
}
|
EstonianAnalyzerProvider
|
java
|
spring-projects__spring-framework
|
spring-beans/src/test/java/org/springframework/beans/factory/DefaultListableBeanFactoryTests.java
|
{
"start": 146255,
"end": 146528
}
|
class ____ {
final NonPublicEnum nonPublicEnum;
public NonPublicEnumHolder(NonPublicEnum nonPublicEnum) {
this.nonPublicEnum = nonPublicEnum;
}
public NonPublicEnum getNonPublicEnum() {
return nonPublicEnum;
}
}
@Order
private static
|
NonPublicEnumHolder
|
java
|
spring-projects__spring-framework
|
spring-webmvc/src/main/java/org/springframework/web/servlet/FrameworkServlet.java
|
{
"start": 8480,
"end": 9064
}
|
class ____ create. */
private Class<?> contextClass = DEFAULT_CONTEXT_CLASS;
/** WebApplicationContext id to assign. */
private @Nullable String contextId;
/** Namespace for this servlet. */
private @Nullable String namespace;
/** Explicit context config location. */
private @Nullable String contextConfigLocation;
/** Actual ApplicationContextInitializer instances to apply to the context. */
private final List<ApplicationContextInitializer<ConfigurableApplicationContext>> contextInitializers =
new ArrayList<>();
/** Comma-delimited ApplicationContextInitializer
|
to
|
java
|
spring-projects__spring-boot
|
module/spring-boot-data-redis/src/main/java/org/springframework/boot/data/redis/autoconfigure/observation/LettuceObservationAutoConfiguration.java
|
{
"start": 1760,
"end": 2004
}
|
class ____ {
@Bean
ClientResourcesBuilderCustomizer lettuceObservation(ObservationRegistry observationRegistry) {
return (client) -> client.tracing(new MicrometerTracing(observationRegistry, "Redis"));
}
}
|
LettuceObservationAutoConfiguration
|
java
|
apache__flink
|
flink-table/flink-table-runtime/src/main/java/org/apache/flink/table/runtime/operators/process/RuntimeTableSemantics.java
|
{
"start": 1313,
"end": 3460
}
|
class ____ implements TableSemantics, Serializable {
private static final long serialVersionUID = 1L;
private final String argName;
private final int inputIndex;
private final DataType dataType;
private final int[] partitionByColumns;
private final RuntimeChangelogMode consumedChangelogMode;
private final boolean passColumnsThrough;
private final boolean hasSetSemantics;
private final int timeColumn;
private transient ChangelogMode changelogMode;
public RuntimeTableSemantics(
String argName,
int inputIndex,
DataType dataType,
int[] partitionByColumns,
RuntimeChangelogMode consumedChangelogMode,
boolean passColumnsThrough,
boolean hasSetSemantics,
int timeColumn) {
this.argName = argName;
this.inputIndex = inputIndex;
this.dataType = dataType;
this.partitionByColumns = partitionByColumns;
this.consumedChangelogMode = consumedChangelogMode;
this.passColumnsThrough = passColumnsThrough;
this.hasSetSemantics = hasSetSemantics;
this.timeColumn = timeColumn;
}
public String getArgName() {
return argName;
}
public int getInputIndex() {
return inputIndex;
}
public boolean passColumnsThrough() {
return passColumnsThrough;
}
public boolean hasSetSemantics() {
return hasSetSemantics;
}
public ChangelogMode getChangelogMode() {
if (changelogMode == null) {
changelogMode = consumedChangelogMode.deserialize();
}
return changelogMode;
}
@Override
public DataType dataType() {
return dataType;
}
@Override
public int[] partitionByColumns() {
return partitionByColumns;
}
@Override
public int[] orderByColumns() {
return new int[0];
}
@Override
public int timeColumn() {
return timeColumn;
}
@Override
public Optional<ChangelogMode> changelogMode() {
return Optional.of(getChangelogMode());
}
}
|
RuntimeTableSemantics
|
java
|
apache__camel
|
components/camel-braintree/src/generated/java/org/apache/camel/component/braintree/internal/MerchantAccountGatewayApiMethod.java
|
{
"start": 657,
"end": 2024
}
|
enum ____ implements ApiMethod {
ALL(
com.braintreegateway.PaginatedCollection.class,
"all"),
CREATE_FOR_CURRENCY(
com.braintreegateway.Result.class,
"createForCurrency",
arg("currencyRequest", com.braintreegateway.MerchantAccountCreateForCurrencyRequest.class)),
FETCH_MERCHANT_ACCOUNTS(
com.braintreegateway.PaginatedResult.class,
"fetchMerchantAccounts",
arg("page", int.class)),
FIND(
com.braintreegateway.MerchantAccount.class,
"find",
arg("id", String.class));
private final ApiMethod apiMethod;
MerchantAccountGatewayApiMethod(Class<?> resultType, String name, ApiMethodArg... args) {
this.apiMethod = new ApiMethodImpl(MerchantAccountGateway.class, resultType, name, args);
}
@Override
public String getName() { return apiMethod.getName(); }
@Override
public Class<?> getResultType() { return apiMethod.getResultType(); }
@Override
public List<String> getArgNames() { return apiMethod.getArgNames(); }
@Override
public List<String> getSetterArgNames() { return apiMethod.getSetterArgNames(); }
@Override
public List<Class<?>> getArgTypes() { return apiMethod.getArgTypes(); }
@Override
public Method getMethod() { return apiMethod.getMethod(); }
}
|
MerchantAccountGatewayApiMethod
|
java
|
google__guava
|
android/guava-tests/test/com/google/common/util/concurrent/AbstractServiceTest.java
|
{
"start": 15398,
"end": 23884
}
|
class ____ extends AbstractService {
final CountDownLatch hasConfirmedIsRunning = new CountDownLatch(1);
/*
* The main test thread tries to stop() the service shortly after
* confirming that it is running. Meanwhile, the service itself is trying
* to confirm that it is running. If the main thread's stop() call happens
* before it has the chance, the test will fail. To avoid this, the main
* thread calls this method, which waits until the service has performed
* its own "running" check.
*/
void awaitRunChecks() throws InterruptedException {
assertTrue(
"Service thread hasn't finished its checks. "
+ "Exception status (possibly stale): "
+ thrownByExecutionThread,
hasConfirmedIsRunning.await(10, SECONDS));
}
@Override
protected void doStart() {
assertEquals(State.STARTING, state());
invokeOnExecutionThreadForTest(
new Runnable() {
@Override
public void run() {
assertEquals(State.STARTING, state());
notifyStarted();
assertEquals(State.RUNNING, state());
hasConfirmedIsRunning.countDown();
}
});
}
@Override
protected void doStop() {
assertEquals(State.STOPPING, state());
invokeOnExecutionThreadForTest(
new Runnable() {
@Override
public void run() {
assertEquals(State.STOPPING, state());
notifyStopped();
assertEquals(State.TERMINATED, state());
}
});
}
}
private void invokeOnExecutionThreadForTest(Runnable runnable) {
executionThread = new Thread(runnable);
executionThread.setUncaughtExceptionHandler(
new UncaughtExceptionHandler() {
@Override
public void uncaughtException(Thread thread, Throwable e) {
thrownByExecutionThread = e;
}
});
executionThread.start();
}
private static void throwIfSet(Throwable t) throws Throwable {
if (t != null) {
throw t;
}
}
public void testStopUnstartedService() throws Exception {
NoOpService service = new NoOpService();
RecordingListener listener = RecordingListener.record(service);
service.stopAsync();
assertEquals(State.TERMINATED, service.state());
assertThrows(IllegalStateException.class, () -> service.startAsync());
assertEquals(State.TERMINATED, Iterables.getOnlyElement(listener.getStateHistory()));
}
public void testFailingServiceStartAndWait() throws Exception {
StartFailingService service = new StartFailingService();
RecordingListener listener = RecordingListener.record(service);
IllegalStateException e =
assertThrows(IllegalStateException.class, () -> service.startAsync().awaitRunning());
assertEquals(EXCEPTION, service.failureCause());
assertThat(e).hasCauseThat().isEqualTo(EXCEPTION);
assertEquals(ImmutableList.of(State.STARTING, State.FAILED), listener.getStateHistory());
}
public void testFailingServiceStopAndWait_stopFailing() throws Exception {
StopFailingService service = new StopFailingService();
RecordingListener listener = RecordingListener.record(service);
service.startAsync().awaitRunning();
IllegalStateException e =
assertThrows(IllegalStateException.class, () -> service.stopAsync().awaitTerminated());
assertEquals(EXCEPTION, service.failureCause());
assertThat(e).hasCauseThat().isEqualTo(EXCEPTION);
assertEquals(
ImmutableList.of(State.STARTING, State.RUNNING, State.STOPPING, State.FAILED),
listener.getStateHistory());
}
public void testFailingServiceStopAndWait_runFailing() throws Exception {
RunFailingService service = new RunFailingService();
RecordingListener listener = RecordingListener.record(service);
service.startAsync();
IllegalStateException e =
assertThrows(IllegalStateException.class, () -> service.awaitRunning());
assertEquals(EXCEPTION, service.failureCause());
assertThat(e).hasCauseThat().isEqualTo(EXCEPTION);
assertEquals(
ImmutableList.of(State.STARTING, State.RUNNING, State.FAILED), listener.getStateHistory());
}
public void testThrowingServiceStartAndWait() throws Exception {
StartThrowingService service = new StartThrowingService();
RecordingListener listener = RecordingListener.record(service);
IllegalStateException e =
assertThrows(IllegalStateException.class, () -> service.startAsync().awaitRunning());
assertEquals(service.exception, service.failureCause());
assertThat(e).hasCauseThat().isEqualTo(service.exception);
assertEquals(ImmutableList.of(State.STARTING, State.FAILED), listener.getStateHistory());
}
public void testThrowingServiceStopAndWait_stopThrowing() throws Exception {
StopThrowingService service = new StopThrowingService();
RecordingListener listener = RecordingListener.record(service);
service.startAsync().awaitRunning();
IllegalStateException e =
assertThrows(IllegalStateException.class, () -> service.stopAsync().awaitTerminated());
assertEquals(service.exception, service.failureCause());
assertThat(e).hasCauseThat().isEqualTo(service.exception);
assertEquals(
ImmutableList.of(State.STARTING, State.RUNNING, State.STOPPING, State.FAILED),
listener.getStateHistory());
}
public void testThrowingServiceStopAndWait_runThrowing() throws Exception {
RunThrowingService service = new RunThrowingService();
RecordingListener listener = RecordingListener.record(service);
service.startAsync();
IllegalStateException e =
assertThrows(IllegalStateException.class, () -> service.awaitTerminated());
assertEquals(service.exception, service.failureCause());
assertThat(e).hasCauseThat().isEqualTo(service.exception);
assertEquals(
ImmutableList.of(State.STARTING, State.RUNNING, State.FAILED), listener.getStateHistory());
}
public void testFailureCause_throwsIfNotFailed() {
StopFailingService service = new StopFailingService();
assertThrows(IllegalStateException.class, () -> service.failureCause());
service.startAsync().awaitRunning();
assertThrows(IllegalStateException.class, () -> service.failureCause());
IllegalStateException e =
assertThrows(IllegalStateException.class, () -> service.stopAsync().awaitTerminated());
assertEquals(EXCEPTION, service.failureCause());
assertThat(e).hasCauseThat().isEqualTo(EXCEPTION);
}
public void testAddListenerAfterFailureDoesntCauseDeadlock() throws InterruptedException {
StartFailingService service = new StartFailingService();
service.startAsync();
assertEquals(State.FAILED, service.state());
service.addListener(new RecordingListener(service), directExecutor());
Thread thread =
new Thread() {
@Override
public void run() {
// Internally stopAsync() grabs a lock, this could be any such method on
// AbstractService.
service.stopAsync();
}
};
thread.start();
thread.join(LONG_TIMEOUT_MILLIS);
assertFalse(thread + " is deadlocked", thread.isAlive());
}
public void testListenerDoesntDeadlockOnStartAndWaitFromRunning() throws Exception {
NoOpThreadedService service = new NoOpThreadedService();
service.addListener(
new Listener() {
@Override
public void running() {
service.awaitRunning();
}
},
directExecutor());
service.startAsync().awaitRunning(LONG_TIMEOUT_MILLIS, MILLISECONDS);
service.stopAsync();
}
public void testListenerDoesntDeadlockOnStopAndWaitFromTerminated() throws Exception {
NoOpThreadedService service = new NoOpThreadedService();
service.addListener(
new Listener() {
@Override
public void terminated(State from) {
service.stopAsync().awaitTerminated();
}
},
directExecutor());
service.startAsync().awaitRunning();
Thread thread =
new Thread() {
@Override
public void run() {
service.stopAsync().awaitTerminated();
}
};
thread.start();
thread.join(LONG_TIMEOUT_MILLIS);
assertFalse(thread + " is deadlocked", thread.isAlive());
}
private static
|
ThreadedService
|
java
|
dropwizard__dropwizard
|
dropwizard-testing/src/test/java/io/dropwizard/testing/app/TestResource.java
|
{
"start": 108,
"end": 465
}
|
class ____ {
public static final String DEFAULT_MESSAGE = "Default message";
private final String message;
public TestResource() {
this(DEFAULT_MESSAGE);
}
public TestResource(String message) {
this.message = message;
}
@Path("test")
@GET
public String test() {
return message;
}
}
|
TestResource
|
java
|
mapstruct__mapstruct
|
processor/src/test/java/org/mapstruct/ap/test/nestedbeans/unmappable/erroneous/UnmappableSourceDeepListMapper.java
|
{
"start": 603,
"end": 671
}
|
class ____ extends BaseDeepListMapper {
}
|
UnmappableSourceDeepListMapper
|
java
|
elastic__elasticsearch
|
libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/PolicyParserTests.java
|
{
"start": 1992,
"end": 2068
}
|
class ____ implements Entitlement {}
public static
|
TestWrongEntitlementName
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/polymorphic/UnmappedPolymorphismQueryTest.java
|
{
"start": 5191,
"end": 5483
}
|
class ____ implements IEntityC {
@Id
private Long id;
@ManyToOne( targetEntity = EntityB.class )
private IEntityB b;
@Override
public Long getId() {
return id;
}
public EntityC() {
}
public EntityC(Long id, IEntityB b) {
this.id = id;
this.b = b;
}
}
}
|
EntityC
|
java
|
apache__maven
|
src/mdo/java/InputLocation.java
|
{
"start": 16747,
"end": 17132
}
|
interface ____ {
// -----------/
// - Methods -/
// -----------/
/**
* Method toString.
*/
String toString(InputLocation location);
}
#end
@Override
public String toString() {
return String.format("%s @ %d:%d", source != null ? source.getLocation() : "n/a", lineNumber, columnNumber);
}
}
|
StringFormatter
|
java
|
assertj__assertj-core
|
assertj-core/src/main/java/org/assertj/core/internal/TypeHolder.java
|
{
"start": 6184,
"end": 6543
}
|
class ____ the given type from the giving collection of types.
* <p>
* The order of checks is the following:
* <ol>
* <li>If there is a registered message for {@code clazz} then this one is used</li>
* <li>We check if there is a registered message for a superclass of {@code clazz}</li>
* <li>We check if there is a registered message for an
|
for
|
java
|
mapstruct__mapstruct
|
processor/src/test/java/org/mapstruct/ap/test/nestedbeans/maps/AntonymsDictionary.java
|
{
"start": 253,
"end": 1101
}
|
class ____ {
private Map<Word, Word> antonyms;
public AntonymsDictionary() {
}
public AntonymsDictionary(Map<Word, Word> antonyms) {
this.antonyms = antonyms;
}
public Map<Word, Word> getAntonyms() {
return antonyms;
}
public void setAntonyms(Map<Word, Word> antonyms) {
this.antonyms = antonyms;
}
@Override
public boolean equals(Object o) {
if ( this == o ) {
return true;
}
if ( o == null || getClass() != o.getClass() ) {
return false;
}
AntonymsDictionary antonymsDictionary = (AntonymsDictionary) o;
return Objects.equals( antonyms, antonymsDictionary.antonyms );
}
@Override
public int hashCode() {
return antonyms != null ? antonyms.hashCode() : 0;
}
}
|
AntonymsDictionary
|
java
|
apache__maven
|
api/maven-api-core/src/main/java/org/apache/maven/api/Type.java
|
{
"start": 3089,
"end": 3357
}
|
class ____
* or module path. The path (classes or modules) is chosen by the plugin, possibly using heuristic rules.
*/
String PROCESSOR = "processor";
/**
* Artifact type name for a JAR file to unconditionally place on the annotation processor
|
path
|
java
|
apache__camel
|
core/camel-core/src/test/java/org/apache/camel/processor/intercept/InterceptDoTryCatchTest.java
|
{
"start": 983,
"end": 2048
}
|
class ____ extends ContextTestSupport {
@Test
public void testIntercept() throws Exception {
getMockEndpoint("mock:foo").expectedMessageCount(1);
getMockEndpoint("mock:bar").expectedMessageCount(1);
getMockEndpoint("mock:result").expectedMessageCount(1);
getMockEndpoint("mock:intercepted").expectedMessageCount(4);
template.sendBody("direct:start", "Hello World");
assertMockEndpointsSatisfied();
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
intercept().to("mock:intercepted");
from("direct:start")
.to("mock:foo")
.doTry()
.throwException(new IllegalArgumentException("Forced"))
.doCatch(Exception.class)
.to("mock:bar")
.end()
.to("mock:result");
}
};
}
}
|
InterceptDoTryCatchTest
|
java
|
quarkusio__quarkus
|
extensions/resteasy-classic/resteasy/runtime/src/main/java/io/quarkus/resteasy/runtime/standalone/BufferAllocator.java
|
{
"start": 89,
"end": 327
}
|
interface ____ {
ByteBuf allocateBuffer();
ByteBuf allocateBuffer(boolean direct);
ByteBuf allocateBuffer(int bufferSize);
ByteBuf allocateBuffer(boolean direct, int bufferSize);
int getBufferSize();
}
|
BufferAllocator
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/CanonicalDurationTest.java
|
{
"start": 4441,
"end": 4895
}
|
class ____ {
{
standardSeconds(86400);
standardSeconds(0).getStandardSeconds();
}
}
""")
.addOutputLines(
"out/A.java",
"""
package a;
import static org.joda.time.Duration.standardDays;
import static org.joda.time.Duration.standardSeconds;
import org.joda.time.Duration;
public
|
A
|
java
|
greenrobot__greendao
|
DaoCore/src/main/java/org/greenrobot/greendao/test/AbstractDaoSessionTest.java
|
{
"start": 969,
"end": 1113
}
|
class ____ DAO (master) related testing.
*
* @author Markus
*
* @param <T>
* Type of a concrete DAO master
*/
public abstract
|
for
|
java
|
apache__camel
|
components/camel-sjms/src/main/java/org/apache/camel/component/sjms/consumer/EndpointMessageListener.java
|
{
"start": 15453,
"end": 19730
}
|
class ____ implements AsyncCallback {
private final Session session;
private final Message message;
private final Exchange exchange;
private final SjmsEndpoint endpoint;
private final boolean sendReply;
private final Object replyDestination;
private EndpointMessageListenerAsyncCallback(Session session, Message message, Exchange exchange, SjmsEndpoint endpoint,
boolean sendReply, Object replyDestination) {
this.session = session;
this.message = message;
this.exchange = exchange;
this.endpoint = endpoint;
this.sendReply = sendReply;
this.replyDestination = replyDestination;
}
@Override
public void done(boolean doneSync) {
LOG.trace("onMessage.process END");
// now we evaluate the processing of the exchange and determine if it was a success or failure
// we also grab information from the exchange to be used for sending back a reply (if we are to do so)
// so the following logic seems a bit complicated at first glance
// if we send back a reply it can either be the message body or transferring a caused exception
org.apache.camel.Message body = null;
Exception cause = null;
RuntimeCamelException rce = null;
if (exchange.isFailed() || exchange.isRollbackOnly()) {
if (exchange.isRollbackOnly()) {
// rollback only so wrap an exception so we can rethrow the exception to cause rollback
rce = wrapRuntimeCamelException(new RollbackExchangeException(exchange));
} else if (exchange.getException() != null) {
// an exception occurred while processing
if (endpoint.isTransferException()) {
// send the exception as reply, so null body and set the exception as the cause
body = null;
cause = exchange.getException();
} else {
// only throw exception if endpoint is not configured to transfer exceptions back to caller
// do not send a reply but wrap and rethrow the exception
rce = wrapRuntimeCamelException(exchange.getException());
}
}
} else {
// process OK so get the reply body if we are InOut and has a body
// If the ppl don't want to send the message back, he should use the InOnly
if (sendReply && exchange.getPattern().isOutCapable()) {
body = exchange.getMessage();
cause = null;
}
}
// send back reply if there was no error and we are supposed to send back a reply
if (rce == null && sendReply && (body != null || cause != null)) {
LOG.trace("onMessage.sendReply START");
if (replyDestination instanceof Destination) {
sendReply(session, (Destination) replyDestination, message, exchange, body, cause);
} else {
sendReply(session, (String) replyDestination, message, exchange, body, cause);
}
LOG.trace("onMessage.sendReply END");
}
// if an exception occurred
if (rce != null) {
if (doneSync) {
// we were done sync, so put exception on exchange, so we can grab it in the onMessage
// method and rethrow it
exchange.setException(rce);
} else {
// we were done async, so use the exception handler
if (endpoint.getExceptionHandler() != null) {
endpoint.getExceptionHandler().handleException(rce);
}
}
}
if (!doneSync) {
// release back when in asynchronous mode
consumer.releaseExchange(exchange, false);
}
}
}
}
|
EndpointMessageListenerAsyncCallback
|
java
|
quarkusio__quarkus
|
integration-tests/gradle/src/test/java/io/quarkus/gradle/devmode/IncludedKotlinKMPBuildDevModeTest.java
|
{
"start": 102,
"end": 526
}
|
class ____ extends QuarkusDevGradleTestBase {
@Override
protected String projectDirectoryName() {
return "included-kotlin-kmp-build";
}
@Override
protected void testDevMode() throws Exception {
assertThat(getHttpResponse("/hello/kmp")).contains("hi from KMP");
assertThat(getHttpResponse("/hello/jvm")).contains("hi from JVM-only sources");
}
}
|
IncludedKotlinKMPBuildDevModeTest
|
java
|
apache__camel
|
dsl/camel-componentdsl/src/generated/java/org/apache/camel/builder/component/dsl/JoltComponentBuilderFactory.java
|
{
"start": 1363,
"end": 1798
}
|
interface ____ {
/**
* JOLT (camel-jolt)
* JSON to JSON transformation using JOLT.
*
* Category: transformation
* Since: 2.16
* Maven coordinates: org.apache.camel:camel-jolt
*
* @return the dsl builder
*/
static JoltComponentBuilder jolt() {
return new JoltComponentBuilderImpl();
}
/**
* Builder for the JOLT component.
*/
|
JoltComponentBuilderFactory
|
java
|
micronaut-projects__micronaut-core
|
inject-java/src/test/groovy/io/micronaut/inject/dependent/factory/MyBean2Factory.java
|
{
"start": 304,
"end": 711
}
|
class ____ {
public static int beanCreated;
public static int beanDestroyed;
public static int destroyed;
@Bean
MyBean2 myBean2(MyBean3 myBean3) {
beanCreated++;
return new MyBean2(myBean3);
}
@PreDestroy
public void destroyMyFactory() {
TestData.DESTRUCTION_ORDER.add(MyBean2Factory.class.getSimpleName());
destroyed++;
}
}
|
MyBean2Factory
|
java
|
apache__flink
|
flink-filesystems/flink-gs-fs-hadoop/src/test/java/org/apache/flink/fs/gs/storage/MockBlobStorage.java
|
{
"start": 1757,
"end": 2545
}
|
class ____ implements GSBlobStorage.BlobMetadata {
private final BlobValue blobValue;
@Nullable private final String forcedChecksum;
BlobMetadata(BlobValue blobValue, @Nullable String forcedChecksum) {
this.blobValue = blobValue;
this.forcedChecksum = forcedChecksum;
}
@Override
public String getChecksum() {
if (forcedChecksum != null) {
return forcedChecksum;
} else {
int checksum = ChecksumUtils.CRC_HASH_FUNCTION.hashBytes(blobValue.content).asInt();
return ChecksumUtils.convertChecksumToString(checksum);
}
}
}
/** The mock write channel, which writes to the memory-based storage. */
public
|
BlobMetadata
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/boot/models/annotations/internal/ManyToManyJpaAnnotation.java
|
{
"start": 529,
"end": 2684
}
|
class ____ implements ManyToMany,
AttributeMarker.Fetchable,
AttributeMarker.Cascadeable,
AttributeMarker.Mappable {
private java.lang.Class<?> targetEntity;
private jakarta.persistence.CascadeType[] cascade;
private jakarta.persistence.FetchType fetch;
private String mappedBy;
/**
* Used in creating dynamic annotation instances (e.g. from XML)
*/
public ManyToManyJpaAnnotation(ModelsContext modelContext) {
this.targetEntity = void.class;
this.cascade = new jakarta.persistence.CascadeType[0];
this.fetch = jakarta.persistence.FetchType.LAZY;
this.mappedBy = "";
}
/**
* Used in creating annotation instances from JDK variant
*/
public ManyToManyJpaAnnotation(ManyToMany annotation, ModelsContext modelContext) {
this.targetEntity = annotation.targetEntity();
this.cascade = annotation.cascade();
this.fetch = annotation.fetch();
this.mappedBy = annotation.mappedBy();
}
/**
* Used in creating annotation instances from Jandex variant
*/
public ManyToManyJpaAnnotation(Map<String, Object> attributeValues, ModelsContext modelContext) {
this.targetEntity = (Class<?>) attributeValues.get( "targetEntity" );
this.cascade = (jakarta.persistence.CascadeType[]) attributeValues.get( "cascade" );
this.fetch = (jakarta.persistence.FetchType) attributeValues.get( "fetch" );
this.mappedBy = (String) attributeValues.get( "mappedBy" );
}
@Override
public Class<? extends Annotation> annotationType() {
return ManyToMany.class;
}
@Override
public java.lang.Class<?> targetEntity() {
return targetEntity;
}
public void targetEntity(java.lang.Class<?> value) {
this.targetEntity = value;
}
@Override
public jakarta.persistence.CascadeType[] cascade() {
return cascade;
}
public void cascade(jakarta.persistence.CascadeType[] value) {
this.cascade = value;
}
@Override
public jakarta.persistence.FetchType fetch() {
return fetch;
}
public void fetch(jakarta.persistence.FetchType value) {
this.fetch = value;
}
@Override
public String mappedBy() {
return mappedBy;
}
public void mappedBy(String value) {
this.mappedBy = value;
}
}
|
ManyToManyJpaAnnotation
|
java
|
grpc__grpc-java
|
android-interop-testing/src/androidTest/java/io/grpc/android/integrationtest/InteropInstrumentationTest.java
|
{
"start": 1453,
"end": 4865
}
|
class ____ {
private static final int TIMEOUT_SECONDS = 60;
private static final String LOG_TAG = "GrpcInteropInstrumentationTest";
private String host;
private int port;
private boolean useTls;
private String serverHostOverride;
private boolean useTestCa;
private String testCase;
private ExecutorService executor = Executors.newSingleThreadExecutor();
@Before
public void setUp() throws Exception {
host = InstrumentationRegistry.getArguments().getString("server_host", "10.0.2.2");
port =
Integer.parseInt(InstrumentationRegistry.getArguments().getString("server_port", "8080"));
useTls =
Boolean.parseBoolean(InstrumentationRegistry.getArguments().getString("use_tls", "true"));
serverHostOverride = InstrumentationRegistry.getArguments().getString("server_host_override");
useTestCa =
Boolean.parseBoolean(
InstrumentationRegistry.getArguments().getString("use_test_ca", "false"));
testCase = InstrumentationRegistry.getArguments().getString("test_case", "all");
if (useTls) {
try {
ProviderInstaller.installIfNeeded(ApplicationProvider.getApplicationContext());
} catch (GooglePlayServicesRepairableException e) {
// The provider is helpful, but it is possible to succeed without it.
// Hope that the system-provided libraries are new enough.
Log.i(LOG_TAG, "Failed installing security provider", e);
} catch (GooglePlayServicesNotAvailableException e) {
// The provider is helpful, but it is possible to succeed without it.
// Hope that the system-provided libraries are new enough.
Log.i(LOG_TAG, "Failed installing security provider", e);
}
}
}
@Test
public void interopTests() throws Exception {
if (testCase.equals("all")) {
runTest("empty_unary");
runTest("large_unary");
runTest("client_streaming");
runTest("server_streaming");
runTest("ping_pong");
runTest("empty_stream");
runTest("cancel_after_begin");
runTest("cancel_after_first_response");
runTest("full_duplex_call_should_succeed");
runTest("half_duplex_call_should_succeed");
runTest("server_streaming_should_be_flow_controlled");
runTest("very_large_request");
runTest("very_large_response");
runTest("deadline_not_exceeded");
runTest("deadline_exceeded");
runTest("deadline_exceeded_server_streaming");
runTest("unimplemented_method");
runTest("timeout_on_sleeping_server");
runTest("graceful_shutdown");
} else {
runTest(testCase);
}
}
private void runTest(String testCase) throws Exception {
InputStream testCa;
if (useTestCa) {
testCa = ApplicationProvider.getApplicationContext().getResources().openRawResource(R.raw.ca);
} else {
testCa = null;
}
String result = null;
try {
result = executor.submit(new TestCallable(
TesterOkHttpChannelBuilder.build(host, port, serverHostOverride, useTls, testCa),
testCase)).get(TIMEOUT_SECONDS, TimeUnit.SECONDS);
} catch (ExecutionException | InterruptedException | TimeoutException e) {
Log.e(LOG_TAG, "Error while executing test case " + testCase, e);
result = e.getMessage();
}
assertEquals(testCase + " failed", TestCallable.SUCCESS_MESSAGE, result);
}
}
|
InteropInstrumentationTest
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/cluster/routing/RoutingNodes.java
|
{
"start": 48081,
"end": 66154
}
|
class ____ implements Iterator<ShardRouting>, ExistingShardsAllocator.UnassignedAllocationHandler {
private final ListIterator<ShardRouting> iterator;
private ShardRouting current;
public UnassignedIterator() {
this.iterator = unassigned.listIterator();
}
@Override
public boolean hasNext() {
return iterator.hasNext();
}
@Override
public ShardRouting next() {
return current = iterator.next();
}
/**
* Initializes the current unassigned shard and moves it from the unassigned list.
*
* @param existingAllocationId allocation id to use. If null, a fresh allocation id is generated.
*/
@Override
public ShardRouting initialize(
String nodeId,
@Nullable String existingAllocationId,
long expectedShardSize,
RoutingChangesObserver routingChangesObserver
) {
nodes.ensureMutable();
innerRemove();
return nodes.initializeShard(current, nodeId, existingAllocationId, expectedShardSize, routingChangesObserver);
}
/**
* Removes and ignores the unassigned shard (will be ignored for this run, but
* will be added back to unassigned once the metadata is constructed again).
* Typically this is used when an allocation decision prevents a shard from being allocated such
* that subsequent consumers of this API won't try to allocate this shard again.
*
* @param attempt the result of the allocation attempt
*/
@Override
public void removeAndIgnore(AllocationStatus attempt, RoutingChangesObserver changes) {
nodes.ensureMutable();
innerRemove();
ignoreShard(current, attempt, changes);
}
private void updateShardRouting(ShardRouting shardRouting) {
current = shardRouting;
iterator.set(shardRouting);
}
/**
* updates the unassigned info and recovery source on the current unassigned shard
*
* @param unassignedInfo the new unassigned info to use
* @param recoverySource the new recovery source to use
* @return the shard with unassigned info updated
*/
@Override
public ShardRouting updateUnassigned(
UnassignedInfo unassignedInfo,
RecoverySource recoverySource,
RoutingChangesObserver changes
) {
nodes.ensureMutable();
ShardRouting updatedShardRouting = current.updateUnassigned(unassignedInfo, recoverySource);
changes.unassignedInfoUpdated(current, unassignedInfo);
updateShardRouting(updatedShardRouting);
return updatedShardRouting;
}
/**
* Unsupported operation, just there for the interface. Use
* {@link #removeAndIgnore(AllocationStatus, RoutingChangesObserver)} or
* {@link #initialize(String, String, long, RoutingChangesObserver)}.
*/
@Override
public void remove() {
throw new UnsupportedOperationException(
"remove is not supported in unassigned iterator," + " use removeAndIgnore or initialize"
);
}
private void innerRemove() {
iterator.remove();
if (current.primary()) {
primaries--;
}
}
}
/**
* Returns <code>true</code> iff this collection contains one or more non-ignored unassigned shards.
*/
public boolean isEmpty() {
return unassigned.isEmpty();
}
/**
* Returns <code>true</code> iff any unassigned shards are marked as temporarily ignored.
* @see UnassignedShards#ignoreShard(ShardRouting, AllocationStatus, RoutingChangesObserver)
* @see UnassignedIterator#removeAndIgnore(AllocationStatus, RoutingChangesObserver)
*/
public boolean isIgnoredEmpty() {
return ignored.isEmpty();
}
/**
* Drains all unassigned shards and returns it.
* This method will not drain ignored shards.
*/
public ShardRouting[] drain() {
nodes.ensureMutable();
ShardRouting[] mutableShardRoutings = unassigned.toArray(new ShardRouting[unassigned.size()]);
unassigned.clear();
primaries = 0;
return mutableShardRoutings;
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
UnassignedShards that = (UnassignedShards) o;
return primaries == that.primaries
&& ignoredPrimaries == that.ignoredPrimaries
&& unassigned.equals(that.unassigned)
&& ignored.equals(that.ignored);
}
@Override
public int hashCode() {
return Objects.hash(unassigned, ignored, primaries, ignoredPrimaries);
}
}
/**
* Calculates RoutingNodes statistics by iterating over all {@link ShardRouting}s
* in the cluster to ensure the book-keeping is correct.
* For performance reasons, this should only be called from asserts
*
* @return this method always returns <code>true</code> or throws an assertion error. If assertion are not enabled
* this method does nothing.
*/
public static boolean assertShardStats(RoutingNodes routingNodes) {
if (Assertions.ENABLED == false) {
return true;
}
int unassignedPrimaryCount = 0;
int unassignedIgnoredPrimaryCount = 0;
int inactivePrimaryCount = 0;
int inactiveShardCount = 0;
int relocating = 0;
Map<Index, Integer> indicesAndShards = new HashMap<>();
for (RoutingNode node : routingNodes) {
for (ShardRouting shard : node) {
if (shard.initializing() && shard.relocatingNodeId() == null) {
inactiveShardCount++;
if (shard.primary()) {
inactivePrimaryCount++;
}
}
if (shard.relocating()) {
relocating++;
}
Integer i = indicesAndShards.get(shard.index());
if (i == null) {
i = shard.id();
}
indicesAndShards.put(shard.index(), Math.max(i, shard.id()));
}
}
// Assert that the active shard routing are identical.
Set<Map.Entry<Index, Integer>> entries = indicesAndShards.entrySet();
final Map<ShardId, HashSet<ShardRouting>> shardsByShardId = new HashMap<>();
for (final RoutingNode routingNode : routingNodes) {
for (final ShardRouting shardRouting : routingNode) {
final HashSet<ShardRouting> shards = shardsByShardId.computeIfAbsent(
new ShardId(shardRouting.index(), shardRouting.id()),
k -> new HashSet<>()
);
shards.add(shardRouting);
}
}
for (final Map.Entry<Index, Integer> e : entries) {
final Index index = e.getKey();
for (int i = 0; i <= e.getValue(); i++) {
final ShardId shardId = new ShardId(index, i);
final HashSet<ShardRouting> shards = shardsByShardId.get(shardId);
final List<ShardRouting> mutableShardRoutings = routingNodes.assignedShards(shardId);
assert (shards == null && mutableShardRoutings.size() == 0)
|| (shards != null && shards.size() == mutableShardRoutings.size() && shards.containsAll(mutableShardRoutings));
}
}
for (ShardRouting shard : routingNodes.unassigned()) {
if (shard.primary()) {
unassignedPrimaryCount++;
}
}
for (ShardRouting shard : routingNodes.unassigned().ignored()) {
if (shard.primary()) {
unassignedIgnoredPrimaryCount++;
}
}
for (Map.Entry<String, Recoveries> recoveries : routingNodes.recoveriesPerNode.entrySet()) {
String node = recoveries.getKey();
final Recoveries value = recoveries.getValue();
int incoming = 0;
int outgoing = 0;
RoutingNode routingNode = routingNodes.nodesToShards.get(node);
if (routingNode != null) { // node might have dropped out of the cluster
for (ShardRouting routing : routingNode) {
if (routing.initializing()) {
incoming++;
}
if (routing.primary() && routing.isRelocationTarget() == false) {
for (ShardRouting assigned : routingNodes.assignedShards.get(routing.shardId())) {
if (assigned.initializing() && assigned.recoverySource().getType() == RecoverySource.Type.PEER) {
outgoing++;
}
}
}
}
}
assert incoming == value.incoming : incoming + " != " + value.incoming + " node: " + routingNode;
assert outgoing == value.outgoing : outgoing + " != " + value.outgoing + " node: " + routingNode;
}
assert unassignedPrimaryCount == routingNodes.unassignedShards.getNumPrimaries()
: "Unassigned primaries is ["
+ unassignedPrimaryCount
+ "] but RoutingNodes returned unassigned primaries ["
+ routingNodes.unassigned().getNumPrimaries()
+ "]";
assert unassignedIgnoredPrimaryCount == routingNodes.unassignedShards.getNumIgnoredPrimaries()
: "Unassigned ignored primaries is ["
+ unassignedIgnoredPrimaryCount
+ "] but RoutingNodes returned unassigned ignored primaries ["
+ routingNodes.unassigned().getNumIgnoredPrimaries()
+ "]";
assert inactivePrimaryCount == routingNodes.inactivePrimaryCount
: "Inactive Primary count ["
+ inactivePrimaryCount
+ "] but RoutingNodes returned inactive primaries ["
+ routingNodes.inactivePrimaryCount
+ "]";
assert inactiveShardCount == routingNodes.inactiveShardCount
: "Inactive Shard count ["
+ inactiveShardCount
+ "] but RoutingNodes returned inactive shards ["
+ routingNodes.inactiveShardCount
+ "]";
assert routingNodes.getRelocatingShardCount() == relocating
: "Relocating shards mismatch [" + routingNodes.getRelocatingShardCount() + "] but expected [" + relocating + "]";
return true;
}
private void ensureMutable() {
if (readOnly) {
throw new IllegalStateException("can't modify RoutingNodes - readonly");
}
}
public boolean hasAllocationFailures() {
return unassignedShards.stream().anyMatch((shardRouting -> {
if (shardRouting.unassignedInfo() == null) {
return false;
}
return shardRouting.unassignedInfo().failedAllocations() > 0;
}));
}
public boolean hasRelocationFailures() {
for (var shardRoutings : assignedShards.values()) {
for (var routing : shardRoutings) {
if (routing.relocationFailureInfo() != null && routing.relocationFailureInfo().failedRelocations() > 0) {
return true;
}
}
}
return false;
}
public void resetFailedCounter(RoutingAllocation allocation) {
final var observer = allocation.changes();
int shardsWithMaxFailedAllocations = 0;
int shardsWithMaxFailedRelocations = 0;
List<ShardId> topShardIdsWithFailedAllocations = new ArrayList<>();
List<ShardId> topShardIdsWithFailedRelocations = new ArrayList<>();
final var unassignedIterator = unassigned().iterator();
while (unassignedIterator.hasNext()) {
ShardRouting shardRouting = unassignedIterator.next();
UnassignedInfo unassignedInfo = shardRouting.unassignedInfo();
int failedAllocations = unassignedInfo.failedAllocations();
if (failedAllocations > 0) {
try {
final var maxRetry = SETTING_ALLOCATION_MAX_RETRY.get(
allocation.metadata().indexMetadata(shardRouting.index()).getSettings()
);
if (failedAllocations >= maxRetry) {
shardsWithMaxFailedAllocations++;
if (topShardIdsWithFailedAllocations.size() <= MAX_SHARDS_IN_LOG_MSG) {
topShardIdsWithFailedAllocations.add(shardRouting.shardId());
}
}
} catch (IndexNotFoundException e) {
// ignore
}
}
unassignedIterator.updateUnassigned(
new UnassignedInfo(
failedAllocations > 0 ? UnassignedInfo.Reason.MANUAL_ALLOCATION : unassignedInfo.reason(),
unassignedInfo.message(),
unassignedInfo.failure(),
0,
unassignedInfo.unassignedTimeNanos(),
unassignedInfo.unassignedTimeMillis(),
unassignedInfo.delayed(),
unassignedInfo.lastAllocationStatus(),
Collections.emptySet(),
unassignedInfo.lastAllocatedNodeId()
),
shardRouting.recoverySource(),
observer
);
}
for (RoutingNode routingNode : this) {
var shardsWithRelocationFailures = new ArrayList<ShardRouting>();
for (ShardRouting shardRouting : routingNode) {
if (shardRouting.relocationFailureInfo() != null && shardRouting.relocationFailureInfo().failedRelocations() > 0) {
shardsWithRelocationFailures.add(shardRouting);
try {
int failedRelocations = shardRouting.relocationFailureInfo().failedRelocations();
final var maxRetry = SETTING_ALLOCATION_MAX_RETRY.get(
allocation.metadata().indexMetadata(shardRouting.index()).getSettings()
);
if (failedRelocations >= maxRetry) {
shardsWithMaxFailedRelocations++;
if (topShardIdsWithFailedRelocations.size() <= MAX_SHARDS_IN_LOG_MSG) {
topShardIdsWithFailedRelocations.add(shardRouting.shardId());
}
}
} catch (IndexNotFoundException e) {
// ignore
}
}
}
for (ShardRouting original : shardsWithRelocationFailures) {
ShardRouting updated = original.updateRelocationFailure(RelocationFailureInfo.NO_FAILURES);
routingNode.update(original, updated);
assignedShardsRemove(original);
assignedShardsAdd(updated);
}
}
if (shardsWithMaxFailedAllocations > 0) {
logger.info(
Strings.format(RESET_FAILED_ALLOCATION_COUNTER_LOG_MSG, shardsWithMaxFailedAllocations, topShardIdsWithFailedAllocations)
);
}
if (shardsWithMaxFailedRelocations > 0) {
logger.info(
Strings.format(RESET_FAILED_RELOCATION_COUNTER_LOG_MSG, shardsWithMaxFailedRelocations, topShardIdsWithFailedRelocations)
);
}
}
/**
* Creates an iterator over shards interleaving between nodes: The iterator returns the first shard from
* the first node, then the first shard of the second node, etc. until one shard from each node has been returned.
* The iterator then resumes on the first node by returning the second shard and continues until all shards from
* all the nodes have been returned.
*/
public Iterator<ShardRouting> nodeInterleavedShardIterator() {
final Queue<Iterator<ShardRouting>> queue = new ArrayDeque<>(nodesToShards.size());
for (final var routingNode : nodesToShards.values()) {
final var shards = routingNode.copyShards();
if (shards.length > 0) {
queue.add(Iterators.forArray(shards));
}
}
return new Iterator<>() {
public boolean hasNext() {
return queue.isEmpty() == false;
}
public ShardRouting next() {
if (queue.isEmpty()) {
throw new NoSuchElementException();
}
final var nodeIterator = queue.poll();
assert nodeIterator.hasNext();
final var nextShard = nodeIterator.next();
if (nodeIterator.hasNext()) {
queue.offer(nodeIterator);
}
return nextShard;
}
};
}
private static final
|
UnassignedIterator
|
java
|
google__dagger
|
javatests/dagger/internal/codegen/ComponentProcessorTest.java
|
{
"start": 57272,
"end": 57658
}
|
class ____ {",
" @Provides static String first() { return \"first\"; }",
"}");
Source secondModule =
CompilerTests.javaSource(
"test.SecondModule",
"package test;",
"",
"import dagger.Module;",
"import dagger.Provides;",
"",
"@Module",
"abstract
|
FirstModule
|
java
|
apache__hadoop
|
hadoop-cloud-storage-project/hadoop-tos/src/main/java/org/apache/hadoop/fs/tosfs/commit/CommitterFactory.java
|
{
"start": 1118,
"end": 1367
}
|
class ____ extends PathOutputCommitterFactory {
@Override
public PathOutputCommitter createOutputCommitter(Path outputPath,
TaskAttemptContext context) throws IOException {
return new Committer(outputPath, context);
}
}
|
CommitterFactory
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/ReturnAtTheEndOfVoidFunctionTest.java
|
{
"start": 2800,
"end": 3136
}
|
class ____ {
public void nothing() {}
}
""")
.expectUnchanged()
.doTest();
}
@Test
public void nullReturnInVoidIsUnchanged() {
helper
.addInputLines(
"Builder.java",
"""
package com.google.gporeba;
public final
|
Builder
|
java
|
apache__camel
|
components/camel-wordpress/src/main/java/org/apache/camel/component/wordpress/api/service/WordpressServicePosts.java
|
{
"start": 1067,
"end": 1361
}
|
interface ____ extends WordpressCrudService<Post, PostSearchCriteria> {
/**
* Default endpoint.
*
* @param postId
* @param context
* @param password
* @return
*/
Post retrieve(Integer postId, Context context, String password);
}
|
WordpressServicePosts
|
java
|
apache__kafka
|
streams/src/main/java/org/apache/kafka/streams/kstream/internals/graph/KTableKTableJoinNode.java
|
{
"start": 6150,
"end": 9487
}
|
class ____<K, V1, V2, VR> {
private String nodeName;
private ProcessorParameters<K, Change<V1>, ?, ?> joinThisProcessorParameters;
private ProcessorParameters<K, Change<V2>, ?, ?> joinOtherProcessorParameters;
private String thisJoinSide;
private String otherJoinSide;
private Serde<K> keySerde;
private Serde<VR> valueSerde;
private String[] joinThisStoreNames;
private String[] joinOtherStoreNames;
private ProcessorParameters<K, Change<VR>, ?, ?>
joinMergeProcessorParameters;
private KTableKTableJoinNodeBuilder() {
}
public KTableKTableJoinNodeBuilder<K, V1, V2, VR> withNodeName(final String nodeName) {
this.nodeName = nodeName;
return this;
}
public KTableKTableJoinNodeBuilder<K, V1, V2, VR> withJoinThisProcessorParameters(final ProcessorParameters<K, Change<V1>, ?, ?> joinThisProcessorParameters) {
this.joinThisProcessorParameters = joinThisProcessorParameters;
return this;
}
public KTableKTableJoinNodeBuilder<K, V1, V2, VR> withJoinOtherProcessorParameters(final ProcessorParameters<K, Change<V2>, ?, ?> joinOtherProcessorParameters) {
this.joinOtherProcessorParameters = joinOtherProcessorParameters;
return this;
}
public KTableKTableJoinNodeBuilder<K, V1, V2, VR> withThisJoinSideNodeName(final String thisJoinSide) {
this.thisJoinSide = thisJoinSide;
return this;
}
public KTableKTableJoinNodeBuilder<K, V1, V2, VR> withOtherJoinSideNodeName(final String otherJoinSide) {
this.otherJoinSide = otherJoinSide;
return this;
}
public KTableKTableJoinNodeBuilder<K, V1, V2, VR> withKeySerde(final Serde<K> keySerde) {
this.keySerde = keySerde;
return this;
}
public KTableKTableJoinNodeBuilder<K, V1, V2, VR> withValueSerde(final Serde<VR> valueSerde) {
this.valueSerde = valueSerde;
return this;
}
public KTableKTableJoinNodeBuilder<K, V1, V2, VR> withJoinThisStoreNames(final String[] joinThisStoreNames) {
this.joinThisStoreNames = joinThisStoreNames;
return this;
}
public KTableKTableJoinNodeBuilder<K, V1, V2, VR> withJoinOtherStoreNames(final String[] joinOtherStoreNames) {
this.joinOtherStoreNames = joinOtherStoreNames;
return this;
}
public KTableKTableJoinNodeBuilder<K, V1, V2, VR> withMergeProcessorParameters(final ProcessorParameters<K, Change<VR>, ?, ?> joinMergeProcessorParameters) {
this.joinMergeProcessorParameters = joinMergeProcessorParameters;
return this;
}
public KTableKTableJoinNode<K, V1, V2, VR> build() {
return new KTableKTableJoinNode<>(
nodeName,
joinThisProcessorParameters,
joinOtherProcessorParameters,
joinMergeProcessorParameters,
thisJoinSide,
otherJoinSide,
keySerde,
valueSerde,
joinThisStoreNames,
joinOtherStoreNames
);
}
}
}
|
KTableKTableJoinNodeBuilder
|
java
|
apache__spark
|
common/unsafe/src/main/java/org/apache/spark/sql/catalyst/util/CollationAwareUTF8String.java
|
{
"start": 18816,
"end": 19611
}
|
class ____
// convert the string to uppercase, which only accepts a Java strings as input.
ULocale locale = CollationFactory.fetchCollation(collationId)
.getCollator().getLocale(ULocale.ACTUAL_LOCALE);
return UTF8String.fromString(UCharacter.toUpperCase(locale, target.toValidString()));
}
/**
* Convert the input string to lowercase using the ICU root locale rules.
*
* @param target the input string
* @return the lowercase string
*/
public static UTF8String toLowerCase(final UTF8String target) {
if (target.isFullAscii()) return target.toLowerCaseAscii();
return toLowerCaseSlow(target);
}
private static UTF8String toLowerCaseSlow(final UTF8String target) {
// Note: In order to achieve the desired behavior, we use the ICU UCharacter
|
to
|
java
|
ReactiveX__RxJava
|
src/main/java/io/reactivex/rxjava3/internal/operators/observable/ObservableReplay.java
|
{
"start": 9988,
"end": 17128
}
|
class ____<T>
extends AtomicReference<Disposable>
implements Observer<T>, Disposable {
private static final long serialVersionUID = -533785617179540163L;
/** Holds notifications from upstream. */
final ReplayBuffer<T> buffer;
/** Indicates this Observer received a terminal event. */
boolean done;
/** Indicates an empty array of inner observers. */
static final InnerDisposable[] EMPTY = new InnerDisposable[0];
/** Indicates a terminated ReplayObserver. */
static final InnerDisposable[] TERMINATED = new InnerDisposable[0];
/** Tracks the subscribed observers. */
final AtomicReference<InnerDisposable[]> observers;
/**
* Atomically changed from false to true by connect to make sure the
* connection is only performed by one thread.
*/
final AtomicBoolean shouldConnect;
/** The current connection. */
final AtomicReference<ReplayObserver<T>> current;
ReplayObserver(ReplayBuffer<T> buffer, AtomicReference<ReplayObserver<T>> current) {
this.buffer = buffer;
this.current = current;
this.observers = new AtomicReference<>(EMPTY);
this.shouldConnect = new AtomicBoolean();
}
@Override
public boolean isDisposed() {
return observers.get() == TERMINATED;
}
@Override
public void dispose() {
observers.set(TERMINATED);
current.compareAndSet(ReplayObserver.this, null);
// we don't care if it fails because it means the current has
// been replaced in the meantime
DisposableHelper.dispose(this);
}
/**
* Atomically try adding a new InnerDisposable to this Observer or return false if this
* Observer was terminated.
* @param producer the producer to add
* @return true if succeeded, false otherwise
*/
boolean add(InnerDisposable<T> producer) {
// the state can change so we do a CAS loop to achieve atomicity
for (;;) {
// get the current producer array
InnerDisposable[] c = observers.get();
// if this subscriber-to-source reached a terminal state by receiving
// an onError or onComplete, just refuse to add the new producer
if (c == TERMINATED) {
return false;
}
// we perform a copy-on-write logic
int len = c.length;
InnerDisposable[] u = new InnerDisposable[len + 1];
System.arraycopy(c, 0, u, 0, len);
u[len] = producer;
// try setting the observers array
if (observers.compareAndSet(c, u)) {
return true;
}
// if failed, some other operation succeeded (another add, remove or termination)
// so retry
}
}
/**
* Atomically removes the given InnerDisposable from the observers array.
* @param producer the producer to remove
*/
void remove(InnerDisposable<T> producer) {
// the state can change so we do a CAS loop to achieve atomicity
for (;;) {
// let's read the current observers array
InnerDisposable[] c = observers.get();
int len = c.length;
// if it is either empty or terminated, there is nothing to remove so we quit
if (len == 0) {
return;
}
// let's find the supplied producer in the array
// although this is O(n), we don't expect too many child observers in general
int j = -1;
for (int i = 0; i < len; i++) {
if (c[i].equals(producer)) {
j = i;
break;
}
}
// we didn't find it so just quit
if (j < 0) {
return;
}
// we do copy-on-write logic here
InnerDisposable[] u;
// we don't create a new empty array if producer was the single inhabitant
// but rather reuse an empty array
if (len == 1) {
u = EMPTY;
} else {
// otherwise, create a new array one less in size
u = new InnerDisposable[len - 1];
// copy elements being before the given producer
System.arraycopy(c, 0, u, 0, j);
// copy elements being after the given producer
System.arraycopy(c, j + 1, u, j, len - j - 1);
}
// try setting this new array as
if (observers.compareAndSet(c, u)) {
return;
}
// if we failed, it means something else happened
// (a concurrent add/remove or termination), we need to retry
}
}
@Override
public void onSubscribe(Disposable p) {
if (DisposableHelper.setOnce(this, p)) {
replay();
}
}
@Override
public void onNext(T t) {
if (!done) {
buffer.next(t);
replay();
}
}
@Override
public void onError(Throwable e) {
// The observer front is accessed serially as required by spec so
// no need to CAS in the terminal value
if (!done) {
done = true;
buffer.error(e);
replayFinal();
} else {
RxJavaPlugins.onError(e);
}
}
@Override
public void onComplete() {
// The observer front is accessed serially as required by spec so
// no need to CAS in the terminal value
if (!done) {
done = true;
buffer.complete();
replayFinal();
}
}
/**
* Tries to replay the buffer contents to all known observers.
*/
void replay() {
@SuppressWarnings("unchecked")
InnerDisposable<T>[] a = observers.get();
for (InnerDisposable<T> rp : a) {
buffer.replay(rp);
}
}
/**
* Tries to replay the buffer contents to all known observers.
*/
void replayFinal() {
@SuppressWarnings("unchecked")
InnerDisposable<T>[] a = observers.getAndSet(TERMINATED);
for (InnerDisposable<T> rp : a) {
buffer.replay(rp);
}
}
}
/**
* A Disposable that manages the disposed state of a
* child Observer in thread-safe manner.
* @param <T> the value type
*/
static final
|
ReplayObserver
|
java
|
apache__rocketmq
|
client/src/main/java/org/apache/rocketmq/client/trace/hook/ConsumeMessageTraceHookImpl.java
|
{
"start": 1548,
"end": 5248
}
|
class ____ implements ConsumeMessageHook {
private TraceDispatcher localDispatcher;
public ConsumeMessageTraceHookImpl(TraceDispatcher localDispatcher) {
this.localDispatcher = localDispatcher;
}
@Override
public String hookName() {
return "ConsumeMessageTraceHook";
}
@Override
public void consumeMessageBefore(ConsumeMessageContext context) {
if (context == null || context.getMsgList() == null || context.getMsgList().isEmpty()) {
return;
}
TraceContext traceContext = new TraceContext();
context.setMqTraceContext(traceContext);
traceContext.setTraceType(TraceType.SubBefore);
traceContext.setGroupName(NamespaceUtil.withoutNamespace(context.getConsumerGroup()));
List<TraceBean> beans = new ArrayList<>();
for (MessageExt msg : context.getMsgList()) {
if (msg == null) {
continue;
}
String regionId = msg.getProperty(MessageConst.PROPERTY_MSG_REGION);
String traceOn = msg.getProperty(MessageConst.PROPERTY_TRACE_SWITCH);
if (traceOn != null && traceOn.equals("false")) {
// If trace switch is false ,skip it
continue;
}
TraceBean traceBean = new TraceBean();
traceBean.setTopic(NamespaceUtil.withoutNamespace(msg.getTopic()));
traceBean.setMsgId(msg.getMsgId());
traceBean.setTags(msg.getTags());
traceBean.setKeys(msg.getKeys());
traceBean.setStoreTime(msg.getStoreTimestamp());
traceBean.setBodyLength(msg.getStoreSize());
traceBean.setRetryTimes(msg.getReconsumeTimes());
traceContext.setRegionId(regionId);
beans.add(traceBean);
}
if (beans.size() > 0) {
traceContext.setTraceBeans(beans);
traceContext.setTimeStamp(System.currentTimeMillis());
localDispatcher.append(traceContext);
}
}
@Override
public void consumeMessageAfter(ConsumeMessageContext context) {
if (context == null || context.getMsgList() == null || context.getMsgList().isEmpty()) {
return;
}
TraceContext subBeforeContext = (TraceContext) context.getMqTraceContext();
if (subBeforeContext.getTraceBeans() == null || subBeforeContext.getTraceBeans().size() < 1) {
// If subBefore bean is null ,skip it
return;
}
TraceContext subAfterContext = new TraceContext();
subAfterContext.setTraceType(TraceType.SubAfter);
subAfterContext.setRegionId(subBeforeContext.getRegionId());
subAfterContext.setGroupName(NamespaceUtil.withoutNamespace(subBeforeContext.getGroupName()));
subAfterContext.setRequestId(subBeforeContext.getRequestId());
subAfterContext.setAccessChannel(context.getAccessChannel());
subAfterContext.setSuccess(context.isSuccess());
// Calculate the cost time for processing messages
int costTime = (int) ((System.currentTimeMillis() - subBeforeContext.getTimeStamp()) / context.getMsgList().size());
subAfterContext.setCostTime(costTime);
subAfterContext.setTraceBeans(subBeforeContext.getTraceBeans());
Map<String, String> props = context.getProps();
if (props != null) {
String contextType = props.get(MixAll.CONSUME_CONTEXT_TYPE);
if (contextType != null) {
subAfterContext.setContextCode(ConsumeReturnType.valueOf(contextType).ordinal());
}
}
localDispatcher.append(subAfterContext);
}
}
|
ConsumeMessageTraceHookImpl
|
java
|
spring-projects__spring-boot
|
module/spring-boot-webmvc-test/src/test/java/org/springframework/boot/webmvc/test/autoconfigure/mockmvc/ExampleWebMvcConfigurer.java
|
{
"start": 1375,
"end": 2013
}
|
class ____ implements WebMvcConfigurer {
@Override
public void addArgumentResolvers(List<HandlerMethodArgumentResolver> argumentResolvers) {
argumentResolvers.add(new HandlerMethodArgumentResolver() {
@Override
public boolean supportsParameter(MethodParameter parameter) {
return parameter.getParameterType().equals(ExampleArgument.class);
}
@Override
public Object resolveArgument(MethodParameter parameter, ModelAndViewContainer mavContainer,
NativeWebRequest webRequest, WebDataBinderFactory binderFactory) throws Exception {
return new ExampleArgument("hello");
}
});
}
}
|
ExampleWebMvcConfigurer
|
java
|
alibaba__fastjson
|
src/test/java/com/alibaba/json/test/entity/case1/LongObject_100_Entity.java
|
{
"start": 52,
"end": 14216
}
|
class ____ {
private Long f0;
private Long f1;
private Long f2;
private Long f3;
private Long f4;
private Long f5;
private Long f6;
private Long f7;
private Long f8;
private Long f9;
private Long f10;
private Long f11;
private Long f12;
private Long f13;
private Long f14;
private Long f15;
private Long f16;
private Long f17;
private Long f18;
private Long f19;
private Long f20;
private Long f21;
private Long f22;
private Long f23;
private Long f24;
private Long f25;
private Long f26;
private Long f27;
private Long f28;
private Long f29;
private Long f30;
private Long f31;
private Long f32;
private Long f33;
private Long f34;
private Long f35;
private Long f36;
private Long f37;
private Long f38;
private Long f39;
private Long f40;
private Long f41;
private Long f42;
private Long f43;
private Long f44;
private Long f45;
private Long f46;
private Long f47;
private Long f48;
private Long f49;
private Long f50;
private Long f51;
private Long f52;
private Long f53;
private Long f54;
private Long f55;
private Long f56;
private Long f57;
private Long f58;
private Long f59;
private Long f60;
private Long f61;
private Long f62;
private Long f63;
private Long f64;
private Long f65;
private Long f66;
private Long f67;
private Long f68;
private Long f69;
private Long f70;
private Long f71;
private Long f72;
private Long f73;
private Long f74;
private Long f75;
private Long f76;
private Long f77;
private Long f78;
private Long f79;
private Long f80;
private Long f81;
private Long f82;
private Long f83;
private Long f84;
private Long f85;
private Long f86;
private Long f87;
private Long f88;
private Long f89;
private Long f90;
private Long f91;
private Long f92;
private Long f93;
private Long f94;
private Long f95;
private Long f96;
private Long f97;
private Long f98;
private Long f99;
public Long getF0() {
return f0;
}
public void setF0(Long f0) {
this.f0 = f0;
}
public Long getF1() {
return f1;
}
public void setF1(Long f1) {
this.f1 = f1;
}
public Long getF2() {
return f2;
}
public void setF2(Long f2) {
this.f2 = f2;
}
public Long getF3() {
return f3;
}
public void setF3(Long f3) {
this.f3 = f3;
}
public Long getF4() {
return f4;
}
public void setF4(Long f4) {
this.f4 = f4;
}
public Long getF5() {
return f5;
}
public void setF5(Long f5) {
this.f5 = f5;
}
public Long getF6() {
return f6;
}
public void setF6(Long f6) {
this.f6 = f6;
}
public Long getF7() {
return f7;
}
public void setF7(Long f7) {
this.f7 = f7;
}
public Long getF8() {
return f8;
}
public void setF8(Long f8) {
this.f8 = f8;
}
public Long getF9() {
return f9;
}
public void setF9(Long f9) {
this.f9 = f9;
}
public Long getF10() {
return f10;
}
public void setF10(Long f10) {
this.f10 = f10;
}
public Long getF11() {
return f11;
}
public void setF11(Long f11) {
this.f11 = f11;
}
public Long getF12() {
return f12;
}
public void setF12(Long f12) {
this.f12 = f12;
}
public Long getF13() {
return f13;
}
public void setF13(Long f13) {
this.f13 = f13;
}
public Long getF14() {
return f14;
}
public void setF14(Long f14) {
this.f14 = f14;
}
public Long getF15() {
return f15;
}
public void setF15(Long f15) {
this.f15 = f15;
}
public Long getF16() {
return f16;
}
public void setF16(Long f16) {
this.f16 = f16;
}
public Long getF17() {
return f17;
}
public void setF17(Long f17) {
this.f17 = f17;
}
public Long getF18() {
return f18;
}
public void setF18(Long f18) {
this.f18 = f18;
}
public Long getF19() {
return f19;
}
public void setF19(Long f19) {
this.f19 = f19;
}
public Long getF20() {
return f20;
}
public void setF20(Long f20) {
this.f20 = f20;
}
public Long getF21() {
return f21;
}
public void setF21(Long f21) {
this.f21 = f21;
}
public Long getF22() {
return f22;
}
public void setF22(Long f22) {
this.f22 = f22;
}
public Long getF23() {
return f23;
}
public void setF23(Long f23) {
this.f23 = f23;
}
public Long getF24() {
return f24;
}
public void setF24(Long f24) {
this.f24 = f24;
}
public Long getF25() {
return f25;
}
public void setF25(Long f25) {
this.f25 = f25;
}
public Long getF26() {
return f26;
}
public void setF26(Long f26) {
this.f26 = f26;
}
public Long getF27() {
return f27;
}
public void setF27(Long f27) {
this.f27 = f27;
}
public Long getF28() {
return f28;
}
public void setF28(Long f28) {
this.f28 = f28;
}
public Long getF29() {
return f29;
}
public void setF29(Long f29) {
this.f29 = f29;
}
public Long getF30() {
return f30;
}
public void setF30(Long f30) {
this.f30 = f30;
}
public Long getF31() {
return f31;
}
public void setF31(Long f31) {
this.f31 = f31;
}
public Long getF32() {
return f32;
}
public void setF32(Long f32) {
this.f32 = f32;
}
public Long getF33() {
return f33;
}
public void setF33(Long f33) {
this.f33 = f33;
}
public Long getF34() {
return f34;
}
public void setF34(Long f34) {
this.f34 = f34;
}
public Long getF35() {
return f35;
}
public void setF35(Long f35) {
this.f35 = f35;
}
public Long getF36() {
return f36;
}
public void setF36(Long f36) {
this.f36 = f36;
}
public Long getF37() {
return f37;
}
public void setF37(Long f37) {
this.f37 = f37;
}
public Long getF38() {
return f38;
}
public void setF38(Long f38) {
this.f38 = f38;
}
public Long getF39() {
return f39;
}
public void setF39(Long f39) {
this.f39 = f39;
}
public Long getF40() {
return f40;
}
public void setF40(Long f40) {
this.f40 = f40;
}
public Long getF41() {
return f41;
}
public void setF41(Long f41) {
this.f41 = f41;
}
public Long getF42() {
return f42;
}
public void setF42(Long f42) {
this.f42 = f42;
}
public Long getF43() {
return f43;
}
public void setF43(Long f43) {
this.f43 = f43;
}
public Long getF44() {
return f44;
}
public void setF44(Long f44) {
this.f44 = f44;
}
public Long getF45() {
return f45;
}
public void setF45(Long f45) {
this.f45 = f45;
}
public Long getF46() {
return f46;
}
public void setF46(Long f46) {
this.f46 = f46;
}
public Long getF47() {
return f47;
}
public void setF47(Long f47) {
this.f47 = f47;
}
public Long getF48() {
return f48;
}
public void setF48(Long f48) {
this.f48 = f48;
}
public Long getF49() {
return f49;
}
public void setF49(Long f49) {
this.f49 = f49;
}
public Long getF50() {
return f50;
}
public void setF50(Long f50) {
this.f50 = f50;
}
public Long getF51() {
return f51;
}
public void setF51(Long f51) {
this.f51 = f51;
}
public Long getF52() {
return f52;
}
public void setF52(Long f52) {
this.f52 = f52;
}
public Long getF53() {
return f53;
}
public void setF53(Long f53) {
this.f53 = f53;
}
public Long getF54() {
return f54;
}
public void setF54(Long f54) {
this.f54 = f54;
}
public Long getF55() {
return f55;
}
public void setF55(Long f55) {
this.f55 = f55;
}
public Long getF56() {
return f56;
}
public void setF56(Long f56) {
this.f56 = f56;
}
public Long getF57() {
return f57;
}
public void setF57(Long f57) {
this.f57 = f57;
}
public Long getF58() {
return f58;
}
public void setF58(Long f58) {
this.f58 = f58;
}
public Long getF59() {
return f59;
}
public void setF59(Long f59) {
this.f59 = f59;
}
public Long getF60() {
return f60;
}
public void setF60(Long f60) {
this.f60 = f60;
}
public Long getF61() {
return f61;
}
public void setF61(Long f61) {
this.f61 = f61;
}
public Long getF62() {
return f62;
}
public void setF62(Long f62) {
this.f62 = f62;
}
public Long getF63() {
return f63;
}
public void setF63(Long f63) {
this.f63 = f63;
}
public Long getF64() {
return f64;
}
public void setF64(Long f64) {
this.f64 = f64;
}
public Long getF65() {
return f65;
}
public void setF65(Long f65) {
this.f65 = f65;
}
public Long getF66() {
return f66;
}
public void setF66(Long f66) {
this.f66 = f66;
}
public Long getF67() {
return f67;
}
public void setF67(Long f67) {
this.f67 = f67;
}
public Long getF68() {
return f68;
}
public void setF68(Long f68) {
this.f68 = f68;
}
public Long getF69() {
return f69;
}
public void setF69(Long f69) {
this.f69 = f69;
}
public Long getF70() {
return f70;
}
public void setF70(Long f70) {
this.f70 = f70;
}
public Long getF71() {
return f71;
}
public void setF71(Long f71) {
this.f71 = f71;
}
public Long getF72() {
return f72;
}
public void setF72(Long f72) {
this.f72 = f72;
}
public Long getF73() {
return f73;
}
public void setF73(Long f73) {
this.f73 = f73;
}
public Long getF74() {
return f74;
}
public void setF74(Long f74) {
this.f74 = f74;
}
public Long getF75() {
return f75;
}
public void setF75(Long f75) {
this.f75 = f75;
}
public Long getF76() {
return f76;
}
public void setF76(Long f76) {
this.f76 = f76;
}
public Long getF77() {
return f77;
}
public void setF77(Long f77) {
this.f77 = f77;
}
public Long getF78() {
return f78;
}
public void setF78(Long f78) {
this.f78 = f78;
}
public Long getF79() {
return f79;
}
public void setF79(Long f79) {
this.f79 = f79;
}
public Long getF80() {
return f80;
}
public void setF80(Long f80) {
this.f80 = f80;
}
public Long getF81() {
return f81;
}
public void setF81(Long f81) {
this.f81 = f81;
}
public Long getF82() {
return f82;
}
public void setF82(Long f82) {
this.f82 = f82;
}
public Long getF83() {
return f83;
}
public void setF83(Long f83) {
this.f83 = f83;
}
public Long getF84() {
return f84;
}
public void setF84(Long f84) {
this.f84 = f84;
}
public Long getF85() {
return f85;
}
public void setF85(Long f85) {
this.f85 = f85;
}
public Long getF86() {
return f86;
}
public void setF86(Long f86) {
this.f86 = f86;
}
public Long getF87() {
return f87;
}
public void setF87(Long f87) {
this.f87 = f87;
}
public Long getF88() {
return f88;
}
public void setF88(Long f88) {
this.f88 = f88;
}
public Long getF89() {
return f89;
}
public void setF89(Long f89) {
this.f89 = f89;
}
public Long getF90() {
return f90;
}
public void setF90(Long f90) {
this.f90 = f90;
}
public Long getF91() {
return f91;
}
public void setF91(Long f91) {
this.f91 = f91;
}
public Long getF92() {
return f92;
}
public void setF92(Long f92) {
this.f92 = f92;
}
public Long getF93() {
return f93;
}
public void setF93(Long f93) {
this.f93 = f93;
}
public Long getF94() {
return f94;
}
public void setF94(Long f94) {
this.f94 = f94;
}
public Long getF95() {
return f95;
}
public void setF95(Long f95) {
this.f95 = f95;
}
public Long getF96() {
return f96;
}
public void setF96(Long f96) {
this.f96 = f96;
}
public Long getF97() {
return f97;
}
public void setF97(Long f97) {
this.f97 = f97;
}
public Long getF98() {
return f98;
}
public void setF98(Long f98) {
this.f98 = f98;
}
public Long getF99() {
return f99;
}
public void setF99(Long f99) {
this.f99 = f99;
}
}
|
LongObject_100_Entity
|
java
|
elastic__elasticsearch
|
x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/amazonbedrock/client/AmazonBedrockMockExecuteRequestSender.java
|
{
"start": 1092,
"end": 2911
}
|
class ____ extends AmazonBedrockExecuteOnlyRequestSender {
private Queue<Object> results = new ConcurrentLinkedQueue<>();
private Queue<List<String>> inputs = new ConcurrentLinkedQueue<>();
private int sendCounter = 0;
public AmazonBedrockMockExecuteRequestSender(AmazonBedrockClientCache clientCache, ThrottlerManager throttlerManager) {
super(clientCache, throttlerManager);
}
public void enqueue(Object result) {
results.add(result);
}
public int sendCount() {
return sendCounter;
}
public List<String> getInputs() {
return inputs.remove();
}
@Override
protected AmazonBedrockExecutor createExecutor(
AmazonBedrockRequest awsRequest,
AmazonBedrockResponseHandler awsResponse,
Logger logger,
Supplier<Boolean> hasRequestTimedOutFunction,
ActionListener<InferenceServiceResults> listener
) {
setCacheResult();
return super.createExecutor(awsRequest, awsResponse, logger, hasRequestTimedOutFunction, listener);
}
private void setCacheResult() {
var mockCache = (AmazonBedrockMockClientCache) this.clientCache;
var result = results.remove();
if (result instanceof ConverseResponse converseResponse) {
mockCache.setConverseResponse(converseResponse);
return;
}
if (result instanceof InvokeModelResponse invokeModelResponse) {
mockCache.setInvokeModelResponse(invokeModelResponse);
return;
}
if (result instanceof ElasticsearchException exception) {
mockCache.setExceptionToThrow(exception);
return;
}
throw new RuntimeException("Unknown result type: " + result.getClass());
}
}
|
AmazonBedrockMockExecuteRequestSender
|
java
|
hibernate__hibernate-orm
|
hibernate-envers/src/main/java/org/hibernate/envers/internal/entities/IdMappingData.java
|
{
"start": 409,
"end": 916
}
|
class ____ {
private final IdMapper idMapper;
private final Identifier identifier;
private final IdentifierRelation relation;
public IdMappingData(IdMapper mapper, Identifier identifier, IdentifierRelation relation) {
this.idMapper = mapper;
this.identifier = identifier;
this.relation = relation;
}
public IdMapper getIdMapper() {
return idMapper;
}
public Identifier getIdentifier() {
return identifier;
}
public IdentifierRelation getRelation() {
return relation;
}
}
|
IdMappingData
|
java
|
elastic__elasticsearch
|
x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorConfigurationAction.java
|
{
"start": 1340,
"end": 1667
}
|
class ____ {
public static final String NAME = "cluster:admin/xpack/connector/update_configuration";
public static final ActionType<ConnectorUpdateActionResponse> INSTANCE = new ActionType<>(NAME);
private UpdateConnectorConfigurationAction() {/* no instances */}
public static
|
UpdateConnectorConfigurationAction
|
java
|
spring-projects__spring-framework
|
spring-test/src/main/java/org/springframework/test/context/support/AnnotationConfigContextLoaderUtils.java
|
{
"start": 1395,
"end": 1636
}
|
class ____ {
private static final Log logger = LogFactory.getLog(AnnotationConfigContextLoaderUtils.class);
/**
* Detect the default configuration classes for the supplied test class.
* <p>The returned
|
AnnotationConfigContextLoaderUtils
|
java
|
ReactiveX__RxJava
|
src/test/java/io/reactivex/rxjava3/subscribers/SerializedSubscriberTest.java
|
{
"start": 18913,
"end": 20614
}
|
class ____ implements Runnable {
private final CountDownLatch latch;
private final Subscriber<String> subscriber;
private final int numStringsToSend;
final AtomicInteger produced;
private final CountDownLatch running;
OnNextThread(Subscriber<String> subscriber, int numStringsToSend, CountDownLatch latch, CountDownLatch running) {
this(subscriber, numStringsToSend, new AtomicInteger(), latch, running);
}
OnNextThread(Subscriber<String> subscriber, int numStringsToSend, AtomicInteger produced) {
this(subscriber, numStringsToSend, produced, null, null);
}
OnNextThread(Subscriber<String> subscriber, int numStringsToSend, AtomicInteger produced, CountDownLatch latch, CountDownLatch running) {
this.subscriber = subscriber;
this.numStringsToSend = numStringsToSend;
this.produced = produced;
this.latch = latch;
this.running = running;
}
OnNextThread(Subscriber<String> subscriber, int numStringsToSend) {
this(subscriber, numStringsToSend, new AtomicInteger());
}
@Override
public void run() {
if (running != null) {
running.countDown();
}
for (int i = 0; i < numStringsToSend; i++) {
subscriber.onNext(Thread.currentThread().getId() + "-" + i);
if (latch != null) {
latch.countDown();
}
produced.incrementAndGet();
}
}
}
/**
* A thread that will call onError or onNext.
*/
public static
|
OnNextThread
|
java
|
quarkusio__quarkus
|
independent-projects/resteasy-reactive/server/vertx/src/test/java/org/jboss/resteasy/reactive/server/vertx/test/resource/basic/SpecialResourceTest.java
|
{
"start": 1732,
"end": 5189
}
|
class ____ {
static Client client;
@RegisterExtension
static ResteasyReactiveUnitTest testExtension = new ResteasyReactiveUnitTest()
.setArchiveProducer(new Supplier<>() {
@Override
public JavaArchive get() {
JavaArchive war = ShrinkWrap.create(JavaArchive.class);
war.addClasses(SpecialResourceStreamResource.class, SpecialResourceApiResource.class,
PortProviderUtil.class,
SpecialResourceDeleteResource.class);
return war;
}
});
@BeforeAll
public static void init() {
client = ClientBuilder.newClient();
}
@AfterAll
public static void close() {
client.close();
client = null;
}
private String generateURL(String path) {
return PortProviderUtil.generateURL(path, SpecialResourceTest.class.getSimpleName());
}
/**
* @tpTestDetails Regression test for RESTEASY-631
* @tpSince RESTEasy 3.0.16
*/
@Test
@DisplayName("Test 631")
public void test631() throws Exception {
WebTarget base = client.target(generateURL("/delete"));
Response response = base.request().method("DELETE", Entity.entity("hello", "text/plain"));
Assertions.assertEquals(Response.Status.NO_CONTENT.getStatusCode(), response.getStatus());
response.close();
}
/**
* @tpTestDetails Regression test for RESTEASY-534
* @tpSince RESTEasy 3.0.16
*/
@Test
@DisplayName("Test 534")
public void test534() throws Exception {
WebTarget base = client.target(generateURL("/inputstream/test/json"));
Response response = base.request().post(Entity.entity("hello world".getBytes(), MediaType.APPLICATION_OCTET_STREAM));
Assertions.assertEquals(Response.Status.NO_CONTENT.getStatusCode(), response.getStatus());
response.close();
}
/**
* @tpTestDetails Regression test for RESTEASY-624
* @tpSince RESTEasy 3.0.16
*/
@Test
@DisplayName("Test 624")
public void test624() throws Exception {
WebTarget base = client.target(generateURL("/ApI/FuNc"));
Response response = base.request().get();
Assertions.assertEquals(Response.Status.OK.getStatusCode(), response.getStatus());
response.close();
}
/**
* @tpTestDetails Regression test for RESTEASY-583
* @tpSince RESTEasy 3.0.16
*/
@Test
@DisplayName("Test 583")
public void test583() throws Exception {
HttpClient client = HttpClientBuilder.create().build();
HttpPut method = new HttpPut(generateURL("/api"));
HttpResponse response = null;
try {
method.setEntity(
new StringEntity("hello", ContentType.create("vnd.net.juniper.space.target-management.targets+xml")));
response = client.execute(method);
Assertions.assertEquals(response.getStatusLine().getStatusCode(), Response.Status.BAD_REQUEST.getStatusCode());
} catch (IOException e) {
throw new RuntimeException(e);
} finally {
try {
if (response != null) {
EntityUtils.consume(response.getEntity());
}
} catch (IOException e) {
e.printStackTrace();
}
}
}
}
|
SpecialResourceTest
|
java
|
apache__flink
|
flink-runtime/src/main/java/org/apache/flink/streaming/runtime/io/checkpointing/UpstreamRecoveryTracker.java
|
{
"start": 1939,
"end": 3137
}
|
class ____ implements UpstreamRecoveryTracker {
private final HashSet<InputChannelInfo> restoredChannels;
private int numUnrestoredChannels;
private final InputGate inputGate;
UpstreamRecoveryTrackerImpl(InputGate inputGate) {
this.restoredChannels = new HashSet<>();
this.numUnrestoredChannels = inputGate.getNumberOfInputChannels();
this.inputGate = inputGate;
}
@Override
public void handleEndOfRecovery(InputChannelInfo channelInfo) throws IOException {
if (numUnrestoredChannels > 0) {
Preconditions.checkState(
!restoredChannels.contains(channelInfo), "already restored: %s", channelInfo);
restoredChannels.add(channelInfo);
numUnrestoredChannels--;
if (numUnrestoredChannels == 0) {
for (InputChannelInfo inputChannelInfo : inputGate.getChannelInfos()) {
inputGate.resumeConsumption(inputChannelInfo);
}
restoredChannels.clear();
}
}
}
@Override
public boolean allChannelsRecovered() {
return numUnrestoredChannels == 0;
}
}
|
UpstreamRecoveryTrackerImpl
|
java
|
assertj__assertj-core
|
assertj-core/src/main/java/org/assertj/core/error/ShouldNotContain.java
|
{
"start": 1195,
"end": 3939
}
|
class ____ extends BasicErrorMessageFactory {
/**
* Creates a new <code>{@link ShouldNotContain}</code>.
* @param actual the actual value in the failed assertion.
* @param expected values expected not to be contained in {@code actual}.
* @param found the values in {@code expected} found in {@code actual}.
* @param comparisonStrategy the {@link ComparisonStrategy} used to evaluate assertion.
* @return the created {@code ErrorMessageFactory}.
*/
public static ErrorMessageFactory shouldNotContain(Object actual, Object expected, Object found,
ComparisonStrategy comparisonStrategy) {
return new ShouldNotContain(actual, expected, found, comparisonStrategy);
}
/**
* Creates a new <code>{@link ShouldNotContain}</code>.
* @param actual the actual value in the failed assertion.
* @param expected values expected not to be contained in {@code actual}.
* @param found the values in {@code expected} found in {@code actual}.
* @return the created {@code ErrorMessageFactory}.
*/
public static ErrorMessageFactory shouldNotContain(Object actual, Object expected, Object found) {
return new ShouldNotContain(actual, expected, found, StandardComparisonStrategy.instance());
}
private ShouldNotContain(Object actual, Object expected, Object found, ComparisonStrategy comparisonStrategy) {
super("%nExpecting%n %s%nnot to contain%n %s%nbut found%n %s%n%s", actual, expected, found, comparisonStrategy);
}
public static ErrorMessageFactory directoryShouldNotContain(File actual, List<File> matchingContent, String filterDescription) {
return new ShouldNotContain(actual, toFileNames(matchingContent), filterDescription);
}
private static List<String> toFileNames(List<File> files) {
return files.stream()
.map(File::getName)
.collect(toList());
}
public static ErrorMessageFactory directoryShouldNotContain(Path actual, List<Path> matchingContent, String filterDescription) {
return new ShouldNotContain(actual, toPathNames(matchingContent), filterDescription);
}
private static List<String> toPathNames(List<Path> files) {
return files.stream()
.map(Path::toString)
.collect(toList());
}
private ShouldNotContain(Object actual, List<String> matchingContent, String filterDescription) {
// not passing matchingContent and filterDescription as parameter to avoid AssertJ default String formatting
super("%nExpecting directory:%n" +
" %s%n" +
"not to contain any files matching " + filterDescription + " but found some:%n" +
" " + matchingContent,
actual);
}
}
|
ShouldNotContain
|
java
|
apache__hadoop
|
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java
|
{
"start": 159538,
"end": 160684
}
|
class ____ implements Runnable {
private long lastExecuted = 0;
@Override
public synchronized void run() {
long currentTime = Time.monotonicNow();
if (lastExecuted == 0) {
lastExecuted = currentTime - metricsUpdaterInterval;
}
long currentTotalRequests = totalRequests.sum();
long totalRequestsDiff = currentTotalRequests - lastSeenTotalRequests;
lastSeenTotalRequests = currentTotalRequests;
if ((currentTime - lastExecuted) > 0) {
double totalRequestsPerSecInDouble =
(double) totalRequestsDiff / TimeUnit.MILLISECONDS.toSeconds(
currentTime - lastExecuted);
totalRequestsPerSecond = ((long) totalRequestsPerSecInDouble);
}
lastExecuted = currentTime;
}
}
@VisibleForTesting
CallQueueManager<Call> getCallQueue() {
return callQueue;
}
@VisibleForTesting
void setCallQueue(CallQueueManager<Call> callQueue) {
this.callQueue = callQueue;
}
@VisibleForTesting
void setRpcRequestClass(Class<? extends Writable> rpcRequestClass) {
this.rpcRequestClass = rpcRequestClass;
}
}
|
MetricsUpdateRunner
|
java
|
apache__hadoop
|
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestComparators.java
|
{
"start": 3771,
"end": 4300
}
|
class ____
implements Mapper<WritableComparable, Writable,
WritableComparable, Writable> {
public void configure(JobConf job) {
}
public void map(WritableComparable key, Writable value,
OutputCollector<WritableComparable, Writable> out,
Reporter reporter) throws IOException {
out.collect(key, value);
}
public void close() {
}
}
/**
* Checks whether keys are in ascending order.
*/
static
|
IdentityMapper
|
java
|
apache__flink
|
flink-runtime/src/test/java/org/apache/flink/streaming/runtime/operators/sink/committables/CheckpointCommittableManagerImplTest.java
|
{
"start": 1633,
"end": 6067
}
|
class ____ {
private static final SinkCommitterMetricGroup METRIC_GROUP =
MetricsGroupTestUtils.mockCommitterMetricGroup();
private static final int MAX_RETRIES = 1;
@Test
void testAddSummary() {
final CheckpointCommittableManagerImpl<Integer> checkpointCommittables =
new CheckpointCommittableManagerImpl<>(new HashMap<>(), 1, 1L, METRIC_GROUP);
assertThat(checkpointCommittables.getSubtaskCommittableManagers()).isEmpty();
final CommittableSummary<Integer> first = new CommittableSummary<>(1, 1, 1L, 1, 0);
checkpointCommittables.addSummary(first);
assertThat(checkpointCommittables.getSubtaskCommittableManagers())
.singleElement()
.returns(1, SubtaskCommittableManager::getSubtaskId)
.returns(1L, SubtaskCommittableManager::getCheckpointId);
// Add different subtask id
final CommittableSummary<Integer> third = new CommittableSummary<>(2, 1, 2L, 2, 1);
checkpointCommittables.addSummary(third);
assertThat(checkpointCommittables.getSubtaskCommittableManagers()).hasSize(2);
}
@Test
void testCommit() throws IOException, InterruptedException {
final CheckpointCommittableManagerImpl<Integer> checkpointCommittables =
new CheckpointCommittableManagerImpl<>(new HashMap<>(), 1, 1L, METRIC_GROUP);
checkpointCommittables.addSummary(new CommittableSummary<>(1, 1, 1L, 1, 0));
checkpointCommittables.addSummary(new CommittableSummary<>(2, 1, 1L, 2, 0));
checkpointCommittables.addCommittable(new CommittableWithLineage<>(3, 1L, 1));
checkpointCommittables.addCommittable(new CommittableWithLineage<>(4, 1L, 2));
final Committer<Integer> committer = new NoOpCommitter();
// Only commit fully received committables
assertThatCode(() -> checkpointCommittables.commit(committer, MAX_RETRIES))
.hasMessageContaining("Trying to commit incomplete batch of committables");
// Even on retry
assertThatCode(() -> checkpointCommittables.commit(committer, MAX_RETRIES))
.hasMessageContaining("Trying to commit incomplete batch of committables");
// Add missing committable
checkpointCommittables.addCommittable(new CommittableWithLineage<>(5, 1L, 2));
// Commit all committables
assertThatCode(() -> checkpointCommittables.commit(committer, MAX_RETRIES))
.doesNotThrowAnyException();
assertThat(checkpointCommittables.getSuccessfulCommittables())
.hasSize(3)
.containsExactlyInAnyOrder(3, 4, 5);
}
@Test
void testUpdateCommittableSummary() {
final CheckpointCommittableManagerImpl<Integer> checkpointCommittables =
new CheckpointCommittableManagerImpl<>(new HashMap<>(), 1, 1L, METRIC_GROUP);
checkpointCommittables.addSummary(new CommittableSummary<>(1, 1, 1L, 1, 0));
assertThatThrownBy(
() ->
checkpointCommittables.addSummary(
new CommittableSummary<>(1, 1, 1L, 2, 0)))
.isInstanceOf(UnsupportedOperationException.class)
.hasMessageContaining("FLINK-25920");
}
// check different values of subtaskId and numberOfSubtasks to make sure that no value is
// hardcoded.
@ParameterizedTest(name = "subtaskId = {0}, numberOfSubtasks = {1}, checkpointId = {2}")
@CsvSource({"1, 10, 100", "2, 20, 200", "3, 30, 300"})
public void testCopy(int subtaskId, int numberOfSubtasks, long checkpointId) {
final CheckpointCommittableManagerImpl<Integer> original =
new CheckpointCommittableManagerImpl<>(
new HashMap<>(), numberOfSubtasks, checkpointId, METRIC_GROUP);
original.addSummary(
new CommittableSummary<>(subtaskId, numberOfSubtasks, checkpointId, 1, 0));
CheckpointCommittableManagerImpl<Integer> copy = original.copy();
assertThat(copy.getCheckpointId()).isEqualTo(checkpointId);
assertThat(copy)
.returns(numberOfSubtasks, CheckpointCommittableManagerImpl::getNumberOfSubtasks)
.returns(checkpointId, CheckpointCommittableManagerImpl::getCheckpointId);
}
private static
|
CheckpointCommittableManagerImplTest
|
java
|
FasterXML__jackson-databind
|
src/test/java/tools/jackson/databind/node/NotANumberConversionTest.java
|
{
"start": 253,
"end": 1531
}
|
class ____ extends DatabindTestUtil
{
private final ObjectMapper MAPPER = jsonMapperBuilder()
.enable(DeserializationFeature.USE_BIG_DECIMAL_FOR_FLOATS)
.build();
@Test
public void testBigDecimalWithNaN() throws Exception
{
JsonNode tree = MAPPER.valueToTree(new DoubleWrapper(Double.NaN));
assertNotNull(tree);
String json = MAPPER.writeValueAsString(tree);
assertNotNull(json);
tree = MAPPER.valueToTree(new DoubleWrapper(Double.NEGATIVE_INFINITY));
assertNotNull(tree);
json = MAPPER.writeValueAsString(tree);
assertNotNull(json);
tree = MAPPER.valueToTree(new DoubleWrapper(Double.POSITIVE_INFINITY));
assertNotNull(tree);
json = MAPPER.writeValueAsString(tree);
assertNotNull(json);
}
// for [databind#1315]: no accidental coercion to DoubleNode
@Test
public void testBigDecimalWithoutNaN() throws Exception
{
BigDecimal input = new BigDecimal(Double.MIN_VALUE).divide(new BigDecimal(10L));
JsonNode tree = MAPPER.readTree(input.toString());
assertTrue(tree.isBigDecimal());
BigDecimal output = tree.decimalValue();
assertEquals(input, output);
}
}
|
NotANumberConversionTest
|
java
|
apache__hadoop
|
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/protocolPB/HAServiceProtocolPB.java
|
{
"start": 1502,
"end": 1680
}
|
interface ____ extends
HAServiceProtocolService.BlockingInterface, VersionedProtocol {
/**
* If any methods need annotation, it can be added here
*/
}
|
HAServiceProtocolPB
|
java
|
apache__flink
|
flink-table/flink-table-runtime/src/test/java/org/apache/flink/table/data/DataFormatConvertersTest.java
|
{
"start": 3003,
"end": 10301
}
|
class ____ {
private TypeInformation[] simpleTypes =
new TypeInformation[] {
BasicTypeInfo.STRING_TYPE_INFO,
BasicTypeInfo.BOOLEAN_TYPE_INFO,
BasicTypeInfo.INT_TYPE_INFO,
BasicTypeInfo.LONG_TYPE_INFO,
BasicTypeInfo.FLOAT_TYPE_INFO,
BasicTypeInfo.DOUBLE_TYPE_INFO,
BasicTypeInfo.SHORT_TYPE_INFO,
BasicTypeInfo.BYTE_TYPE_INFO,
BasicTypeInfo.CHAR_TYPE_INFO,
PrimitiveArrayTypeInfo.BOOLEAN_PRIMITIVE_ARRAY_TYPE_INFO,
PrimitiveArrayTypeInfo.INT_PRIMITIVE_ARRAY_TYPE_INFO,
PrimitiveArrayTypeInfo.LONG_PRIMITIVE_ARRAY_TYPE_INFO,
PrimitiveArrayTypeInfo.FLOAT_PRIMITIVE_ARRAY_TYPE_INFO,
PrimitiveArrayTypeInfo.DOUBLE_PRIMITIVE_ARRAY_TYPE_INFO,
PrimitiveArrayTypeInfo.SHORT_PRIMITIVE_ARRAY_TYPE_INFO,
PrimitiveArrayTypeInfo.BYTE_PRIMITIVE_ARRAY_TYPE_INFO,
PrimitiveArrayTypeInfo.CHAR_PRIMITIVE_ARRAY_TYPE_INFO,
LocalTimeTypeInfo.LOCAL_DATE,
LocalTimeTypeInfo.LOCAL_TIME,
LocalTimeTypeInfo.LOCAL_DATE_TIME,
StringDataTypeInfo.INSTANCE
};
private Object[] simpleValues =
new Object[] {
"haha",
true,
22,
1111L,
0.5f,
0.5d,
(short) 1,
(byte) 1,
(char) 1,
new boolean[] {true, false},
new int[] {5, 1},
new long[] {5, 1},
new float[] {5, 1},
new double[] {5, 1},
new short[] {5, 1},
new byte[] {5, 1},
new char[] {5, 1},
DateTimeUtils.toLocalDate(5),
DateTimeUtils.toLocalTime(11),
DateTimeUtils.toLocalDateTime(11),
StringData.fromString("hahah")
};
private DataType[] dataTypes =
new DataType[] {
DataTypes.TIMESTAMP(9).bridgedTo(LocalDateTime.class),
DataTypes.TIMESTAMP(9).bridgedTo(Timestamp.class),
DataTypes.TIMESTAMP(3),
DataTypes.TIMESTAMP_LTZ(3).bridgedTo(Timestamp.class),
DataTypes.TIMESTAMP_LTZ(9).bridgedTo(Timestamp.class),
new AtomicDataType(
new LegacyTypeInformationType<>(
LogicalTypeRoot.TIMESTAMP_WITHOUT_TIME_ZONE,
SqlTimeTypeInfo.TIMESTAMP)),
new AtomicDataType(
new LegacyTypeInformationType<>(
LogicalTypeRoot.TIMESTAMP_WITHOUT_TIME_ZONE,
new LegacyTimestampTypeInfo(7))),
DataTypes.TIMESTAMP(3).bridgedTo(TimestampData.class)
};
private Object[] dataValues =
new Object[] {
LocalDateTime.of(1970, 1, 1, 0, 0, 0, 123456789),
Timestamp.valueOf("1970-01-01 00:00:00.123456789"),
LocalDateTime.of(1970, 1, 1, 0, 0, 0, 123),
buildTimestamp(4000L, 123),
buildTimestamp(4000L, 123456789),
Timestamp.valueOf("1970-01-01 00:00:00.123"),
Timestamp.valueOf("1970-01-01 00:00:00.1234567"),
TimestampData.fromEpochMillis(1000L)
};
private static Timestamp buildTimestamp(long mills, int nanos) {
Timestamp ts = new Timestamp(mills);
ts.setNanos(nanos);
return ts;
}
private static DataFormatConverter getConverter(TypeInformation typeInfo) {
return getConverterForDataType(TypeConversions.fromLegacyInfoToDataType(typeInfo));
}
private static void test(TypeInformation typeInfo, Object value) {
test(typeInfo, value, null);
}
private static void test(TypeInformation typeInfo, Object value, Object anotherValue) {
DataFormatConverter converter = getConverter(typeInfo);
final Object innerValue = converter.toInternal(value);
if (anotherValue != null) {
converter.toInternal(anotherValue);
}
assertThat(converter.toExternal(innerValue)).isEqualTo(value);
}
private static DataFormatConverter getConverter(DataType dataType) {
return getConverterForDataType(dataType);
}
private static void testDataType(DataType dataType, Object value) {
DataFormatConverter converter = getConverter(dataType);
assertThat(converter.toExternal(converter.toInternal(value))).isEqualTo(value);
}
@Test
void testTypes() {
for (int i = 0; i < simpleTypes.length; i++) {
test(simpleTypes[i], simpleValues[i]);
}
test(new RowTypeInfo(simpleTypes), new Row(simpleTypes.length));
test(new RowTypeInfo(simpleTypes), Row.ofKind(RowKind.DELETE, simpleValues));
test(
InternalTypeInfo.ofFields(VarCharType.STRING_TYPE, new IntType()),
GenericRowData.of(StringData.fromString("hehe"), 111));
test(
InternalTypeInfo.ofFields(VarCharType.STRING_TYPE, new IntType()),
GenericRowData.of(null, null));
test(new DecimalDataTypeInfo(10, 5), null);
test(new DecimalDataTypeInfo(10, 5), DecimalDataUtils.castFrom(5.555, 10, 5));
test(Types.BIG_DEC, null);
{
DataFormatConverter converter = getConverter(Types.BIG_DEC);
assertThat(
converter.toInternal(
converter.toExternal(DecimalDataUtils.castFrom(5, 19, 18))))
.isEqualTo(DecimalDataUtils.castFrom(5, 19, 18));
}
test(new ListTypeInfo<>(Types.STRING), null);
test(new ListTypeInfo<>(Types.STRING), Arrays.asList("ahah", "xx"));
test(BasicArrayTypeInfo.DOUBLE_ARRAY_TYPE_INFO, new Double[] {1D, 5D});
test(BasicArrayTypeInfo.DOUBLE_ARRAY_TYPE_INFO, new Double[] {null, null});
test(ObjectArrayTypeInfo.getInfoFor(Types.STRING), new String[] {null, null});
test(ObjectArrayTypeInfo.getInfoFor(Types.STRING), new String[] {"haha", "hehe"});
test(
ObjectArrayTypeInfo.getInfoFor(Types.STRING),
new String[] {"haha", "hehe"},
new String[] {"aa", "bb"});
test(new MapTypeInfo<>(Types.STRING, Types.INT), null);
HashMap<String, Integer> map = new HashMap<>();
map.put("haha", 1);
map.put("hah1", 5);
map.put(null, null);
test(new MapTypeInfo<>(Types.STRING, Types.INT), map);
Tuple2 tuple2 = new Tuple2<>(5, 10);
TupleTypeInfo tupleTypeInfo = new TupleTypeInfo<>(tuple2.getClass(), Types.INT, Types.INT);
test(tupleTypeInfo, tuple2);
test(TypeExtractor.createTypeInfo(MyPojo.class), new MyPojo(1, 3));
}
@Test
void testDataTypes() {
for (int i = 0; i < dataTypes.length; i++) {
testDataType(dataTypes[i], dataValues[i]);
}
}
/** Test pojo. */
public static
|
DataFormatConvertersTest
|
java
|
grpc__grpc-java
|
examples/example-xds/src/main/java/io/grpc/examples/helloworldxds/XdsHelloWorldClient.java
|
{
"start": 1278,
"end": 4420
}
|
class ____ {
private static final Logger logger = Logger.getLogger(XdsHelloWorldClient.class.getName());
private final GreeterGrpc.GreeterBlockingStub blockingStub;
/** Construct client for accessing HelloWorld server using the existing channel. */
public XdsHelloWorldClient(Channel channel) {
blockingStub = GreeterGrpc.newBlockingStub(channel);
}
/** Say hello to server. */
public void greet(String name) {
logger.info("Will try to greet " + name + " ...");
HelloRequest request = HelloRequest.newBuilder().setName(name).build();
HelloReply response;
try {
response = blockingStub.sayHello(request);
} catch (StatusRuntimeException e) {
logger.log(Level.WARNING, "RPC failed: {0}", e.getStatus());
return;
}
logger.info("Greeting: " + response.getMessage());
}
/**
* Greet server. If provided, the first element of {@code args} is the name to use in the
* greeting. The second argument is the target server. A {@code --xds-creds} flag is also accepted.
*/
public static void main(String[] args) throws Exception {
String user = "xds world";
// The example defaults to the same behavior as the hello world example. To enable xDS, pass an
// "xds:"-prefixed string as the target.
String target = "localhost:50051";
ChannelCredentials credentials = InsecureChannelCredentials.create();
if (args.length > 0) {
if ("--help".equals(args[0])) {
System.out.println("Usage: [--xds-creds] [NAME [TARGET]]");
System.out.println("");
System.err.println(" --xds-creds Use credentials provided by xDS. Defaults to insecure");
System.out.println("");
System.err.println(" NAME The name you wish to be greeted by. Defaults to " + user);
System.err.println(" TARGET The server to connect to. Defaults to " + target);
System.exit(1);
} else if ("--xds-creds".equals(args[0])) {
// The xDS credentials use the security configured by the xDS server when available. When
// xDS is not used or when xDS does not provide security configuration, the xDS credentials
// fall back to other credentials (in this case, InsecureChannelCredentials).
credentials = XdsChannelCredentials.create(InsecureChannelCredentials.create());
args = Arrays.copyOfRange(args, 1, args.length);
}
}
if (args.length > 0) {
user = args[0];
}
if (args.length > 1) {
target = args[1];
}
// This uses the new ChannelCredentials API. Grpc.newChannelBuilder() is the same as
// ManagedChannelBuilder.forTarget(), except that it is passed credentials. When using this API,
// you don't use methods like `managedChannelBuilder.usePlaintext()`, as that configuration is
// provided by the ChannelCredentials.
ManagedChannel channel = Grpc.newChannelBuilder(target, credentials)
.build();
try {
XdsHelloWorldClient client = new XdsHelloWorldClient(channel);
client.greet(user);
} finally {
channel.shutdownNow().awaitTermination(5, TimeUnit.SECONDS);
}
}
}
|
XdsHelloWorldClient
|
java
|
spring-projects__spring-boot
|
module/spring-boot-actuator/src/test/java/org/springframework/boot/actuate/endpoint/web/annotation/WebEndpointDiscovererTests.java
|
{
"start": 17083,
"end": 17460
}
|
class ____ {
@Bean
TestEndpoint testEndpoint() {
return new TestEndpoint();
}
@Bean
TestWebEndpointExtension testExtensionOne() {
return new TestWebEndpointExtension();
}
@Bean
TestWebEndpointExtension testExtensionTwo() {
return new TestWebEndpointExtension();
}
}
@Configuration(proxyBeanMethods = false)
static
|
ClashingWebEndpointConfiguration
|
java
|
quarkusio__quarkus
|
integration-tests/openapi/src/test/java/io/quarkus/it/openapi/jaxrs/ByteArrayTest.java
|
{
"start": 218,
"end": 4094
}
|
class ____ extends AbstractByteArrayTest {
// Just byte[]
@Test
public void testJustByteArrayInJaxRsServiceRequest() throws IOException {
testServiceByteArrayRequest("/jax-rs/defaultContentType/justByteArray", APPLICATION_OCTET_STREAM);
}
@Test
public void testJusByteArrayInJaxRsServiceResponse() throws IOException {
testServiceByteArrayResponse("/jax-rs/defaultContentType/justByteArray", APPLICATION_OCTET_STREAM);
}
@Test
public void testJustByteArrayInJaxRsOpenAPIRequest() {
testOpenAPIRequest("/jax-rs/defaultContentType/justByteArray", APPLICATION_OCTET_STREAM);
}
@Test
public void testJustByteArrayInJaxRsOpenAPIResponse() {
testOpenAPIResponse("/jax-rs/defaultContentType/justByteArray/{fileName}", APPLICATION_OCTET_STREAM);
}
// RestResponse<byte[]>
@Test
public void testRestResponseByteArrayInJaxRsServiceRequest() throws IOException {
testServiceByteArrayRequest("/jax-rs/defaultContentType/restResponseByteArray", APPLICATION_OCTET_STREAM);
}
@Test
public void testRestResponseByteArrayInJaxRsServiceResponse() throws IOException {
testServiceByteArrayResponse("/jax-rs/defaultContentType/restResponseByteArray", APPLICATION_OCTET_STREAM);
}
@Test
public void testRestResponseByteArrayInJaxRsOpenAPIRequest() {
testOpenAPIRequest("/jax-rs/defaultContentType/restResponseByteArray", APPLICATION_OCTET_STREAM);
}
@Test
public void testRestResponseByteArrayInJaxRsOpenAPIResponse() {
testOpenAPIResponse("/jax-rs/defaultContentType/restResponseByteArray/{fileName}", APPLICATION_OCTET_STREAM);
}
// Optional<byte[]>
//@Test
public void testOptionalByteArrayInJaxRsServiceRequest() throws IOException {
testServiceByteArrayRequest("/jax-rs/defaultContentType/optionalByteArray", APPLICATION_OCTET_STREAM);
}
//@Test
public void testOptionalByteArrayInJaxRsServiceResponse() throws IOException {
testServiceByteArrayResponse("/jax-rs/defaultContentType/optionalByteArray/{fileName}", APPLICATION_OCTET_STREAM);
}
@Test
public void testOptionalByteArrayInJaxRsOpenAPIRequest() {
testOpenAPIRequest("/jax-rs/defaultContentType/optionalByteArray", APPLICATION_OCTET_STREAM);
}
@Test
public void testOptionalByteArrayInJaxRsOpenAPIResponse() {
testOpenAPIResponse("/jax-rs/defaultContentType/optionalByteArray/{fileName}", APPLICATION_OCTET_STREAM);
}
// Uni<byte[]>
@Test
public void testUniByteArrayInJaxRsServiceResponse() throws IOException {
testServiceByteArrayResponse("/jax-rs/defaultContentType/uniByteArray", APPLICATION_OCTET_STREAM);
}
@Test
public void testUniByteArrayInJaxRsOpenAPIResponse() {
testOpenAPIResponse("/jax-rs/defaultContentType/uniByteArray/{fileName}", APPLICATION_OCTET_STREAM);
}
// CompletionStage<byte[]>
@Test
public void testCompletionStageByteArrayInJaxRsServiceResponse() throws IOException {
testServiceByteArrayResponse("/jax-rs/defaultContentType/completionStageByteArray", APPLICATION_OCTET_STREAM);
}
@Test
public void testCompletionStageByteArrayInJaxRsOpenAPIResponse() {
testOpenAPIResponse("/jax-rs/defaultContentType/completionStageByteArray/{fileName}", APPLICATION_OCTET_STREAM);
}
// CompletedFuture<byte[]>
@Test
public void testCompletedFutureByteArrayInJaxRsServiceResponse() throws IOException {
testServiceByteArrayResponse("/jax-rs/defaultContentType/completedFutureByteArray", APPLICATION_OCTET_STREAM);
}
@Test
public void testCompletedFutureByteArrayInJaxRsOpenAPIResponse() {
testOpenAPIResponse("/jax-rs/defaultContentType/completionStageByteArray/{fileName}", APPLICATION_OCTET_STREAM);
}
}
|
ByteArrayTest
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.