language
stringclasses 1
value | repo
stringclasses 60
values | path
stringlengths 22
294
| class_span
dict | source
stringlengths 13
1.16M
| target
stringlengths 1
113
|
|---|---|---|---|---|---|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/boot/models/annotations/internal/AnyDiscriminatorValueAnnotation.java
|
{
"start": 479,
"end": 1756
}
|
class ____ implements AnyDiscriminatorValue {
private String discriminator;
private java.lang.Class<?> entity;
/**
* Used in creating dynamic annotation instances (e.g. from XML)
*/
public AnyDiscriminatorValueAnnotation(ModelsContext modelContext) {
}
/**
* Used in creating annotation instances from JDK variant
*/
public AnyDiscriminatorValueAnnotation(AnyDiscriminatorValue annotation, ModelsContext modelContext) {
this.discriminator = annotation.discriminator();
this.entity = annotation.entity();
}
/**
* Used in creating annotation instances from Jandex variant
*/
public AnyDiscriminatorValueAnnotation(Map<String, Object> attributeValues, ModelsContext modelContext) {
this.discriminator = (String) attributeValues.get( "discriminator" );
this.entity = (Class<?>) attributeValues.get( "entity" );
}
@Override
public Class<? extends Annotation> annotationType() {
return AnyDiscriminatorValue.class;
}
@Override
public String discriminator() {
return discriminator;
}
public void discriminator(String value) {
this.discriminator = value;
}
@Override
public java.lang.Class<?> entity() {
return entity;
}
public void entity(java.lang.Class<?> value) {
this.entity = value;
}
}
|
AnyDiscriminatorValueAnnotation
|
java
|
apache__hadoop
|
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/TestChainMapReduce.java
|
{
"start": 1910,
"end": 6494
}
|
class ____ extends HadoopTestCase {
private static Path getFlagDir(boolean local) {
Path flagDir = new Path("testing/chain/flags");
// Hack for local FS that does not have the concept of a 'mounting point'
if (local) {
String localPathRoot = System.getProperty("test.build.data", "/tmp")
.replace(' ', '+');
flagDir = new Path(localPathRoot, flagDir);
}
return flagDir;
}
private static void cleanFlags(JobConf conf) throws IOException {
FileSystem fs = FileSystem.get(conf);
fs.delete(getFlagDir(conf.getBoolean("localFS", true)), true);
fs.mkdirs(getFlagDir(conf.getBoolean("localFS", true)));
}
private static void writeFlag(JobConf conf, String flag) throws IOException {
FileSystem fs = FileSystem.get(conf);
if (getFlag(conf, flag)) {
fail("Flag " + flag + " already exists");
}
DataOutputStream file =
fs.create(new Path(getFlagDir(conf.getBoolean("localFS", true)), flag));
file.close();
}
private static boolean getFlag(JobConf conf, String flag) throws IOException {
FileSystem fs = FileSystem.get(conf);
return fs
.exists(new Path(getFlagDir(conf.getBoolean("localFS", true)), flag));
}
public TestChainMapReduce() throws IOException {
super(HadoopTestCase.LOCAL_MR, HadoopTestCase.LOCAL_FS, 1, 1);
}
@Test
public void testChain() throws Exception {
Path inDir = new Path("testing/chain/input");
Path outDir = new Path("testing/chain/output");
// Hack for local FS that does not have the concept of a 'mounting point'
if (isLocalFS()) {
String localPathRoot = System.getProperty("test.build.data", "/tmp")
.replace(' ', '+');
inDir = new Path(localPathRoot, inDir);
outDir = new Path(localPathRoot, outDir);
}
JobConf conf = createJobConf();
conf.setBoolean("localFS", isLocalFS());
conf.setInt("mapreduce.job.maps", 1);
cleanFlags(conf);
FileSystem fs = FileSystem.get(conf);
fs.delete(outDir, true);
if (!fs.mkdirs(inDir)) {
throw new IOException("Mkdirs failed to create " + inDir);
}
DataOutputStream file = fs.create(new Path(inDir, "part-0"));
file.writeBytes("1\n2\n");
file.close();
conf.setJobName("chain");
conf.setInputFormat(TextInputFormat.class);
conf.setOutputFormat(TextOutputFormat.class);
conf.set("a", "X");
JobConf mapAConf = new JobConf(false);
mapAConf.set("a", "A");
ChainMapper.addMapper(conf, AMap.class, LongWritable.class, Text.class,
LongWritable.class, Text.class, true, mapAConf);
ChainMapper.addMapper(conf, BMap.class, LongWritable.class, Text.class,
LongWritable.class, Text.class, false, null);
JobConf reduceConf = new JobConf(false);
reduceConf.set("a", "C");
ChainReducer.setReducer(conf, CReduce.class, LongWritable.class, Text.class,
LongWritable.class, Text.class, true, reduceConf);
ChainReducer.addMapper(conf, DMap.class, LongWritable.class, Text.class,
LongWritable.class, Text.class, false, null);
JobConf mapEConf = new JobConf(false);
mapEConf.set("a", "E");
ChainReducer.addMapper(conf, EMap.class, LongWritable.class, Text.class,
LongWritable.class, Text.class, true, mapEConf);
FileInputFormat.setInputPaths(conf, inDir);
FileOutputFormat.setOutputPath(conf, outDir);
JobClient jc = new JobClient(conf);
RunningJob job = jc.submitJob(conf);
while (!job.isComplete()) {
Thread.sleep(100);
}
assertTrue(getFlag(conf, "configure.A"));
assertTrue(getFlag(conf, "configure.B"));
assertTrue(getFlag(conf, "configure.C"));
assertTrue(getFlag(conf, "configure.D"));
assertTrue(getFlag(conf, "configure.E"));
assertTrue(getFlag(conf, "map.A.value.1"));
assertTrue(getFlag(conf, "map.A.value.2"));
assertTrue(getFlag(conf, "map.B.value.1"));
assertTrue(getFlag(conf, "map.B.value.2"));
assertTrue(getFlag(conf, "reduce.C.value.2"));
assertTrue(getFlag(conf, "reduce.C.value.1"));
assertTrue(getFlag(conf, "map.D.value.1"));
assertTrue(getFlag(conf, "map.D.value.2"));
assertTrue(getFlag(conf, "map.E.value.1"));
assertTrue(getFlag(conf, "map.E.value.2"));
assertTrue(getFlag(conf, "close.A"));
assertTrue(getFlag(conf, "close.B"));
assertTrue(getFlag(conf, "close.C"));
assertTrue(getFlag(conf, "close.D"));
assertTrue(getFlag(conf, "close.E"));
}
public static
|
TestChainMapReduce
|
java
|
apache__camel
|
components/camel-mongodb/src/test/java/org/apache/camel/component/mongodb/integration/AbstractMongoDbITSupport.java
|
{
"start": 2224,
"end": 7296
}
|
class ____ implements ConfigurableContext {
@Order(1)
@RegisterExtension
public static MongoDBService service = MongoDBServiceFactory.createSingletonService();
@Order(2)
@RegisterExtension
protected static CamelContextExtension contextExtension = new DefaultCamelContextExtension();
protected static final String SCHEME = "mongodb";
protected static final String USER = "test-user";
protected static final String PASSWORD = "test-pwd";
protected static MongoClient mongo;
protected static MongoDatabase db;
protected static MongoCollection<Document> testCollection;
protected static MongoCollection<Document> dynamicCollection;
protected static String dbName = "test";
protected static String testCollectionName;
protected static String dynamicCollectionName;
protected CamelContext context;
protected ProducerTemplate template;
@BeforeEach
public void setupContextAndTemplates() {
template = contextExtension.getProducerTemplate();
context = contextExtension.getContext();
}
protected void doPreSetup() throws Exception {
mongo = MongoClients.create(service.getReplicaSetUrl());
db = mongo.getDatabase(dbName);
}
protected void doPostSetup() {
refresh();
}
protected void refresh() {
// Refresh the test collection - drop it and recreate it. We don't do
// this for the database because MongoDB would create large
// store files each time
testCollectionName = "camelTest";
testCollection = db.getCollection(testCollectionName, Document.class);
testCollection.drop();
testCollection = db.getCollection(testCollectionName, Document.class);
dynamicCollectionName = testCollectionName.concat("Dynamic");
dynamicCollection = db.getCollection(dynamicCollectionName, Document.class);
dynamicCollection.drop();
dynamicCollection = db.getCollection(dynamicCollectionName, Document.class);
}
@AfterEach
public void tearDown() {
if (testCollection != null) {
testCollection.drop();
}
if (dynamicCollection != null) {
dynamicCollection.drop();
}
}
@ContextFixture
@Override
public void configureContext(CamelContext context) throws Exception {
doPreSetup();
context.getPropertiesComponent().setLocation("classpath:mongodb.test.properties");
context.getComponent(SCHEME, MongoDbComponent.class).setMongoConnection(null);
context.getRegistry().bind("myDb", mongo);
doPostSetup();
}
/**
* Useful to simulate the presence of an authenticated user with name {@value #USER} and password {@value #PASSWORD}
*/
protected void createAuthorizationUser() {
createAuthorizationUser("admin", USER, PASSWORD);
}
protected void createAuthorizationUser(String database, String user, String password) {
MongoDatabase adminDb = mongo.getDatabase("admin");
MongoCollection<Document> usersCollection = adminDb.getCollection("system.users");
if (usersCollection.countDocuments(new Document("user", user)) == 0) {
MongoDatabase db = mongo.getDatabase(database);
Map<String, Object> commandArguments = new LinkedHashMap<>();
commandArguments.put("createUser", user);
commandArguments.put("pwd", password);
String[] roles = { "readWrite" };
commandArguments.put("roles", roles);
BasicDBObject command = new BasicDBObject(commandArguments);
db.runCommand(command);
}
}
protected void pumpDataIntoTestCollection() {
// there should be 100 of each
String[] scientists
= { "Einstein", "Darwin", "Copernicus", "Pasteur", "Curie", "Faraday", "Newton", "Bohr", "Galilei", "Maxwell" };
for (int i = 1; i <= 1000; i++) {
int index = i % scientists.length;
Formatter f = new Formatter();
String doc
= f.format("{\"_id\":\"%d\", \"scientist\":\"%s\", \"fixedField\": \"fixedValue\"}", i, scientists[index])
.toString();
IOHelper.close(f);
testCollection.insertOne(Document.parse(doc));
}
assertEquals(1000L, testCollection.countDocuments(), "Data pumping of 1000 entries did not complete entirely");
}
protected CamelMongoDbException extractAndAssertCamelMongoDbException(Object result, String message) {
Throwable exc = ((CamelExecutionException) result).getCause();
assertTrue(exc instanceof CamelMongoDbException, "Result is not an CamelMongoDbException");
CamelMongoDbException camelExc = ObjectHelper.cast(CamelMongoDbException.class, exc);
if (message != null) {
assertTrue(camelExc.getMessage().contains(message), "CamelMongoDbException doesn't contain desired message string");
}
return camelExc;
}
}
|
AbstractMongoDbITSupport
|
java
|
apache__kafka
|
clients/src/test/java/org/apache/kafka/common/security/oauthbearer/internals/secured/HttpJwtRetrieverTest.java
|
{
"start": 1555,
"end": 6524
}
|
class ____ extends OAuthBearerTest {
@Test
public void test() throws IOException {
String expectedResponse = "Hiya, buddy";
HttpURLConnection mockedCon = createHttpURLConnection(expectedResponse);
String response = HttpJwtRetriever.post(mockedCon, null, null, null, null);
assertEquals(expectedResponse, response);
}
@Test
public void testEmptyResponse() throws IOException {
HttpURLConnection mockedCon = createHttpURLConnection("");
assertThrows(IOException.class, () -> HttpJwtRetriever.post(mockedCon, null, null, null, null));
}
@Test
public void testErrorReadingResponse() throws IOException {
HttpURLConnection mockedCon = createHttpURLConnection("dummy");
when(mockedCon.getInputStream()).thenThrow(new IOException("Can't read"));
assertThrows(IOException.class, () -> HttpJwtRetriever.post(mockedCon, null, null, null, null));
}
@Test
public void testErrorResponseUnretryableCode() throws IOException {
HttpURLConnection mockedCon = createHttpURLConnection("dummy");
when(mockedCon.getInputStream()).thenThrow(new IOException("Can't read"));
when(mockedCon.getErrorStream()).thenReturn(new ByteArrayInputStream(
"{\"error\":\"some_arg\", \"error_description\":\"some problem with arg\"}"
.getBytes(StandardCharsets.UTF_8)));
when(mockedCon.getResponseCode()).thenReturn(HttpURLConnection.HTTP_BAD_REQUEST);
UnretryableException ioe = assertThrows(UnretryableException.class,
() -> HttpJwtRetriever.post(mockedCon, null, null, null, null));
assertTrue(ioe.getMessage().contains("{\"some_arg\" - \"some problem with arg\"}"));
}
@Test
public void testErrorResponseRetryableCode() throws IOException {
HttpURLConnection mockedCon = createHttpURLConnection("dummy");
when(mockedCon.getInputStream()).thenThrow(new IOException("Can't read"));
when(mockedCon.getErrorStream()).thenReturn(new ByteArrayInputStream(
"{\"error\":\"some_arg\", \"error_description\":\"some problem with arg\"}"
.getBytes(StandardCharsets.UTF_8)));
when(mockedCon.getResponseCode()).thenReturn(HttpURLConnection.HTTP_INTERNAL_ERROR);
IOException ioe = assertThrows(IOException.class,
() -> HttpJwtRetriever.post(mockedCon, null, null, null, null));
assertTrue(ioe.getMessage().contains("{\"some_arg\" - \"some problem with arg\"}"));
// error response body has different keys
when(mockedCon.getErrorStream()).thenReturn(new ByteArrayInputStream(
"{\"errorCode\":\"some_arg\", \"errorSummary\":\"some problem with arg\"}"
.getBytes(StandardCharsets.UTF_8)));
ioe = assertThrows(IOException.class,
() -> HttpJwtRetriever.post(mockedCon, null, null, null, null));
assertTrue(ioe.getMessage().contains("{\"some_arg\" - \"some problem with arg\"}"));
// error response is valid json but unknown keys
when(mockedCon.getErrorStream()).thenReturn(new ByteArrayInputStream(
"{\"err\":\"some_arg\", \"err_des\":\"some problem with arg\"}"
.getBytes(StandardCharsets.UTF_8)));
ioe = assertThrows(IOException.class,
() -> HttpJwtRetriever.post(mockedCon, null, null, null, null));
assertTrue(ioe.getMessage().contains("{\"err\":\"some_arg\", \"err_des\":\"some problem with arg\"}"));
}
@Test
public void testErrorResponseIsInvalidJson() throws IOException {
HttpURLConnection mockedCon = createHttpURLConnection("dummy");
when(mockedCon.getInputStream()).thenThrow(new IOException("Can't read"));
when(mockedCon.getErrorStream()).thenReturn(new ByteArrayInputStream(
"non json error output".getBytes(StandardCharsets.UTF_8)));
when(mockedCon.getResponseCode()).thenReturn(HttpURLConnection.HTTP_INTERNAL_ERROR);
IOException ioe = assertThrows(IOException.class,
() -> HttpJwtRetriever.post(mockedCon, null, null, null, null));
assertTrue(ioe.getMessage().contains("{non json error output}"));
}
@Test
public void testCopy() throws IOException {
byte[] expected = new byte[4096 + 1];
Random r = new Random();
r.nextBytes(expected);
InputStream in = new ByteArrayInputStream(expected);
ByteArrayOutputStream out = new ByteArrayOutputStream();
HttpJwtRetriever.copy(in, out);
assertArrayEquals(expected, out.toByteArray());
}
@Test
public void testCopyError() throws IOException {
InputStream mockedIn = mock(InputStream.class);
OutputStream out = new ByteArrayOutputStream();
when(mockedIn.read(any(byte[].class))).thenThrow(new IOException());
assertThrows(IOException.class, () -> HttpJwtRetriever.copy(mockedIn, out));
}
}
|
HttpJwtRetrieverTest
|
java
|
alibaba__fastjson
|
src/test/java/com/alibaba/json/bvt/issue_2700/Issue2752.java
|
{
"start": 999,
"end": 1602
}
|
class ____ implements Module {
@Override
public ObjectDeserializer createDeserializer(ParserConfig config, Class type) {
if (type.getName().equals("org.springframework.data.domain.Sort")) {
return MiscCodec.instance;
}
return null;
}
@Override
public ObjectSerializer createSerializer(SerializeConfig config, Class type) {
if (type.getName().equals("org.springframework.data.domain.Sort")) {
return MiscCodec.instance;
}
return null;
}
}
}
|
MyModule
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/rest/action/synonyms/RestGetSynonymsAction.java
|
{
"start": 1024,
"end": 1920
}
|
class ____ extends BaseRestHandler {
private static final Integer DEFAULT_FROM_PARAM = 0;
private static final Integer DEFAULT_SIZE_PARAM = 10;
@Override
public String getName() {
return "synonyms_get_action";
}
@Override
public List<Route> routes() {
return List.of(new Route(GET, "/_synonyms/{synonymsSet}"));
}
@Override
protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) throws IOException {
GetSynonymsAction.Request request = new GetSynonymsAction.Request(
restRequest.param("synonymsSet"),
restRequest.paramAsInt("from", DEFAULT_FROM_PARAM),
restRequest.paramAsInt("size", DEFAULT_SIZE_PARAM)
);
return channel -> client.execute(GetSynonymsAction.INSTANCE, request, new RestToXContentListener<>(channel));
}
}
|
RestGetSynonymsAction
|
java
|
redisson__redisson
|
redisson/src/test/java/org/redisson/RedissonSemaphoreTest.java
|
{
"start": 440,
"end": 11218
}
|
class ____ extends BaseConcurrentTest {
@Test
public void testAcquireAfterAddPermits() throws InterruptedException {
RSemaphore s = redisson.getSemaphore("test");
CountDownLatch l = new CountDownLatch(1);
Thread t1 = new Thread(() -> {
s.addPermits(1);
try {
s.acquire(2);
l.countDown();
} catch (InterruptedException e) {
e.printStackTrace();
}
});
t1.start();
t1.join(1000);
assertThat(l.await(1, TimeUnit.SECONDS)).isFalse();
s.acquire();
Thread.sleep(1000);
s.release();
assertThat(l.await(1, TimeUnit.SECONDS)).isFalse();
s.addPermits(1);
assertThat(l.await(1, TimeUnit.SECONDS)).isTrue();
}
@Test
public void testZero() throws InterruptedException {
RSemaphore s = redisson.getSemaphore("test");
assertThat(s.tryAcquire(0, Duration.ofMinutes(10))).isTrue();
s.release(0);
assertThat(s.availablePermits()).isZero();
}
@Test
public void testAcquireWithoutSetPermits() throws InterruptedException {
RSemaphore s = redisson.getSemaphore("test");
s.release();
s.release();
s.acquire(2);
}
@Test
public void testTrySetPermits() throws InterruptedException {
RSemaphore s = redisson.getSemaphore("test");
assertThat(s.trySetPermits(10)).isTrue();
assertThat(s.availablePermits()).isEqualTo(10);
assertThat(s.trySetPermits(15)).isFalse();
assertThat(s.availablePermits()).isEqualTo(10);
s.delete();
assertThat(s.isExists()).isFalse();
assertThat(s.trySetPermits(1, Duration.ofSeconds(2))).isTrue();
Thread.sleep(1000);
assertThat(s.availablePermits()).isEqualTo(1);
Thread.sleep(1000);
assertThat(s.availablePermits()).isZero();
assertThat(s.isExists()).isFalse();
}
@Test
public void testAddPermits() throws InterruptedException {
RSemaphore s = redisson.getSemaphore("test");
s.trySetPermits(10);
s.acquire(10);
assertThat(s.availablePermits()).isEqualTo(0);
s.addPermits(4);
assertThat(s.availablePermits()).isEqualTo(4);
s.release(10);
assertThat(s.availablePermits()).isEqualTo(14);
s.acquire(5);
assertThat(s.availablePermits()).isEqualTo(9);
}
@Test
public void testReducePermits() throws InterruptedException {
RSemaphore s = redisson.getSemaphore("test2");
s.trySetPermits(10);
s.acquire(10);
s.addPermits(-5);
assertThat(s.availablePermits()).isEqualTo(-5);
s.release(10);
assertThat(s.availablePermits()).isEqualTo(5);
s.acquire(5);
assertThat(s.availablePermits()).isEqualTo(0);
}
@Test
public void testBlockingAcquire() throws InterruptedException {
RSemaphore s = redisson.getSemaphore("test");
s.trySetPermits(1);
s.acquire();
Thread t = new Thread() {
@Override
public void run() {
RSemaphore s = redisson.getSemaphore("test");
try {
Thread.sleep(1000);
} catch (InterruptedException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
s.release();
}
};
t.start();
assertThat(s.availablePermits()).isEqualTo(0);
s.acquire();
assertThat(s.tryAcquire()).isFalse();
assertThat(s.availablePermits()).isEqualTo(0);
}
@Test
@Timeout(5)
public void testBlockingNAcquire() throws InterruptedException {
RSemaphore s = redisson.getSemaphore("test");
s.trySetPermits(5);
s.acquire(3);
Thread t = new Thread() {
@Override
public void run() {
RSemaphore s = redisson.getSemaphore("test");
try {
Thread.sleep(500);
} catch (InterruptedException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
s.release();
try {
Thread.sleep(500);
} catch (InterruptedException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
s.release();
}
};
assertThat(s.availablePermits()).isEqualTo(2);
t.start();
s.acquire(4);
assertThat(s.availablePermits()).isEqualTo(0);
}
@Test
public void testTryNAcquire() throws InterruptedException {
RSemaphore s = redisson.getSemaphore("test");
s.trySetPermits(5);
assertThat(s.tryAcquire(3)).isTrue();
Thread t = new Thread() {
@Override
public void run() {
RSemaphore s = redisson.getSemaphore("test");
try {
Thread.sleep(500);
} catch (InterruptedException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
s.release();
try {
Thread.sleep(500);
} catch (InterruptedException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
s.release();
}
};
assertThat(s.tryAcquire(4)).isFalse();
t.start();
t.join(1);
Awaitility.await().between(Duration.ofMillis(900), Duration.ofMillis(1200)).untilAsserted(() -> {
assertThat(s.tryAcquire(4, Duration.ofSeconds(2))).isTrue();
});
assertThat(s.availablePermits()).isEqualTo(0);
}
@Test
public void testReleaseWithoutPermits() {
RSemaphore s = redisson.getSemaphore("test");
s.release();
assertThat(s.availablePermits()).isEqualTo(1);
}
@Test
public void testDrainPermits() throws InterruptedException {
RSemaphore s = redisson.getSemaphore("test");
assertThat(s.drainPermits()).isZero();
s.trySetPermits(10);
s.acquire(3);
assertThat(s.drainPermits()).isEqualTo(7);
assertThat(s.availablePermits()).isEqualTo(0);
}
@Test
public void testReleaseAcquire() throws InterruptedException {
RSemaphore s = redisson.getSemaphore("test");
s.trySetPermits(10);
s.acquire();
assertThat(s.availablePermits()).isEqualTo(9);
s.release();
assertThat(s.availablePermits()).isEqualTo(10);
s.acquire(5);
assertThat(s.availablePermits()).isEqualTo(5);
s.release(5);
assertThat(s.availablePermits()).isEqualTo(10);
}
@Test
public void testConcurrency_SingleInstance() throws InterruptedException {
final AtomicInteger lockedCounter = new AtomicInteger();
RSemaphore s = redisson.getSemaphore("test");
s.trySetPermits(1);
int iterations = 15;
testSingleInstanceConcurrency(iterations, r -> {
RSemaphore s1 = r.getSemaphore("test");
try {
s1.acquire();
}catch (InterruptedException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
int value = lockedCounter.get();
lockedCounter.set(value + 1);
s1.release();
});
assertThat(lockedCounter.get()).isEqualTo(iterations);
}
@Test
public void testConcurrencyLoopMax_MultiInstance() throws InterruptedException {
final int iterations = 10;
final AtomicInteger lockedCounter = new AtomicInteger();
RSemaphore s = redisson.getSemaphore("test");
s.trySetPermits(Integer.MAX_VALUE);
testMultiInstanceConcurrency(4, r -> {
for (int i = 0; i < iterations; i++) {
int v = Integer.MAX_VALUE;
if (ThreadLocalRandom.current().nextBoolean()) {
v = 1;
}
try {
r.getSemaphore("test").acquire(v);
}catch (InterruptedException e1) {
// TODO Auto-generated catch block
e1.printStackTrace();
}
try {
Thread.sleep(10);
} catch (InterruptedException e) {
e.printStackTrace();
}
lockedCounter.incrementAndGet();
r.getSemaphore("test").release(v);
}
});
assertThat(lockedCounter.get()).isEqualTo(4 * iterations);
}
@Test
public void testConcurrencyLoop_MultiInstance() throws InterruptedException {
final int iterations = 100;
final AtomicInteger lockedCounter = new AtomicInteger();
RSemaphore s = redisson.getSemaphore("test");
s.trySetPermits(1);
testMultiInstanceConcurrency(16, r -> {
for (int i = 0; i < iterations; i++) {
try {
r.getSemaphore("test").acquire();
}catch (InterruptedException e1) {
// TODO Auto-generated catch block
e1.printStackTrace();
}
try {
Thread.sleep(10);
} catch (InterruptedException e) {
e.printStackTrace();
}
int value = lockedCounter.get();
lockedCounter.set(value + 1);
r.getSemaphore("test").release();
}
});
assertThat(lockedCounter.get()).isEqualTo(16 * iterations);
}
@Test
public void testConcurrency_MultiInstance_1_permits() throws InterruptedException {
int iterations = 30;
final AtomicInteger lockedCounter = new AtomicInteger();
RSemaphore s = redisson.getSemaphore("test");
s.trySetPermits(1);
testMultiInstanceConcurrency(iterations, r -> {
RSemaphore s1 = r.getSemaphore("test");
try {
s1.acquire();
}catch (InterruptedException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
int value = lockedCounter.get();
lockedCounter.set(value + 1);
s1.release();
});
assertThat(lockedCounter.get()).isEqualTo(iterations);
}
}
|
RedissonSemaphoreTest
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/type/LongListTypeContributorTest.java
|
{
"start": 3030,
"end": 3322
}
|
class ____ extends java.util.ArrayList<Long> {
public LongList() {
super();
}
public LongList(int initialCapacity) {
super( initialCapacity );
}
public LongList(Long... longs) {
super( longs.length );
this.addAll( Arrays.asList( longs ) );
}
}
public static
|
LongList
|
java
|
junit-team__junit5
|
jupiter-tests/src/test/java/org/junit/jupiter/engine/discovery/DiscoveryTests.java
|
{
"start": 19342,
"end": 19497
}
|
class ____ {
@Test
void test() {
}
@Test
abstract void abstractTest();
}
@SuppressWarnings("JUnitMalformedDeclaration")
static
|
AbstractTestCase
|
java
|
mockito__mockito
|
mockito-core/src/test/java/org/mockito/internal/exceptions/stacktrace/ConditionalStackTraceFilterTest.java
|
{
"start": 461,
"end": 1618
}
|
class ____ extends TestBase {
private ConditionalStackTraceFilter filter = new ConditionalStackTraceFilter();
@Test
public void shouldNotFilterWhenConfigurationSaysNo() {
ConfigurationAccess.getConfig().overrideCleansStackTrace(false);
Throwable t =
new TraceBuilder()
.classes("org.test.MockitoSampleTest", "org.mockito.Mockito")
.toThrowable();
filter.filter(t);
Assertions.assertThat(t)
.has(
onlyThoseClassesInStackTrace(
"org.mockito.Mockito", "org.test.MockitoSampleTest"));
}
@Test
public void shouldFilterWhenConfigurationSaysYes() {
ConfigurationAccess.getConfig().overrideCleansStackTrace(true);
Throwable t =
new TraceBuilder()
.classes("org.test.MockitoSampleTest", "org.mockito.Mockito")
.toThrowable();
filter.filter(t);
Assertions.assertThat(t).has(onlyThoseClassesInStackTrace("org.test.MockitoSampleTest"));
}
}
|
ConditionalStackTraceFilterTest
|
java
|
apache__flink
|
flink-tests/src/test/java/org/apache/flink/test/streaming/runtime/GlobalAggregateITCase.java
|
{
"start": 2775,
"end": 4543
}
|
class ____ extends RichSourceFunction<Integer> {
private GlobalAggregateManager aggregateManager = null;
private final AggregateFunction<Integer, Integer, Integer> aggregateFunction;
private final boolean expectFailures;
public TestSourceFunction(
AggregateFunction<Integer, Integer, Integer> aggregateFunction,
boolean expectFailures) {
this.aggregateFunction = aggregateFunction;
this.expectFailures = expectFailures;
}
@Override
public void open(OpenContext openContext) throws Exception {
super.open(openContext);
StreamingRuntimeContext runtimeContext = (StreamingRuntimeContext) getRuntimeContext();
aggregateManager = runtimeContext.getGlobalAggregateManager();
}
@Override
public void run(SourceContext<Integer> ctx) throws Exception {
Integer expectedAccumulator = 0;
int exceptionCount = 0;
for (int i = 0; i < 5; i++) {
Integer actualAccumlator = 0;
try {
actualAccumlator =
aggregateManager.updateGlobalAggregate("testAgg", i, aggregateFunction);
expectedAccumulator += i;
} catch (IOException e) {
exceptionCount++;
}
if (expectFailures) {
assertEquals(i + 1, exceptionCount);
} else {
assertEquals(expectedAccumulator, actualAccumlator);
}
}
}
@Override
public void cancel() {}
}
/** Simple integer aggregate function. */
private static
|
TestSourceFunction
|
java
|
apache__kafka
|
streams/src/test/java/org/apache/kafka/streams/state/internals/RocksDBVersionedStoreTest.java
|
{
"start": 56692,
"end": 56994
}
|
class ____ {
final String key;
final String value;
final long timestamp;
DataRecord(final String key, final String value, final long timestamp) {
this.key = key;
this.value = value;
this.timestamp = timestamp;
}
}
}
|
DataRecord
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/UnsafeReflectiveConstructionCastTest.java
|
{
"start": 5118,
"end": 5415
}
|
class ____<T> {
public T get(String className) {
try {
return (T) Class.forName(className).getDeclaredConstructor().newInstance();
} catch (ReflectiveOperationException e) {
throw new IllegalStateException(e);
}
}
}
}\
""")
.doTest();
}
}
|
Supplier
|
java
|
redisson__redisson
|
redisson/src/main/java/org/redisson/api/map/event/EntryRemovedListener.java
|
{
"start": 744,
"end": 860
}
|
interface ____<K, V> extends MapEntryListener {
void onRemoved(EntryEvent<K, V> event);
}
|
EntryRemovedListener
|
java
|
google__dagger
|
javatests/dagger/hilt/android/AndroidEntryPointBaseClassTest.java
|
{
"start": 3543,
"end": 3638
}
|
class ____ extends Hilt_AndroidEntryPointBaseClassTest_L {}
@AndroidEntryPoint
public static
|
L
|
java
|
spring-projects__spring-framework
|
spring-expression/src/test/java/org/springframework/expression/spel/BooleanExpressionTests.java
|
{
"start": 1062,
"end": 4171
}
|
class ____ extends AbstractExpressionTests {
@Test
void testBooleanTrue() {
evaluate("true", Boolean.TRUE, Boolean.class);
}
@Test
void testBooleanFalse() {
evaluate("false", Boolean.FALSE, Boolean.class);
}
@Test
void testOr() {
evaluate("false or false", Boolean.FALSE, Boolean.class);
evaluate("false or true", Boolean.TRUE, Boolean.class);
evaluate("true or false", Boolean.TRUE, Boolean.class);
evaluate("true or true", Boolean.TRUE, Boolean.class);
}
@Test
void testAnd() {
evaluate("false and false", Boolean.FALSE, Boolean.class);
evaluate("false and true", Boolean.FALSE, Boolean.class);
evaluate("true and false", Boolean.FALSE, Boolean.class);
evaluate("true and true", Boolean.TRUE, Boolean.class);
}
@Test
void testNot() {
evaluate("!false", Boolean.TRUE, Boolean.class);
evaluate("!true", Boolean.FALSE, Boolean.class);
evaluate("not false", Boolean.TRUE, Boolean.class);
evaluate("NoT true", Boolean.FALSE, Boolean.class);
}
@Test
void testCombinations01() {
evaluate("false and false or true", Boolean.TRUE, Boolean.class);
evaluate("true and false or true", Boolean.TRUE, Boolean.class);
evaluate("true and false or false", Boolean.FALSE, Boolean.class);
}
@Test
void testWritability() {
evaluate("true and true", Boolean.TRUE, Boolean.class, false);
evaluate("true or true", Boolean.TRUE, Boolean.class, false);
evaluate("!false", Boolean.TRUE, Boolean.class, false);
}
@Test
void testBooleanErrors01() {
evaluateAndCheckError("1.0 or false", SpelMessage.TYPE_CONVERSION_ERROR, 0);
evaluateAndCheckError("false or 39.4", SpelMessage.TYPE_CONVERSION_ERROR, 9);
evaluateAndCheckError("true and 'hello'", SpelMessage.TYPE_CONVERSION_ERROR, 9);
evaluateAndCheckError(" 'hello' and 'goodbye'", SpelMessage.TYPE_CONVERSION_ERROR, 1);
evaluateAndCheckError("!35.2", SpelMessage.TYPE_CONVERSION_ERROR, 1);
evaluateAndCheckError("! 'foob'", SpelMessage.TYPE_CONVERSION_ERROR, 2);
}
@Test
void testConvertAndHandleNull() { // SPR-9445
// without null conversion
evaluateAndCheckError("null or true", SpelMessage.TYPE_CONVERSION_ERROR, 0, "null", "boolean");
evaluateAndCheckError("null and true", SpelMessage.TYPE_CONVERSION_ERROR, 0, "null", "boolean");
evaluateAndCheckError("!null", SpelMessage.TYPE_CONVERSION_ERROR, 1, "null", "boolean");
evaluateAndCheckError("null ? 'foo' : 'bar'", SpelMessage.TYPE_CONVERSION_ERROR, 0, "null", "boolean");
// with null conversion (null -> false)
GenericConversionService conversionService = new GenericConversionService() {
@Override
protected Object convertNullSource(TypeDescriptor sourceType, TypeDescriptor targetType) {
return targetType.getType() == Boolean.class ? false : null;
}
};
context.setTypeConverter(new StandardTypeConverter(conversionService));
evaluate("null or true", Boolean.TRUE, Boolean.class, false);
evaluate("null and true", Boolean.FALSE, Boolean.class, false);
evaluate("!null", Boolean.TRUE, Boolean.class, false);
evaluate("null ? 'foo' : 'bar'", "bar", String.class, false);
}
}
|
BooleanExpressionTests
|
java
|
spring-projects__spring-boot
|
core/spring-boot-docker-compose/src/test/java/org/springframework/boot/docker/compose/core/DockerCliComposePsResponseTests.java
|
{
"start": 1041,
"end": 1552
}
|
class ____ {
@Test
void deserializeJson() throws IOException {
String json = new ClassPathResource("docker-compose-ps.json", getClass())
.getContentAsString(StandardCharsets.UTF_8);
DockerCliComposePsResponse response = DockerJson.deserialize(json, DockerCliComposePsResponse.class);
DockerCliComposePsResponse expected = new DockerCliComposePsResponse("f5af31dae7f6", "redis-docker-redis-1",
"redis:7.0", "running");
assertThat(response).isEqualTo(expected);
}
}
|
DockerCliComposePsResponseTests
|
java
|
apache__hadoop
|
hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/impl/InputStreamCallbacksImpl.java
|
{
"start": 1608,
"end": 4006
}
|
class ____ implements ObjectInputStreamCallbacks {
private static final Logger LOG = LoggerFactory.getLogger(InputStreamCallbacksImpl.class);
/**
* Audit span to activate before each call.
*/
private final AuditSpan auditSpan;
/**
* store operations.
*/
private final S3AStore store;
/**
* crypto FS operations.
*/
private final S3AFileSystemOperations fsOperations;
/**
* A (restricted) thread pool for asynchronous operations.
*/
private final ThreadPoolExecutor threadPool;
/**
* Create.
* @param auditSpan Audit span to activate before each call.
* @param store store operations
* @param fsOperations crypto FS operations.
* @param threadPool thread pool for async operations.
*/
public InputStreamCallbacksImpl(
final AuditSpan auditSpan,
final S3AStore store,
final S3AFileSystemOperations fsOperations,
final ThreadPoolExecutor threadPool) {
this.auditSpan = requireNonNull(auditSpan);
this.store = requireNonNull(store);
this.fsOperations = requireNonNull(fsOperations);
this.threadPool = requireNonNull(threadPool);
}
/**
* Closes the audit span.
*/
@Override
public void close() {
auditSpan.close();
}
@Override
public GetObjectRequest.Builder newGetRequestBuilder(final String key) {
// active the audit span used for the operation
try (AuditSpan span = auditSpan.activate()) {
return store.getRequestFactory().newGetObjectRequestBuilder(key);
}
}
@Override
public ResponseInputStream<GetObjectResponse> getObject(GetObjectRequest request) throws
IOException {
// active the audit span used for the operation
try (AuditSpan span = auditSpan.activate()) {
return fsOperations.getObject(store, request, store.getRequestFactory());
}
}
@Override
public <T> CompletableFuture<T> submit(final CallableRaisingIOE<T> operation) {
CompletableFuture<T> result = new CompletableFuture<>();
threadPool.submit(() ->
eval(result, () -> {
LOG.debug("Starting submitted operation in {}", auditSpan.getSpanId());
try (AuditSpan span = auditSpan.activate()) {
return operation.apply();
} finally {
LOG.debug("Completed submitted operation in {}", auditSpan.getSpanId());
}
}));
return result;
}
}
|
InputStreamCallbacksImpl
|
java
|
spring-projects__spring-framework
|
spring-test/src/main/java/org/springframework/test/context/ContextConfigurationAttributes.java
|
{
"start": 5354,
"end": 6217
}
|
class ____ via {@code @ContextConfiguration}
* @throws IllegalArgumentException if the {@code declaringClass} or {@code contextLoaderClass} is
* {@code null}
*/
public ContextConfigurationAttributes(
Class<?> declaringClass, String[] locations, Class<?>[] classes, boolean inheritLocations,
Class<? extends ApplicationContextInitializer<?>>[] initializers,
boolean inheritInitializers, Class<? extends ContextLoader> contextLoaderClass) {
this(declaringClass, locations, classes, inheritLocations, initializers, inheritInitializers, null,
contextLoaderClass);
}
/**
* Construct a new {@link ContextConfigurationAttributes} instance for the
* {@linkplain Class test class} that declared the
* {@link ContextConfiguration @ContextConfiguration} annotation and its
* corresponding attributes.
* @param declaringClass the test
|
declared
|
java
|
apache__dubbo
|
dubbo-remoting/dubbo-remoting-api/src/main/java/org/apache/dubbo/remoting/Codec2.java
|
{
"start": 1123,
"end": 1391
}
|
interface ____ {
@Adaptive({Constants.CODEC_KEY})
void encode(Channel channel, ChannelBuffer buffer, Object message) throws IOException;
@Adaptive({Constants.CODEC_KEY})
Object decode(Channel channel, ChannelBuffer buffer) throws IOException;
|
Codec2
|
java
|
apache__flink
|
flink-formats/flink-json/src/test/java/org/apache/flink/formats/json/JsonRowDataSerDeSchemaTest.java
|
{
"start": 4546,
"end": 46469
}
|
class ____ {
private static final ObjectMapper OBJECT_MAPPER = JacksonMapperFactory.createObjectMapper();
@Parameter public boolean isJsonParser;
@Parameters(name = "isJsonParser={0}")
public static Collection<Boolean> parameters() throws Exception {
return Arrays.asList(true, false);
}
@TestTemplate
void testSerDe() throws Exception {
byte tinyint = 'c';
short smallint = 128;
int intValue = 45536;
float floatValue = 33.333F;
long bigint = 1238123899121L;
String name = "asdlkjasjkdla998y1122";
byte[] bytes = new byte[1024];
ThreadLocalRandom.current().nextBytes(bytes);
BigDecimal decimal = new BigDecimal("123.456789");
Double[] doubles = new Double[] {1.1, 2.2, 3.3};
LocalDate date = LocalDate.parse("1990-10-14");
LocalTime time = LocalTime.parse("12:12:43");
Timestamp timestamp3 = Timestamp.valueOf("1990-10-14 12:12:43.123");
Timestamp timestamp9 = Timestamp.valueOf("1990-10-14 12:12:43.123456789");
Instant timestampWithLocalZone =
LocalDateTime.of(1990, 10, 14, 12, 12, 43, 123456789)
.atOffset(ZoneOffset.of("Z"))
.toInstant();
Map<String, Long> map = new HashMap<>();
map.put("element", 123L);
Map<String, Integer> multiSet = new HashMap<>();
multiSet.put("element", 2);
Map<String, Map<String, Integer>> nestedMap = new HashMap<>();
Map<String, Integer> innerMap = new HashMap<>();
innerMap.put("key", 234);
nestedMap.put("inner_map", innerMap);
ArrayNode doubleNode = OBJECT_MAPPER.createArrayNode().add(1.1D).add(2.2D).add(3.3D);
// Root
ObjectNode root = OBJECT_MAPPER.createObjectNode();
root.put("bool", true);
root.put("tinyint", tinyint);
root.put("smallint", smallint);
root.put("int", intValue);
root.put("bigint", bigint);
root.put("float", floatValue);
root.put("name", name);
root.put("bytes", bytes);
root.put("decimal", decimal);
root.set("doubles", doubleNode);
root.put("date", "1990-10-14");
root.put("time", "12:12:43");
root.put("timestamp3", "1990-10-14T12:12:43.123");
root.put("timestamp9", "1990-10-14T12:12:43.123456789");
root.put("timestampWithLocalZone", "1990-10-14T12:12:43.123456789Z");
root.putObject("map").put("element", 123);
root.putObject("multiSet").put("element", 2);
root.putObject("map2map").putObject("inner_map").put("key", 234);
byte[] serializedJson = OBJECT_MAPPER.writeValueAsBytes(root);
DataType dataType =
ROW(
FIELD("bool", BOOLEAN()),
FIELD("tinyint", TINYINT()),
FIELD("smallint", SMALLINT()),
FIELD("int", INT()),
FIELD("bigint", BIGINT()),
FIELD("float", FLOAT()),
FIELD("name", STRING()),
FIELD("bytes", BYTES()),
FIELD("decimal", DECIMAL(9, 6)),
FIELD("doubles", ARRAY(DOUBLE())),
FIELD("date", DATE()),
FIELD("time", TIME(0)),
FIELD("timestamp3", TIMESTAMP(3)),
FIELD("timestamp9", TIMESTAMP(9)),
FIELD("timestampWithLocalZone", TIMESTAMP_WITH_LOCAL_TIME_ZONE(9)),
FIELD("map", MAP(STRING(), BIGINT())),
FIELD("multiSet", MULTISET(STRING())),
FIELD("map2map", MAP(STRING(), MAP(STRING(), INT()))));
RowType schema = (RowType) dataType.getLogicalType();
DeserializationSchema<RowData> deserializationSchema =
createDeserializationSchema(
isJsonParser, schema, false, false, TimestampFormat.ISO_8601);
open(deserializationSchema);
Row expected = new Row(18);
expected.setField(0, true);
expected.setField(1, tinyint);
expected.setField(2, smallint);
expected.setField(3, intValue);
expected.setField(4, bigint);
expected.setField(5, floatValue);
expected.setField(6, name);
expected.setField(7, bytes);
expected.setField(8, decimal);
expected.setField(9, doubles);
expected.setField(10, date);
expected.setField(11, time);
expected.setField(12, timestamp3.toLocalDateTime());
expected.setField(13, timestamp9.toLocalDateTime());
expected.setField(14, timestampWithLocalZone);
expected.setField(15, map);
expected.setField(16, multiSet);
expected.setField(17, nestedMap);
RowData rowData = deserializationSchema.deserialize(serializedJson);
Row actual = convertToExternal(rowData, dataType);
assertThat(actual).isEqualTo(expected);
// test serialization
JsonRowDataSerializationSchema serializationSchema =
new JsonRowDataSerializationSchema(
schema,
TimestampFormat.ISO_8601,
JsonFormatOptions.MapNullKeyMode.LITERAL,
"null",
true,
false);
open(serializationSchema);
byte[] actualBytes = serializationSchema.serialize(rowData);
assertThat(serializedJson).containsExactly(actualBytes);
}
@Test
public void testEmptyJsonArrayDeserialization() throws Exception {
DataType dataType = ROW(FIELD("f1", INT()), FIELD("f2", BOOLEAN()), FIELD("f3", STRING()));
RowType rowType = (RowType) dataType.getLogicalType();
ObjectMapper objectMapper = new ObjectMapper();
ArrayNode arrayNode = objectMapper.createArrayNode();
DeserializationSchema<RowData> deserializationSchema =
createDeserializationSchema(
isJsonParser, rowType, false, false, TimestampFormat.ISO_8601);
open(deserializationSchema);
List<RowData> result = new ArrayList<>();
Collector<RowData> collector = new ListCollector<>(result);
deserializationSchema.deserialize(objectMapper.writeValueAsBytes(arrayNode), collector);
assertThat(result).isEmpty();
}
@Test
public void testJsonArrayToMultiRecords() throws Exception {
DataType dataType = ROW(FIELD("f1", INT()), FIELD("f2", BOOLEAN()), FIELD("f3", STRING()));
RowType rowType = (RowType) dataType.getLogicalType();
ObjectMapper objectMapper = new ObjectMapper();
ObjectNode element1 = objectMapper.createObjectNode();
element1.put("f1", 1);
element1.put("f2", true);
element1.put("f3", "str");
ObjectNode element2 = objectMapper.createObjectNode();
element2.put("f1", 10);
element2.put("f2", false);
element2.put("f3", "newStr");
ArrayNode arrayNode = objectMapper.createArrayNode();
arrayNode.add(element1);
arrayNode.add(element2);
DeserializationSchema<RowData> deserializationSchema =
createDeserializationSchema(
isJsonParser, rowType, false, false, TimestampFormat.ISO_8601);
open(deserializationSchema);
// test serialization
JsonRowDataSerializationSchema serializationSchema =
new JsonRowDataSerializationSchema(
rowType,
TimestampFormat.ISO_8601,
JsonFormatOptions.MapNullKeyMode.LITERAL,
"null",
true,
false);
open(serializationSchema);
List<RowData> result = new ArrayList<>();
Collector<RowData> collector = new ListCollector<>(result);
deserializationSchema.deserialize(objectMapper.writeValueAsBytes(arrayNode), collector);
assertThat(result).hasSize(2);
byte[] result1 = serializationSchema.serialize(result.get(0));
byte[] result2 = serializationSchema.serialize(result.get(1));
assertThat(result1).isEqualTo(objectMapper.writeValueAsBytes(element1));
assertThat(result2).isEqualTo(objectMapper.writeValueAsBytes(element2));
}
/**
* Tests the deserialization slow path, e.g. convert into string and use {@link
* Double#parseDouble(String)}.
*/
@TestTemplate
void testSlowDeserialization() throws Exception {
Random random = new Random();
boolean bool = random.nextBoolean();
int integer = random.nextInt();
long bigint = random.nextLong();
double doubleValue = random.nextDouble();
float floatValue = random.nextFloat();
ObjectNode root = OBJECT_MAPPER.createObjectNode();
root.put("bool", String.valueOf(bool));
root.put("int", String.valueOf(integer));
root.put("bigint", String.valueOf(bigint));
root.put("double1", String.valueOf(doubleValue));
root.put("double2", new BigDecimal(doubleValue));
root.put("float1", String.valueOf(floatValue));
root.put("float2", new BigDecimal(floatValue));
byte[] serializedJson = OBJECT_MAPPER.writeValueAsBytes(root);
DataType dataType =
ROW(
FIELD("bool", BOOLEAN()),
FIELD("int", INT()),
FIELD("bigint", BIGINT()),
FIELD("double1", DOUBLE()),
FIELD("double2", DOUBLE()),
FIELD("float1", FLOAT()),
FIELD("float2", FLOAT()));
RowType rowType = (RowType) dataType.getLogicalType();
DeserializationSchema<RowData> deserializationSchema =
createDeserializationSchema(
isJsonParser, rowType, false, false, TimestampFormat.ISO_8601);
open(deserializationSchema);
Row expected = new Row(7);
expected.setField(0, bool);
expected.setField(1, integer);
expected.setField(2, bigint);
expected.setField(3, doubleValue);
expected.setField(4, doubleValue);
expected.setField(5, floatValue);
expected.setField(6, floatValue);
RowData rowData = deserializationSchema.deserialize(serializedJson);
Row actual = convertToExternal(rowData, dataType);
assertThat(actual).isEqualTo(expected);
}
@TestTemplate
void testSerDeMultiRows() throws Exception {
RowType rowType =
(RowType)
ROW(
FIELD("f1", INT()),
FIELD("f2", BOOLEAN()),
FIELD("f3", STRING()),
FIELD("f4", MAP(STRING(), STRING())),
FIELD("f5", ARRAY(STRING())),
FIELD("f6", ROW(FIELD("f1", STRING()), FIELD("f2", INT()))))
.getLogicalType();
DeserializationSchema<RowData> deserializationSchema =
createDeserializationSchema(
isJsonParser, rowType, false, false, TimestampFormat.ISO_8601);
open(deserializationSchema);
JsonRowDataSerializationSchema serializationSchema =
new JsonRowDataSerializationSchema(
rowType,
TimestampFormat.ISO_8601,
JsonFormatOptions.MapNullKeyMode.LITERAL,
"null",
true,
false);
open(serializationSchema);
// the first row
{
ObjectNode root = OBJECT_MAPPER.createObjectNode();
root.put("f1", 1);
root.put("f2", true);
root.put("f3", "str");
ObjectNode map = root.putObject("f4");
map.put("hello1", "flink");
ArrayNode array = root.putArray("f5");
array.add("element1");
array.add("element2");
ObjectNode row = root.putObject("f6");
row.put("f1", "this is row1");
row.put("f2", 12);
byte[] serializedJson = OBJECT_MAPPER.writeValueAsBytes(root);
RowData rowData = deserializationSchema.deserialize(serializedJson);
byte[] actual = serializationSchema.serialize(rowData);
assertThat(serializedJson).containsExactly(actual);
}
// the second row
{
ObjectNode root = OBJECT_MAPPER.createObjectNode();
root.put("f1", 10);
root.put("f2", false);
root.put("f3", "newStr");
ObjectNode map = root.putObject("f4");
map.put("hello2", "json");
ArrayNode array = root.putArray("f5");
array.add("element3");
array.add("element4");
ObjectNode row = root.putObject("f6");
row.put("f1", "this is row2");
row.putNull("f2");
byte[] serializedJson = OBJECT_MAPPER.writeValueAsBytes(root);
RowData rowData = deserializationSchema.deserialize(serializedJson);
byte[] actual = serializationSchema.serialize(rowData);
assertThat(serializedJson).containsExactly(actual);
}
}
@TestTemplate
void testSerDeMultiRowsWithNullValues() throws Exception {
String[] jsons =
new String[] {
"{\"svt\":\"2020-02-24T12:58:09.209+0800\",\"metrics\":{\"k1\":10.01,\"k2\":\"invalid\"}}",
"{\"svt\":\"2020-02-24T12:58:09.209+0800\", \"ops\":{\"id\":\"281708d0-4092-4c21-9233-931950b6eccf\"}, "
+ "\"ids\":[1, 2, 3]}",
"{\"svt\":\"2020-02-24T12:58:09.209+0800\",\"metrics\":{}}",
};
String[] expected =
new String[] {
"{\"svt\":\"2020-02-24T12:58:09.209+0800\",\"ops\":null,\"ids\":null,\"metrics\":{\"k1\":10.01,\"k2\":null}}",
"{\"svt\":\"2020-02-24T12:58:09.209+0800\",\"ops\":{\"id\":\"281708d0-4092-4c21-9233-931950b6eccf\"},"
+ "\"ids\":[1,2,3],\"metrics\":null}",
"{\"svt\":\"2020-02-24T12:58:09.209+0800\",\"ops\":null,\"ids\":null,\"metrics\":{}}",
};
RowType rowType =
(RowType)
ROW(
FIELD("svt", STRING()),
FIELD("ops", ROW(FIELD("id", STRING()))),
FIELD("ids", ARRAY(INT())),
FIELD("metrics", MAP(STRING(), DOUBLE())))
.getLogicalType();
DeserializationSchema<RowData> deserializationSchema =
createDeserializationSchema(
isJsonParser, rowType, false, true, TimestampFormat.ISO_8601);
open(deserializationSchema);
JsonRowDataSerializationSchema serializationSchema =
new JsonRowDataSerializationSchema(
rowType,
TimestampFormat.ISO_8601,
JsonFormatOptions.MapNullKeyMode.LITERAL,
"null",
true,
false);
open(serializationSchema);
for (int i = 0; i < jsons.length; i++) {
String json = jsons[i];
RowData row = deserializationSchema.deserialize(json.getBytes());
String result = new String(serializationSchema.serialize(row));
assertThat(result).isEqualTo(expected[i]);
}
}
@TestTemplate
void testDeserializationNullRow() throws Exception {
DataType dataType = ROW(FIELD("name", STRING()));
RowType schema = (RowType) dataType.getLogicalType();
DeserializationSchema<RowData> deserializationSchema =
createDeserializationSchema(
isJsonParser, schema, true, false, TimestampFormat.ISO_8601);
open(deserializationSchema);
assertThat(deserializationSchema.deserialize(null)).isNull();
}
@TestTemplate
void testDeserializationMissingNode() throws Exception {
DataType dataType = ROW(FIELD("name", STRING()));
RowType schema = (RowType) dataType.getLogicalType();
DeserializationSchema<RowData> deserializationSchema =
createDeserializationSchema(
isJsonParser, schema, true, false, TimestampFormat.ISO_8601);
open(deserializationSchema);
RowData rowData = deserializationSchema.deserialize("".getBytes());
assertThat(rowData).isNull();
}
@TestTemplate
void testDeserializationMissingField() throws Exception {
// Root
ObjectNode root = OBJECT_MAPPER.createObjectNode();
root.put("id", 123123123);
byte[] serializedJson = OBJECT_MAPPER.writeValueAsBytes(root);
DataType dataType = ROW(FIELD("name", STRING()));
RowType schema = (RowType) dataType.getLogicalType();
// pass on missing field
DeserializationSchema<RowData> deserializationSchema =
createDeserializationSchema(
isJsonParser, schema, false, false, TimestampFormat.ISO_8601);
open(deserializationSchema);
Row expected = new Row(1);
Row actual = convertToExternal(deserializationSchema.deserialize(serializedJson), dataType);
assertThat(actual).isEqualTo(expected);
// fail on missing field
deserializationSchema =
createDeserializationSchema(
isJsonParser, schema, true, false, TimestampFormat.ISO_8601);
open(deserializationSchema);
String errorMessage = "Failed to deserialize JSON '{\"id\":123123123}'.";
DeserializationSchema<RowData> finalDeserializationSchema = deserializationSchema;
assertThatThrownBy(() -> finalDeserializationSchema.deserialize(serializedJson))
.hasMessage(errorMessage);
// ignore on parse error
deserializationSchema =
createDeserializationSchema(
isJsonParser, schema, false, true, TimestampFormat.ISO_8601);
open(deserializationSchema);
actual = convertToExternal(deserializationSchema.deserialize(serializedJson), dataType);
assertThat(actual).isEqualTo(expected);
errorMessage =
"JSON format doesn't support failOnMissingField and ignoreParseErrors are both enabled.";
assertThatThrownBy(
() ->
new JsonRowDataDeserializationSchema(
schema,
InternalTypeInfo.of(schema),
true,
true,
TimestampFormat.ISO_8601))
.hasMessage(errorMessage);
}
@TestTemplate
void testSerDeSQLTimestampFormat() throws Exception {
RowType rowType =
(RowType)
ROW(
FIELD("timestamp3", TIMESTAMP(3)),
FIELD("timestamp9", TIMESTAMP(9)),
FIELD(
"timestamp_with_local_timezone3",
TIMESTAMP_WITH_LOCAL_TIME_ZONE(3)),
FIELD(
"timestamp_with_local_timezone9",
TIMESTAMP_WITH_LOCAL_TIME_ZONE(9)))
.getLogicalType();
DeserializationSchema<RowData> deserializationSchema =
createDeserializationSchema(
isJsonParser, rowType, false, false, TimestampFormat.SQL);
open(deserializationSchema);
JsonRowDataSerializationSchema serializationSchema =
new JsonRowDataSerializationSchema(
rowType,
TimestampFormat.SQL,
JsonFormatOptions.MapNullKeyMode.LITERAL,
"null",
true,
false);
open(serializationSchema);
ObjectNode root = OBJECT_MAPPER.createObjectNode();
root.put("timestamp3", "1990-10-14 12:12:43.123");
root.put("timestamp9", "1990-10-14 12:12:43.123456789");
root.put("timestamp_with_local_timezone3", "1990-10-14 12:12:43.123Z");
root.put("timestamp_with_local_timezone9", "1990-10-14 12:12:43.123456789Z");
byte[] serializedJson = OBJECT_MAPPER.writeValueAsBytes(root);
RowData rowData = deserializationSchema.deserialize(serializedJson);
byte[] actual = serializationSchema.serialize(rowData);
assertThat(serializedJson).containsExactly(actual);
}
@Test
void testSerializationMapNullKey() {
RowType rowType =
(RowType)
ROW(FIELD("nestedMap", MAP(STRING(), MAP(STRING(), INT()))))
.getLogicalType();
// test data
// use LinkedHashMap to make sure entries order
Map<StringData, Integer> map = new LinkedHashMap<>();
map.put(StringData.fromString("no-null key"), 1);
map.put(StringData.fromString(null), 2);
GenericMapData mapData = new GenericMapData(map);
Map<StringData, GenericMapData> nestedMap = new LinkedHashMap<>();
nestedMap.put(StringData.fromString("no-null key"), mapData);
nestedMap.put(StringData.fromString(null), mapData);
GenericMapData nestedMapData = new GenericMapData(nestedMap);
GenericRowData rowData = new GenericRowData(1);
rowData.setField(0, nestedMapData);
JsonRowDataSerializationSchema serializationSchema1 =
new JsonRowDataSerializationSchema(
rowType,
TimestampFormat.SQL,
JsonFormatOptions.MapNullKeyMode.FAIL,
"null",
true,
false);
open(serializationSchema1);
// expect message for serializationSchema1
String errorMessage1 =
"JSON format doesn't support to serialize map data with null keys."
+ " You can drop null key entries or encode null in literals by specifying map-null-key.mode option.";
JsonRowDataSerializationSchema serializationSchema2 =
new JsonRowDataSerializationSchema(
rowType,
TimestampFormat.SQL,
JsonFormatOptions.MapNullKeyMode.DROP,
"null",
true,
false);
open(serializationSchema2);
// expect result for serializationSchema2
String expectResult2 = "{\"nestedMap\":{\"no-null key\":{\"no-null key\":1}}}";
JsonRowDataSerializationSchema serializationSchema3 =
new JsonRowDataSerializationSchema(
rowType,
TimestampFormat.SQL,
JsonFormatOptions.MapNullKeyMode.LITERAL,
"nullKey",
true,
false);
open(serializationSchema3);
// expect result for serializationSchema3
String expectResult3 =
"{\"nestedMap\":{\"no-null key\":{\"no-null key\":1,\"nullKey\":2},\"nullKey\":{\"no-null key\":1,\"nullKey\":2}}}";
assertThatThrownBy(() -> serializationSchema1.serialize(rowData))
.satisfies(FlinkAssertions.anyCauseMatches(errorMessage1));
// mapNullKey Mode is drop
byte[] actual2 = serializationSchema2.serialize(rowData);
assertThat(new String(actual2)).isEqualTo(expectResult2);
// mapNullKey Mode is literal
byte[] actual3 = serializationSchema3.serialize(rowData);
assertThat(new String(actual3)).isEqualTo(expectResult3);
}
@TestTemplate
void testSerializationDecimalEncode() throws Exception {
RowType schema =
(RowType)
ROW(
FIELD("decimal1", DECIMAL(9, 6)),
FIELD("decimal2", DECIMAL(20, 0)),
FIELD("decimal3", DECIMAL(11, 9)))
.getLogicalType();
DeserializationSchema<RowData> deserializer =
createDeserializationSchema(
isJsonParser, schema, false, false, TimestampFormat.ISO_8601);
deserializer.open(new DummyInitializationContext());
JsonRowDataSerializationSchema plainDecimalSerializer =
new JsonRowDataSerializationSchema(
schema,
TimestampFormat.ISO_8601,
JsonFormatOptions.MapNullKeyMode.LITERAL,
"null",
true,
false);
plainDecimalSerializer.open(new DummyInitializationContext());
JsonRowDataSerializationSchema scientificDecimalSerializer =
new JsonRowDataSerializationSchema(
schema,
TimestampFormat.ISO_8601,
JsonFormatOptions.MapNullKeyMode.LITERAL,
"null",
false,
false);
scientificDecimalSerializer.open(new DummyInitializationContext());
String plainDecimalJson =
"{\"decimal1\":123.456789,\"decimal2\":454621864049246170,\"decimal3\":0.000000027}";
RowData rowData = deserializer.deserialize(plainDecimalJson.getBytes());
String plainDecimalResult = new String(plainDecimalSerializer.serialize(rowData));
assertThat(plainDecimalResult).isEqualTo(plainDecimalJson);
String scientificDecimalJson =
"{\"decimal1\":123.456789,\"decimal2\":4.5462186404924617E+17,\"decimal3\":2.7E-8}";
String scientificDecimalResult = new String(scientificDecimalSerializer.serialize(rowData));
assertThat(scientificDecimalResult).isEqualTo(scientificDecimalJson);
}
@TestTemplate
void testSerDeMultiRowsWithNullValuesIgnored() throws Exception {
String[] jsons =
new String[] {
"{\"ops\":null,\"ids\":null,\"metrics\":{\"k1\":10.01,\"k2\":null}}",
"{\"ops\":{\"id\":\"281708d0-4092-4c21-9233-931950b6eccf\", \"svt\":\"2020-02-24T12:58:09.209+0800\"}, "
+ "\"ids\":[1, 2, 3]}",
"{\"ops\":{\"id\":null, \"svt\":\"2020-02-24T12:58:09.209+0800\"}, "
+ "\"ids\":[1, 2, null]}",
"{\"ops\":{},\"ids\":[],\"metrics\":{}}",
};
String[] expected =
new String[] {
"{\"metrics\":{\"k1\":10.01,\"k2\":null}}",
"{\"ops\":{\"id\":\"281708d0-4092-4c21-9233-931950b6eccf\",\"svt\":\"2020-02-24T12:58:09.209+0800\"},"
+ "\"ids\":[1,2,3]}",
"{\"ops\":{\"svt\":\"2020-02-24T12:58:09.209+0800\"},\"ids\":[1,2,null]}",
"{\"ops\":{},\"ids\":[],\"metrics\":{}}",
};
RowType rowType =
(RowType)
ROW(
FIELD(
"ops",
ROW(FIELD("id", STRING()), FIELD("svt", STRING()))),
FIELD("ids", ARRAY(INT())),
FIELD("metrics", MAP(STRING(), DOUBLE())))
.getLogicalType();
JsonRowDataDeserializationSchema deserializationSchema =
new JsonRowDataDeserializationSchema(
rowType,
InternalTypeInfo.of(rowType),
false,
true,
TimestampFormat.ISO_8601);
open(deserializationSchema);
JsonRowDataSerializationSchema serializationSchema =
new JsonRowDataSerializationSchema(
rowType,
TimestampFormat.ISO_8601,
JsonFormatOptions.MapNullKeyMode.LITERAL,
"null",
false,
true);
open(serializationSchema);
for (int i = 0; i < jsons.length; i++) {
String json = jsons[i];
RowData row = deserializationSchema.deserialize(json.getBytes());
String result = new String(serializationSchema.serialize(row));
assertThat(result).isEqualTo(expected[i]);
}
}
@TestTemplate
void testJsonParse() throws Exception {
for (TestSpec spec : testData) {
testIgnoreParseErrors(spec);
if (spec.errorMessage != null) {
testParseErrors(spec);
}
}
}
@Test
void testSerializationWithTypesMismatch() {
RowType rowType = (RowType) ROW(FIELD("f0", INT()), FIELD("f1", STRING())).getLogicalType();
GenericRowData genericRowData = new GenericRowData(2);
genericRowData.setField(0, 1);
genericRowData.setField(1, 1);
JsonRowDataSerializationSchema serializationSchema =
new JsonRowDataSerializationSchema(
rowType,
TimestampFormat.SQL,
JsonFormatOptions.MapNullKeyMode.FAIL,
"null",
true,
false);
open(serializationSchema);
String errorMessage = "Fail to serialize at field: f1.";
assertThatThrownBy(() -> serializationSchema.serialize(genericRowData))
.satisfies(anyCauseMatches(RuntimeException.class, errorMessage));
}
@TestTemplate
void testDeserializationWithTypesMismatch() {
RowType rowType = (RowType) ROW(FIELD("f0", STRING()), FIELD("f1", INT())).getLogicalType();
String json = "{\"f0\":\"abc\", \"f1\": \"abc\"}";
DeserializationSchema<RowData> deserializationSchema =
createDeserializationSchema(
isJsonParser, rowType, false, false, TimestampFormat.SQL);
open(deserializationSchema);
String errorMessage = "Fail to deserialize at field: f1.";
assertThatThrownBy(() -> deserializationSchema.deserialize(json.getBytes()))
.satisfies(anyCauseMatches(errorMessage));
}
private void testIgnoreParseErrors(TestSpec spec) throws Exception {
// the parsing field should be null and no exception is thrown
DeserializationSchema<RowData> ignoreErrorsSchema =
createDeserializationSchema(
isJsonParser, spec.rowType, false, true, spec.timestampFormat);
ignoreErrorsSchema.open(new DummyInitializationContext());
Row expected;
if (spec.expected != null) {
expected = spec.expected;
} else {
expected = new Row(1);
}
RowData rowData = ignoreErrorsSchema.deserialize(spec.json.getBytes());
Row actual = convertToExternal(rowData, fromLogicalToDataType(spec.rowType));
assertThat(actual)
.isEqualTo(expected)
.withFailMessage("Test Ignore Parse Error: " + spec.json);
}
private void testParseErrors(TestSpec spec) {
// expect exception if parse error is not ignored
DeserializationSchema<RowData> failingSchema =
createDeserializationSchema(
isJsonParser, spec.rowType, false, false, spec.timestampFormat);
open(failingSchema);
assertThatThrownBy(() -> failingSchema.deserialize(spec.json.getBytes()))
.hasMessageContaining(spec.errorMessage);
}
private static List<TestSpec> testData =
Arrays.asList(
TestSpec.json("{\"id\": \"trueA\"}")
.rowType(ROW(FIELD("id", BOOLEAN())))
.expect(Row.of(false)),
TestSpec.json("{\"id\": true}")
.rowType(ROW(FIELD("id", BOOLEAN())))
.expect(Row.of(true)),
TestSpec.json("{\"id\":\"abc\"}")
.rowType(ROW(FIELD("id", INT())))
.expectErrorMessage("Failed to deserialize JSON '{\"id\":\"abc\"}'."),
TestSpec.json("{\"id\":11211111111.013}")
.rowType(ROW(FIELD("id", INT())))
.expect(null),
TestSpec.json("{\"id\":112.013}")
.rowType(ROW(FIELD("id", INT())))
.expect(Row.of(112)),
TestSpec.json("{\"id\":112.013}")
.rowType(ROW(FIELD("id", BIGINT())))
.expect(Row.of(112L)),
TestSpec.json("{\"id\":\"long\"}")
.rowType(ROW(FIELD("id", BIGINT())))
.expectErrorMessage("Failed to deserialize JSON '{\"id\":\"long\"}'."),
TestSpec.json("{\"id\":\"112.013.123\"}")
.rowType(ROW(FIELD("id", FLOAT())))
.expectErrorMessage(
"Failed to deserialize JSON '{\"id\":\"112.013.123\"}'."),
TestSpec.json("{\"id\":\"112.013.123\"}")
.rowType(ROW(FIELD("id", DOUBLE())))
.expectErrorMessage(
"Failed to deserialize JSON '{\"id\":\"112.013.123\"}'."),
TestSpec.json("{\"id\":\"18:00:243\"}")
.rowType(ROW(FIELD("id", TIME())))
.expectErrorMessage(
"Failed to deserialize JSON '{\"id\":\"18:00:243\"}'."),
TestSpec.json("{\"id\":\"18:00:243\"}")
.rowType(ROW(FIELD("id", TIME())))
.expectErrorMessage(
"Failed to deserialize JSON '{\"id\":\"18:00:243\"}'."),
TestSpec.json("{\"id\":\"20191112\"}")
.rowType(ROW(FIELD("id", DATE())))
.expectErrorMessage(
"Failed to deserialize JSON '{\"id\":\"20191112\"}'."),
TestSpec.json("{\"id\":\"20191112\"}")
.rowType(ROW(FIELD("id", DATE())))
.expectErrorMessage(
"Failed to deserialize JSON '{\"id\":\"20191112\"}'."),
TestSpec.json("{\"id\":true}")
.rowType(ROW(FIELD("id", STRING())))
.expect(Row.of("true")),
TestSpec.json("{\"id\":123.234}")
.rowType(ROW(FIELD("id", STRING())))
.expect(Row.of("123.234")),
TestSpec.json("{\"id\":1234567}")
.rowType(ROW(FIELD("id", STRING())))
.expect(Row.of("1234567")),
TestSpec.json("{\"id\":\"string field\"}")
.rowType(ROW(FIELD("id", STRING())))
.expect(Row.of("string field")),
TestSpec.json("{\"id\":[\"array data1\",\"array data2\",123,234.345]}")
.rowType(ROW(FIELD("id", STRING())))
.expect(Row.of("[\"array data1\",\"array data2\",123,234.345]")),
TestSpec.json("{\"id\":{\"k1\":123,\"k2\":234.234,\"k3\":\"string data\"}}")
.rowType(ROW(FIELD("id", STRING())))
.expect(Row.of("{\"k1\":123,\"k2\":234.234,\"k3\":\"string data\"}")),
TestSpec.json("{\"id\":\"2019-11-12 18:00:12\"}")
.rowType(ROW(FIELD("id", TIMESTAMP(0))))
.timestampFormat(TimestampFormat.ISO_8601)
.expectErrorMessage(
"Failed to deserialize JSON '{\"id\":\"2019-11-12 18:00:12\"}'."),
TestSpec.json("{\"id\":\"2019-11-12T18:00:12\"}")
.rowType(ROW(FIELD("id", TIMESTAMP(0))))
.expectErrorMessage(
"Failed to deserialize JSON '{\"id\":\"2019-11-12T18:00:12\"}'."),
TestSpec.json("{\"id\":\"2019-11-12T18:00:12Z\"}")
.rowType(ROW(FIELD("id", TIMESTAMP(0))))
.expectErrorMessage(
"Failed to deserialize JSON '{\"id\":\"2019-11-12T18:00:12Z\"}'."),
TestSpec.json("{\"id\":\"2019-11-12T18:00:12Z\"}")
.rowType(ROW(FIELD("id", TIMESTAMP(0))))
.timestampFormat(TimestampFormat.ISO_8601)
.expectErrorMessage(
"Failed to deserialize JSON '{\"id\":\"2019-11-12T18:00:12Z\"}'."),
TestSpec.json("{\"id\":\"abc\"}")
.rowType(ROW(FIELD("id", DECIMAL(10, 3))))
.expectErrorMessage("Failed to deserialize JSON '{\"id\":\"abc\"}'."),
TestSpec.json("{\"row\":{\"id\":\"abc\"}}")
.rowType(ROW(FIELD("row", ROW(FIELD("id", BOOLEAN())))))
.expect(Row.of(Row.of(false))),
TestSpec.json("{\"array\":[123, \"abc\"]}")
.rowType(ROW(FIELD("array", ARRAY(INT()))))
.expect(Row.of((Object) new Integer[] {123, null}))
.expectErrorMessage(
"Failed to deserialize JSON '{\"array\":[123, \"abc\"]}'."),
TestSpec.json("{\"map\":{\"key1\":\"123\", \"key2\":\"abc\"}}")
.rowType(ROW(FIELD("map", MAP(STRING(), INT()))))
.expect(Row.of(createHashMap("key1", 123, "key2", null)))
.expectErrorMessage(
"Failed to deserialize JSON '{\"map\":{\"key1\":\"123\", \"key2\":\"abc\"}}'."),
TestSpec.json("{\"id\":\"2019-11-12T18:00:12\"}")
.rowType(ROW(FIELD("id", TIMESTAMP_WITH_LOCAL_TIME_ZONE(0))))
.expectErrorMessage(
"Failed to deserialize JSON '{\"id\":\"2019-11-12T18:00:12\"}'."),
TestSpec.json("{\"id\":\"2019-11-12T18:00:12+0800\"}")
.rowType(ROW(FIELD("id", TIMESTAMP_WITH_LOCAL_TIME_ZONE(0))))
.expectErrorMessage(
"Failed to deserialize JSON '{\"id\":\"2019-11-12T18:00:12+0800\"}'."),
TestSpec.json("{\"id\":1,\"factor\":799.929496989092949698}")
.rowType(ROW(FIELD("id", INT()), FIELD("factor", DECIMAL(38, 18))))
.expect(Row.of(1, new BigDecimal("799.929496989092949698"))),
TestSpec.json("{\"id\":\"\tstring field\"}") // test to parse control chars
.rowType(ROW(FIELD("id", STRING())))
.expect(Row.of("\tstring field")));
private static Map<String, Integer> createHashMap(
String k1, Integer v1, String k2, Integer v2) {
Map<String, Integer> map = new HashMap<>();
map.put(k1, v1);
map.put(k2, v2);
return map;
}
@SuppressWarnings("unchecked")
static Row convertToExternal(RowData rowData, DataType dataType) {
return (Row) DataFormatConverters.getConverterForDataType(dataType).toExternal(rowData);
}
private DeserializationSchema<RowData> createDeserializationSchema(
boolean isJsonParser,
RowType rowType,
boolean failOnMissingField,
boolean ignoreParseErrors,
TimestampFormat timestampFormat) {
if (isJsonParser) {
return new JsonParserRowDataDeserializationSchema(
rowType,
InternalTypeInfo.of(rowType),
failOnMissingField,
ignoreParseErrors,
timestampFormat);
} else {
return new JsonRowDataDeserializationSchema(
rowType,
InternalTypeInfo.of(rowType),
failOnMissingField,
ignoreParseErrors,
timestampFormat);
}
}
private static
|
JsonRowDataSerDeSchemaTest
|
java
|
hibernate__hibernate-orm
|
hibernate-testing/src/main/java/org/hibernate/testing/orm/junit/ServiceRegistryProducer.java
|
{
"start": 582,
"end": 747
}
|
interface ____ {
default StandardServiceRegistry produceServiceRegistry(StandardServiceRegistryBuilder builder) {
return builder.build();
}
}
|
ServiceRegistryProducer
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/index/IndexingPressureMonitor.java
|
{
"start": 736,
"end": 1379
}
|
interface ____ {
/**
* Returns the maximum allowed size in bytes for any single indexing operation.
* Operations exceeding this limit may be rejected.
*
* @return the maximum allowed operation size in bytes
*/
long getMaxAllowedOperationSizeInBytes();
/**
* Registers a listener to be notified of indexing pressure events.
* The listener will receive callbacks when operations are tracked or rejected.
*
* @param listener the listener to register for indexing pressure events
*/
void addListener(IndexingPressureListener listener);
/**
* Listener
|
IndexingPressureMonitor
|
java
|
apache__kafka
|
clients/src/main/java/org/apache/kafka/common/requests/ConsumerGroupDescribeRequest.java
|
{
"start": 1284,
"end": 3639
}
|
class ____ extends AbstractRequest.Builder<ConsumerGroupDescribeRequest> {
private final ConsumerGroupDescribeRequestData data;
public Builder(ConsumerGroupDescribeRequestData data) {
this(data, false);
}
public Builder(ConsumerGroupDescribeRequestData data, boolean enableUnstableLastVersion) {
super(ApiKeys.CONSUMER_GROUP_DESCRIBE, enableUnstableLastVersion);
this.data = data;
}
@Override
public ConsumerGroupDescribeRequest build(short version) {
return new ConsumerGroupDescribeRequest(data, version);
}
@Override
public String toString() {
return data.toString();
}
}
private final ConsumerGroupDescribeRequestData data;
public ConsumerGroupDescribeRequest(ConsumerGroupDescribeRequestData data, short version) {
super(ApiKeys.CONSUMER_GROUP_DESCRIBE, version);
this.data = data;
}
@Override
public ConsumerGroupDescribeResponse getErrorResponse(int throttleTimeMs, Throwable e) {
ConsumerGroupDescribeResponseData data = new ConsumerGroupDescribeResponseData()
.setThrottleTimeMs(throttleTimeMs);
// Set error for each group
this.data.groupIds().forEach(
groupId -> data.groups().add(
new ConsumerGroupDescribeResponseData.DescribedGroup()
.setGroupId(groupId)
.setErrorCode(Errors.forException(e).code())
)
);
return new ConsumerGroupDescribeResponse(data);
}
@Override
public ConsumerGroupDescribeRequestData data() {
return data;
}
public static ConsumerGroupDescribeRequest parse(Readable readable, short version) {
return new ConsumerGroupDescribeRequest(
new ConsumerGroupDescribeRequestData(readable, version),
version
);
}
public static List<ConsumerGroupDescribeResponseData.DescribedGroup> getErrorDescribedGroupList(
List<String> groupIds,
Errors error
) {
return groupIds.stream()
.map(groupId -> new ConsumerGroupDescribeResponseData.DescribedGroup()
.setGroupId(groupId)
.setErrorCode(error.code())
).collect(Collectors.toList());
}
}
|
Builder
|
java
|
apache__flink
|
flink-metrics/flink-metrics-core/src/test/java/org/apache/flink/metrics/util/MetricReporterTestUtils.java
|
{
"start": 1144,
"end": 1246
}
|
class ____ {
/**
* Verifies that the given {@link MetricReporterFactory}
|
MetricReporterTestUtils
|
java
|
spring-projects__spring-framework
|
spring-jms/src/test/java/org/springframework/jms/config/JmsNamespaceHandlerTests.java
|
{
"start": 17880,
"end": 18202
}
|
class ____ implements MessageListener {
public Message message;
@Override
public void onMessage(Message message) {
this.message = message;
}
}
/**
* Internal extension that registers a {@link ReaderEventListener} to store
* registered {@link ComponentDefinition}s.
*/
private static
|
TestMessageListener
|
java
|
apache__hadoop
|
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/resources/AbstractCGroupsCpuResourceHandler.java
|
{
"start": 1940,
"end": 8448
}
|
class ____ implements CpuResourceHandler {
static final Logger LOG =
LoggerFactory.getLogger(AbstractCGroupsCpuResourceHandler.class);
protected CGroupsHandler cGroupsHandler;
private boolean strictResourceUsageMode = false;
private float yarnProcessors;
private int nodeVCores;
private static final CGroupsHandler.CGroupController CPU =
CGroupsHandler.CGroupController.CPU;
@VisibleForTesting
static final int MAX_QUOTA_US = 1000 * 1000;
@VisibleForTesting
static final int MIN_PERIOD_US = 1000;
AbstractCGroupsCpuResourceHandler(CGroupsHandler cGroupsHandler) {
this.cGroupsHandler = cGroupsHandler;
}
@Override
public List<PrivilegedOperation> bootstrap(Configuration conf)
throws ResourceHandlerException {
return bootstrap(
ResourceCalculatorPlugin.getResourceCalculatorPlugin(null, conf), conf);
}
@VisibleForTesting
List<PrivilegedOperation> bootstrap(
ResourceCalculatorPlugin plugin, Configuration conf)
throws ResourceHandlerException {
this.strictResourceUsageMode = conf.getBoolean(
YarnConfiguration.NM_LINUX_CONTAINER_CGROUPS_STRICT_RESOURCE_USAGE,
YarnConfiguration.DEFAULT_NM_LINUX_CONTAINER_CGROUPS_STRICT_RESOURCE_USAGE);
this.cGroupsHandler.initializeCGroupController(CPU);
nodeVCores = NodeManagerHardwareUtils.getVCores(plugin, conf);
// cap overall usage to the number of cores allocated to YARN
yarnProcessors = NodeManagerHardwareUtils.getContainersCPUs(plugin, conf);
int systemProcessors = NodeManagerHardwareUtils.getNodeCPUs(plugin, conf);
boolean existingCpuLimits;
existingCpuLimits = cpuLimitExists(
cGroupsHandler.getPathForCGroup(CPU, ""));
if (systemProcessors != (int) yarnProcessors) {
LOG.info("YARN containers restricted to " + yarnProcessors + " cores");
int[] limits = getOverallLimits(yarnProcessors);
updateCgroupMaxCpuLimit("", String.valueOf(limits[1]), String.valueOf(limits[0]));
} else if (existingCpuLimits) {
LOG.info("Removing CPU constraints for YARN containers.");
updateCgroupMaxCpuLimit("", String.valueOf(-1), null);
}
return null;
}
protected abstract void updateCgroupMaxCpuLimit(String cgroupId, String quota, String period)
throws ResourceHandlerException;
protected abstract boolean cpuLimitExists(String path) throws ResourceHandlerException;
@VisibleForTesting
@InterfaceAudience.Private
public static int[] getOverallLimits(float yarnProcessors) {
int[] ret = new int[2];
if (yarnProcessors < 0.01f) {
throw new IllegalArgumentException("Number of processors can't be <= 0.");
}
int quotaUS = MAX_QUOTA_US;
int periodUS = (int) (MAX_QUOTA_US / yarnProcessors);
if (yarnProcessors < 1.0f) {
periodUS = MAX_QUOTA_US;
quotaUS = (int) (periodUS * yarnProcessors);
if (quotaUS < MIN_PERIOD_US) {
LOG.warn("The quota calculated for the cgroup was too low."
+ " The minimum value is " + MIN_PERIOD_US
+ ", calculated value is " + quotaUS
+ ". Setting quota to minimum value.");
quotaUS = MIN_PERIOD_US;
}
}
// cfs_period_us can't be less than 1000 microseconds
// if the value of periodUS is less than 1000, we can't really use cgroups
// to limit cpu
if (periodUS < MIN_PERIOD_US) {
LOG.warn("The period calculated for the cgroup was too low."
+ " The minimum value is " + MIN_PERIOD_US
+ ", calculated value is " + periodUS
+ ". Using all available CPU.");
periodUS = MAX_QUOTA_US;
quotaUS = -1;
}
ret[0] = periodUS;
ret[1] = quotaUS;
return ret;
}
@Override
public List<PrivilegedOperation> preStart(Container container)
throws ResourceHandlerException {
String cgroupId = container.getContainerId().toString();
cGroupsHandler.createCGroup(CPU, cgroupId);
updateContainer(container);
List<PrivilegedOperation> ret = new ArrayList<>();
ret.add(new PrivilegedOperation(
PrivilegedOperation.OperationType.ADD_PID_TO_CGROUP,
PrivilegedOperation.CGROUP_ARG_PREFIX + cGroupsHandler
.getPathForCGroupTasks(CPU, cgroupId)));
return ret;
}
@Override
public List<PrivilegedOperation> reacquireContainer(ContainerId containerId)
throws ResourceHandlerException {
return null;
}
@Override
public List<PrivilegedOperation> updateContainer(Container container)
throws ResourceHandlerException {
Resource containerResource = container.getResource();
String cgroupId = container.getContainerId().toString();
File cgroup = new File(cGroupsHandler.getPathForCGroup(CPU, cgroupId));
if (cgroup.exists()) {
try {
int containerVCores = containerResource.getVirtualCores();
ContainerTokenIdentifier id = container.getContainerTokenIdentifier();
if (id != null && id.getExecutionType() ==
ExecutionType.OPPORTUNISTIC) {
updateCgroupCpuWeight(cgroupId, getOpportunisticCpuWeight());
} else {
updateCgroupCpuWeight(cgroupId, getCpuWeightByContainerVcores(containerVCores));
}
if (strictResourceUsageMode) {
if (nodeVCores != containerVCores) {
float containerCPU =
(containerVCores * yarnProcessors) / (float) nodeVCores;
int[] limits = getOverallLimits(containerCPU);
updateCgroupMaxCpuLimit(cgroupId, String.valueOf(limits[1]), String.valueOf(limits[0]));
}
}
} catch (ResourceHandlerException re) {
cGroupsHandler.deleteCGroup(CPU, cgroupId);
LOG.warn("Could not update cgroup for container", re);
throw re;
}
}
return null;
}
protected abstract int getOpportunisticCpuWeight();
protected abstract int getCpuWeightByContainerVcores(int containerVcores);
protected abstract void updateCgroupCpuWeight(String cgroupId, int weight)
throws ResourceHandlerException;
@Override
public List<PrivilegedOperation> postComplete(ContainerId containerId)
throws ResourceHandlerException {
cGroupsHandler.deleteCGroup(CPU, containerId.toString());
return null;
}
@Override public List<PrivilegedOperation> teardown()
throws ResourceHandlerException {
return null;
}
@Override
public String toString() {
return AbstractCGroupsCpuResourceHandler.class.getName();
}
}
|
AbstractCGroupsCpuResourceHandler
|
java
|
apache__hadoop
|
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/aggregate/ValueAggregatorCombiner.java
|
{
"start": 1328,
"end": 2630
}
|
class ____<K1 extends WritableComparable<?>,
V1 extends Writable>
extends Reducer<Text, Text, Text, Text> {
/** Combines values for a given key.
* @param key the key is expected to be a Text object, whose prefix indicates
* the type of aggregation to aggregate the values.
* @param values the values to combine
* @param context to collect combined values
*/
public void reduce(Text key, Iterable<Text> values, Context context)
throws IOException, InterruptedException {
String keyStr = key.toString();
int pos = keyStr.indexOf(ValueAggregatorDescriptor.TYPE_SEPARATOR);
String type = keyStr.substring(0, pos);
long uniqCount = context.getConfiguration().
getLong(UniqValueCount.MAX_NUM_UNIQUE_VALUES, Long.MAX_VALUE);
ValueAggregator aggregator = ValueAggregatorBaseDescriptor
.generateValueAggregator(type, uniqCount);
for (Text val : values) {
aggregator.addNextValue(val);
}
Iterator<?> outputs = aggregator.getCombinerOutput().iterator();
while (outputs.hasNext()) {
Object v = outputs.next();
if (v instanceof Text) {
context.write(key, (Text)v);
} else {
context.write(key, new Text(v.toString()));
}
}
}
}
|
ValueAggregatorCombiner
|
java
|
elastic__elasticsearch
|
x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/async/AsyncTaskManagementServiceTests.java
|
{
"start": 2104,
"end": 2454
}
|
class ____ extends ESSingleNodeTestCase {
private ClusterService clusterService;
private TransportService transportService;
private AsyncResultsService<TestTask, StoredAsyncResponse<TestResponse>> results;
private final ExecutorService executorService = Executors.newFixedThreadPool(1);
public static
|
AsyncTaskManagementServiceTests
|
java
|
spring-projects__spring-framework
|
spring-websocket/src/main/java/org/springframework/web/socket/handler/WebSocketHandlerDecorator.java
|
{
"start": 1307,
"end": 3025
}
|
class ____ implements WebSocketHandler {
private final WebSocketHandler delegate;
public WebSocketHandlerDecorator(WebSocketHandler delegate) {
Assert.notNull(delegate, "Delegate must not be null");
this.delegate = delegate;
}
public WebSocketHandler getDelegate() {
return this.delegate;
}
public WebSocketHandler getLastHandler() {
WebSocketHandler result = this.delegate;
while (result instanceof WebSocketHandlerDecorator webSocketHandlerDecorator) {
result = webSocketHandlerDecorator.getDelegate();
}
return result;
}
public static WebSocketHandler unwrap(WebSocketHandler handler) {
if (handler instanceof WebSocketHandlerDecorator webSocketHandlerDecorator) {
return webSocketHandlerDecorator.getLastHandler();
}
else {
return handler;
}
}
@Override
public void afterConnectionEstablished(WebSocketSession session) throws Exception {
this.delegate.afterConnectionEstablished(session);
}
@Override
public void handleMessage(WebSocketSession session, WebSocketMessage<?> message) throws Exception {
this.delegate.handleMessage(session, message);
}
@Override
public void handleTransportError(WebSocketSession session, Throwable exception) throws Exception {
this.delegate.handleTransportError(session, exception);
}
@Override
public void afterConnectionClosed(WebSocketSession session, CloseStatus closeStatus) throws Exception {
this.delegate.afterConnectionClosed(session, closeStatus);
}
@Override
public boolean supportsPartialMessages() {
return this.delegate.supportsPartialMessages();
}
@Override
public String toString() {
return getClass().getSimpleName() + " [delegate=" + this.delegate + "]";
}
}
|
WebSocketHandlerDecorator
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/annotations/fkcircularity/FkCircularityTest.java
|
{
"start": 548,
"end": 1721
}
|
class ____ {
@Test
public void testJoinedSublcassesInPK() {
MetadataSources metadataSources = new MetadataSources( ServiceRegistryUtil.serviceRegistry() )
.addAnnotatedClass(A.class)
.addAnnotatedClass(B.class)
.addAnnotatedClass(C.class)
.addAnnotatedClass(D.class);
try {
metadataSources.buildMetadata();
}
finally {
ServiceRegistry metaServiceRegistry = metadataSources.getServiceRegistry();
if(metaServiceRegistry instanceof BootstrapServiceRegistry ) {
BootstrapServiceRegistryBuilder.destroy( metaServiceRegistry );
}
}
}
@Test
public void testDeepJoinedSuclassesHierachy() {
MetadataSources metadataSources = new MetadataSources( ServiceRegistryUtil.serviceRegistry() )
.addAnnotatedClass(ClassA.class)
.addAnnotatedClass(ClassB.class)
.addAnnotatedClass(ClassC.class)
.addAnnotatedClass(ClassD.class);
try {
metadataSources.buildMetadata();
}
finally {
ServiceRegistry metaServiceRegistry = metadataSources.getServiceRegistry();
if(metaServiceRegistry instanceof BootstrapServiceRegistry ) {
BootstrapServiceRegistryBuilder.destroy( metaServiceRegistry );
}
}
}
}
|
FkCircularityTest
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/UnnecessaryParenthesesTest.java
|
{
"start": 5819,
"end": 6109
}
|
class ____ {
void print(Integer i) {
(++i).toString();
}
}
""")
.doTest();
}
@Test
public void negativeStatements() {
helper
.addSourceLines(
"Test.java",
"""
|
Test
|
java
|
apache__logging-log4j2
|
log4j-api-test/src/test/java/org/apache/logging/log4j/message/ThreadDumpMessageTest.java
|
{
"start": 5086,
"end": 5804
}
|
class ____ extends Thread {
private final CountDownLatch started;
private final CountDownLatch keepAlive;
volatile boolean finished;
public ThreadWithCountDownLatch(final CountDownLatch started, final CountDownLatch keepAlive) {
super("ThreadWithCountDownLatch");
this.started = started;
this.keepAlive = keepAlive;
setDaemon(true);
}
@Override
public void run() {
started.countDown();
try {
keepAlive.await();
} catch (final InterruptedException e) {
// ignored
}
finished = true;
}
}
}
|
ThreadWithCountDownLatch
|
java
|
apache__dubbo
|
dubbo-config/dubbo-config-spring/src/test/java/org/apache/dubbo/config/spring/reference/javaconfig/JavaConfigReferenceBeanTest.java
|
{
"start": 17697,
"end": 17999
}
|
class ____ {
@DubboReference(group = "${myapp.group}")
private HelloService helloService;
@Bean(name = "helloServiceClient")
public HelloService helloService() {
return helloService;
}
}
@Configuration
public static
|
LazyProxyConfiguration1
|
java
|
FasterXML__jackson-core
|
src/test/java/tools/jackson/core/unittest/ErrorReportConfigurationMaxRawContentLengthTest.java
|
{
"start": 334,
"end": 2027
}
|
class ____
extends JacksonCoreTestBase
{
/*
/**********************************************************
/* Unit Tests
/**********************************************************
*/
public void testBasicToStringErrorConfig() throws Exception {
// Truncated result
_verifyToString("abc", 2,
"[Source: (String)\"ab\"[truncated 1 chars]; line: 1, column: 1]");
// Exact length
_verifyToString("abc", 3,
"[Source: (String)\"abc\"; line: 1, column: 1]");
// Enough length
_verifyToString("abc", 4,
"[Source: (String)\"abc\"; line: 1, column: 1]");
}
/*
/**********************************************************
/* Internal helper methods
/**********************************************************
*/
private void _verifyToString(String rawSrc, int rawContentLength, String expectedMessage) {
ContentReference reference = _sourceRefWithErrorReportConfig(rawSrc, rawContentLength);
String location = new TokenStreamLocation(reference, 10L, 10L, 1, 1).toString();
assertEquals(expectedMessage, location);
}
private ContentReference _sourceRefWithErrorReportConfig(String rawSrc, int rawContentLength) {
return _sourceRef(rawSrc,
ErrorReportConfiguration.builder().maxRawContentLength(rawContentLength).build());
}
private ContentReference _sourceRef(String rawSrc, ErrorReportConfiguration errorReportConfiguration) {
return ContentReference.construct(true, rawSrc, 0, rawSrc.length(),errorReportConfiguration);
}
}
|
ErrorReportConfigurationMaxRawContentLengthTest
|
java
|
apache__hadoop
|
hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/Job20LineHistoryEventEmitter.java
|
{
"start": 1728,
"end": 2458
}
|
class ____ extends HistoryEventEmitter {
static List<SingleEventEmitter> nonFinals =
new LinkedList<SingleEventEmitter>();
static List<SingleEventEmitter> finals = new LinkedList<SingleEventEmitter>();
Long originalSubmitTime = null;
static {
nonFinals.add(new JobSubmittedEventEmitter());
nonFinals.add(new JobPriorityChangeEventEmitter());
nonFinals.add(new JobStatusChangedEventEmitter());
nonFinals.add(new JobInitedEventEmitter());
nonFinals.add(new JobInfoChangeEventEmitter());
finals.add(new JobUnsuccessfulCompletionEventEmitter());
finals.add(new JobFinishedEventEmitter());
}
Job20LineHistoryEventEmitter() {
super();
}
static private
|
Job20LineHistoryEventEmitter
|
java
|
spring-projects__spring-security
|
config/src/main/java/org/springframework/security/config/http/MatcherType.java
|
{
"start": 1447,
"end": 3006
}
|
enum ____ {
path(PathPatternRequestMatcher.class), regex(RegexRequestMatcher.class), ciRegex(RegexRequestMatcher.class);
private static final String ATT_MATCHER_TYPE = "request-matcher";
final Class<? extends RequestMatcher> type;
MatcherType(Class<? extends RequestMatcher> type) {
this.type = type;
}
public BeanDefinition createMatcher(ParserContext pc, String path, String method) {
return createMatcher(pc, path, method, null);
}
public BeanDefinition createMatcher(ParserContext pc, String path, String method, String servletPath) {
if (("/**".equals(path) || "**".equals(path)) && method == null) {
return new RootBeanDefinition(AnyRequestMatcher.class);
}
BeanDefinitionBuilder matcherBldr;
if (this == MatcherType.path) {
matcherBldr = BeanDefinitionBuilder.rootBeanDefinition(PathPatternRequestMatcherFactoryBean.class);
matcherBldr.addConstructorArgValue(path);
matcherBldr.addPropertyValue("basePath", servletPath);
}
else {
matcherBldr = BeanDefinitionBuilder.rootBeanDefinition(this.type);
matcherBldr.addConstructorArgValue(path);
}
matcherBldr.addConstructorArgValue(method);
if (this == ciRegex) {
matcherBldr.addConstructorArgValue(true);
}
return matcherBldr.getBeanDefinition();
}
static MatcherType fromElement(Element elt) {
if (StringUtils.hasText(elt.getAttribute(ATT_MATCHER_TYPE))) {
return valueOf(elt.getAttribute(ATT_MATCHER_TYPE));
}
return path;
}
static MatcherType fromElementOrMvc(Element elt) {
return MatcherType.fromElement(elt);
}
}
|
MatcherType
|
java
|
spring-projects__spring-framework
|
spring-context/src/test/java/org/springframework/jmx/support/JmxUtilsTests.java
|
{
"start": 5236,
"end": 5810
}
|
class ____ extends StandardMBean implements IJmxTestBean {
public StandardMBeanImpl() throws NotCompliantMBeanException {
super(IJmxTestBean.class);
}
@Override
public int add(int x, int y) {
return 0;
}
@Override
public long myOperation() {
return 0;
}
@Override
public int getAge() {
return 0;
}
@Override
public void setAge(int age) {
}
@Override
public void setName(String name) {
}
@Override
public String getName() {
return null;
}
@Override
public void dontExposeMe() {
}
}
public
|
StandardMBeanImpl
|
java
|
apache__maven
|
its/core-it-support/core-it-plugins/maven-it-plugin-artifact/src/main/java/org/apache/maven/plugin/coreit/AbstractRepoMojo.java
|
{
"start": 1225,
"end": 2264
}
|
class ____ extends AbstractMojo {
/**
* The project's main artifact.
*/
@Parameter(defaultValue = "${project.artifact}", required = true, readonly = true)
protected Artifact mainArtifact;
/**
* The project's attached artifact.
*/
@Parameter(defaultValue = "${project.attachedArtifacts}", required = true, readonly = true)
protected Collection<Artifact> attachedArtifacts;
/**
* The packaging of the project.
*/
@Parameter(defaultValue = "${project.packaging}", required = true, readonly = true)
protected String packaging;
/**
* The POM file of the project.
*/
@Parameter(defaultValue = "${project.file}", required = true, readonly = true)
protected File pomFile;
/**
* The local repository.
*/
@Parameter(defaultValue = "${localRepository}", required = true, readonly = true)
protected ArtifactRepository localRepository;
protected boolean isPomArtifact() {
return "pom".equals(packaging);
}
}
|
AbstractRepoMojo
|
java
|
elastic__elasticsearch
|
x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/configuration/ConfigurationFieldType.java
|
{
"start": 331,
"end": 1017
}
|
enum ____ {
STRING("str"),
INTEGER("int"),
LIST("list"),
BOOLEAN("bool");
private final String value;
ConfigurationFieldType(String value) {
this.value = value;
}
@Override
public String toString() {
return this.value;
}
public static ConfigurationFieldType fieldType(String type) {
for (ConfigurationFieldType fieldType : ConfigurationFieldType.values()) {
if (fieldType.value.equals(type)) {
return fieldType;
}
}
throw new IllegalArgumentException("Unknown " + ConfigurationFieldType.class.getSimpleName() + " [" + type + "].");
}
}
|
ConfigurationFieldType
|
java
|
reactor__reactor-core
|
reactor-core/src/jcstress/java/reactor/core/publisher/SinkOneStressTest.java
|
{
"start": 2695,
"end": 3397
}
|
class ____ extends SinkOneStressTest {
final SinkOneMulticast<String> sink = new SinkOneMulticast<>();
final StressSubscriber<String> subscriber = new StressSubscriber<>();
@Actor
public void value() {
sink.tryEmitError(new RuntimeException("boo"));
sink.tryEmitEmpty();
sink.tryEmitValue("foo");
}
@Actor
public void subscriber() {
sink.asMono().subscribe(subscriber);
}
@Arbiter
public void arbiter(LLL_Result r) {
r.r1 = subscriber.onNextCalls.get() == 1 ? subscriber.receivedValues.get(0) : null;
r.r2 = subscriber.onCompleteCalls.get();
r.r3 = subscriber.onErrorCalls.get() == 1 ? subscriber.error.getMessage() : null;
}
}
}
|
EmitErrorSubscribeStressTest
|
java
|
assertj__assertj-core
|
assertj-tests/assertj-integration-tests/assertj-core-tests/src/test/java/org/assertj/tests/core/api/Assertions_assertThat_with_ShortArray_Test.java
|
{
"start": 941,
"end": 1162
}
|
class ____ {
@Test
void should_create_Assert() {
AbstractShortArrayAssert<?> assertions = Assertions.assertThat(emptyArray());
assertThat(assertions).isNotNull();
}
}
|
Assertions_assertThat_with_ShortArray_Test
|
java
|
apache__thrift
|
lib/java/src/main/java/org/apache/thrift/protocol/TProtocol.java
|
{
"start": 16822,
"end": 18041
}
|
class ____ strongly encouraged to provide a more efficient
* alternative.
*/
protected void skipBool() throws TException {
this.readBool();
}
protected void skipByte() throws TException {
this.readByte();
}
protected void skipI16() throws TException {
this.readI16();
}
protected void skipI32() throws TException {
this.readI32();
}
protected void skipI64() throws TException {
this.readI64();
}
protected void skipDouble() throws TException {
this.readDouble();
}
protected void skipBinary() throws TException {
this.readBinary();
}
static final int MAX_SKIPPED_BYTES = 256;
protected byte[] skippedBytes = new byte[MAX_SKIPPED_BYTES];
protected void skipBytes(int numBytes) throws TException {
if (numBytes <= MAX_SKIPPED_BYTES) {
if (this.getTransport().getBytesRemainingInBuffer() >= numBytes) {
this.getTransport().consumeBuffer(numBytes);
} else {
this.getTransport().readAll(skippedBytes, 0, numBytes);
}
} else {
int remaining = numBytes;
while (remaining > 0) {
skipBytes(Math.min(remaining, MAX_SKIPPED_BYTES));
remaining -= MAX_SKIPPED_BYTES;
}
}
}
}
|
are
|
java
|
spring-projects__spring-security
|
config/src/main/java/org/springframework/security/config/annotation/rsocket/EnableRSocketSecurity.java
|
{
"start": 1002,
"end": 1367
}
|
class ____ have Spring Security
* {@link RSocketSecurity} support added.
*
* @author Rob Winch
* @since 5.2
* @see RSocketSecurity
*/
@Documented
@Target(ElementType.TYPE)
@Retention(RetentionPolicy.RUNTIME)
@Import({ RSocketSecurityConfiguration.class, SecuritySocketAcceptorInterceptorConfiguration.class,
ReactiveObservationImportSelector.class })
public @
|
to
|
java
|
mybatis__mybatis-3
|
src/main/java/org/apache/ibatis/annotations/Lang.java
|
{
"start": 1063,
"end": 1355
}
|
interface ____ {
* @Lang(MyXMLLanguageDriver.class)
* @Select("SELECT id, name FROM users WHERE id = #{id}")
* User selectById(int id);
* }
* </pre>
*
* @author Clinton Begin
*/
@Documented
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.METHOD)
public @
|
UserMapper
|
java
|
junit-team__junit5
|
jupiter-tests/src/test/java/org/junit/jupiter/engine/extension/TestWatcherTests.java
|
{
"start": 11654,
"end": 11889
}
|
class ____ extends AbstractDisabledMethodsTestCase {
@RegisterExtension
TestWatcher watcher = new TrackingTestWatcher();
}
@SuppressWarnings("JUnitMalformedDeclaration")
static
|
TestInstancePerMethodInstanceLevelTestWatcherTestCase
|
java
|
resilience4j__resilience4j
|
resilience4j-spring6/src/test/java/io/github/resilience4j/spring6/bulkhead/configure/ThreadPoolBulkHeadInitializationInAspectTest.java
|
{
"start": 1462,
"end": 3567
}
|
class ____ {
@Bean
public ThreadPoolBulkheadRegistry threadPoolBulkheadRegistry() {
ThreadPoolBulkheadConfig backendBulkHeadConfig = ThreadPoolBulkheadConfig.custom()
.coreThreadPoolSize(1)
.maxThreadPoolSize(1)
.build();
return ThreadPoolBulkheadRegistry.custom()
.withThreadPoolBulkheadConfig(ThreadPoolBulkheadConfig.ofDefaults())
.addThreadPoolBulkheadConfig(BACKEND, backendBulkHeadConfig)
.build();
}
}
@Autowired
BulkheadDummyService testDummyService;
@Autowired
@Qualifier("threadPoolBulkheadRegistry")
ThreadPoolBulkheadRegistry registry;
@Before
public void setUp() {
// ensure no bulkheads are initialized
assertThat(registry.getAllBulkheads()).isEmpty();
}
@After
public void tearDown() {
registry.getAllBulkheads().stream().map(ThreadPoolBulkhead::getName).forEach(registry::remove);
}
@Test
public void testSpelWithoutMappingConfigurationInAspect() throws Exception {
assertThat(testDummyService.spelSyncThreadPoolNoCfg("foo").toCompletableFuture().get(5, TimeUnit.SECONDS)).isEqualTo("foo");
assertThat(registry.getAllBulkheads()).hasSize(1).first()
.matches(bulkhead -> bulkhead.getName().equals("foo"))
.matches(bulkhead -> bulkhead.getBulkheadConfig() == registry.getDefaultConfig());
}
@Test
public void testSpelWithMappingConfigurationInAspect() throws Exception {
// The bulkhead is configured to allow 0 concurrent calls, so the call should be rejected
assertThat(testDummyService.spelSyncThreadPoolWithCfg("foo").toCompletableFuture().get(5, TimeUnit.SECONDS)).isEqualTo("foo");
assertThat(registry.getAllBulkheads()).hasSize(1).first()
.matches(bulkhead -> bulkhead.getName().equals("foo"))
.matches(bulkhead -> bulkhead.getBulkheadConfig() == registry.getConfiguration(BACKEND).orElse(null));
}
}
|
TestConfig
|
java
|
quarkusio__quarkus
|
extensions/resteasy-classic/resteasy/runtime/src/main/java/io/quarkus/resteasy/runtime/standalone/VertxHttpRequest.java
|
{
"start": 15752,
"end": 16125
}
|
class ____ implements Consumer<Throwable> {
@Override
public void accept(Throwable t) {
try {
requestContext.terminate();
} finally {
VertxHttpAsyncResponse.this.vertxFlush();
}
}
}
}
}
}
|
FlushTask
|
java
|
google__gson
|
gson/src/test/java/com/google/gson/functional/JsonAdapterAnnotationOnFieldsTest.java
|
{
"start": 23979,
"end": 24100
}
|
class ____ {
@JsonAdapter(Serializer.class)
List<Integer> f = Collections.emptyList();
static
|
WithJsonSerializer
|
java
|
elastic__elasticsearch
|
server/src/test/java/org/elasticsearch/http/AbstractHttpServerTransportTests.java
|
{
"start": 54381,
"end": 58126
}
|
class ____ implements HttpChannel, HttpServerChannel {
private boolean open = true;
private ActionListener<Void> closeListener;
private InetSocketAddress localAddress;
private final BlockingDeque<HttpResponse> responses = new LinkedBlockingDeque<>();
private CountDownLatch notifySendResponse = null;
private CountDownLatch blockSendResponse = null;
public CountDownLatch notifyInSendResponse() {
synchronized (this) {
assert notifySendResponse == null : "already notifying";
notifySendResponse = new CountDownLatch(1);
return notifySendResponse;
}
}
public synchronized void blockSendResponse() {
synchronized (this) {
assert blockSendResponse == null : "blockSendResponse already set";
blockSendResponse = new CountDownLatch(1);
}
}
public synchronized void allowSendResponse() {
synchronized (this) {
assert blockSendResponse != null : "blockSendResponse null, no need to allow";
blockSendResponse.countDown();
}
}
public boolean noResponses() {
return responses.peek() == null;
}
public HttpResponse getResponse() {
try {
return responses.takeFirst();
} catch (InterruptedException e) {
fail("interrupted");
}
// unreachable
return null;
}
@Override
public void sendResponse(HttpResponse response, ActionListener<Void> listener) {
CountDownLatch notify;
CountDownLatch blockSend;
synchronized (this) {
notify = notifySendResponse;
blockSend = blockSendResponse;
}
if (notify != null) {
notify.countDown();
synchronized (this) {
notifySendResponse = null;
}
}
if (blockSend != null) {
try {
blockSend.await();
synchronized (this) {
blockSendResponse = null;
}
} catch (InterruptedException e) {
fail("interrupted");
}
}
responses.add(response);
listener.onResponse(null);
}
public void setLocalAddress(InetSocketAddress localAddress) {
this.localAddress = localAddress;
}
@Override
public InetSocketAddress getLocalAddress() {
return localAddress;
}
@Override
public InetSocketAddress getRemoteAddress() {
return null;
}
@Override
public void close() {
synchronized (this) {
if (open == false) {
throw new IllegalStateException("channel already closed!");
}
open = false;
}
if (closeListener != null) {
closeListener.onResponse(null);
}
}
@Override
public boolean isOpen() {
return open;
}
@Override
public void addCloseListener(ActionListener<Void> listener) {
if (open == false) {
listener.onResponse(null);
} else {
if (closeListener != null) {
throw new IllegalStateException("close listener already set");
}
closeListener = listener;
}
}
}
private static
|
TestHttpChannel
|
java
|
elastic__elasticsearch
|
server/src/test/java/org/elasticsearch/index/codec/bloomfilter/ES85BloomFilterRWPostingsFormat.java
|
{
"start": 2406,
"end": 7669
}
|
class ____ extends FieldsConsumer {
private final SegmentWriteState state;
private final IndexOutput indexOut;
private final List<BloomFilter> bloomFilters = new ArrayList<>();
private final List<FieldsGroup> fieldsGroups = new ArrayList<>();
private final List<Closeable> toCloses = new ArrayList<>();
private boolean closed;
FieldsWriter(SegmentWriteState state) throws IOException {
this.state = state;
boolean success = false;
try {
indexOut = state.directory.createOutput(indexFile(state.segmentInfo, state.segmentSuffix), state.context);
toCloses.add(indexOut);
CodecUtil.writeIndexHeader(indexOut, BLOOM_CODEC_NAME, VERSION_CURRENT, state.segmentInfo.getId(), state.segmentSuffix);
success = true;
} finally {
if (success == false) {
IOUtils.closeWhileHandlingException(toCloses);
}
}
}
@Override
public void write(Fields fields, NormsProducer norms) throws IOException {
writePostings(fields, norms);
writeBloomFilters(fields);
}
private void writePostings(Fields fields, NormsProducer norms) throws IOException {
final Map<PostingsFormat, FieldsGroup> currentGroups = new HashMap<>();
for (String field : fields) {
final PostingsFormat postingsFormat = postingsFormats.apply(field);
if (postingsFormat == null) {
throw new IllegalStateException("PostingsFormat for field [" + field + "] wasn't specified");
}
FieldsGroup group = currentGroups.get(postingsFormat);
if (group == null) {
group = new FieldsGroup(postingsFormat, Integer.toString(fieldsGroups.size()), new ArrayList<>());
currentGroups.put(postingsFormat, group);
fieldsGroups.add(group);
}
group.fields().add(field);
}
for (FieldsGroup group : currentGroups.values()) {
final FieldsConsumer writer = group.postingsFormat().fieldsConsumer(new SegmentWriteState(state, group.suffix()));
toCloses.add(writer);
final Fields maskedFields = new FilterLeafReader.FilterFields(fields) {
@Override
public Iterator<String> iterator() {
return group.fields().iterator();
}
};
writer.write(maskedFields, norms);
}
}
private void writeBloomFilters(Fields fields) throws IOException {
for (String field : fields) {
final Terms terms = fields.terms(field);
if (terms == null) {
continue;
}
final int bloomFilterSize = bloomFilterSize(state.segmentInfo.maxDoc());
final int numBytes = numBytesForBloomFilter(bloomFilterSize);
try (ByteArray buffer = bigArrays.newByteArray(numBytes)) {
final TermsEnum termsEnum = terms.iterator();
while (true) {
final BytesRef term = termsEnum.next();
if (term == null) {
break;
}
final int hash = hashTerm(term) % bloomFilterSize;
final int pos = hash >> 3;
final int mask = 1 << (hash & 0x7);
final byte val = (byte) (buffer.get(pos) | mask);
buffer.set(pos, val);
}
bloomFilters.add(new BloomFilter(field, indexOut.getFilePointer(), bloomFilterSize));
final BytesReference bytes = BytesReference.fromByteArray(buffer, numBytes);
bytes.writeTo(new IndexOutputOutputStream(indexOut));
}
}
}
@Override
public void close() throws IOException {
if (closed) {
return;
}
closed = true;
try {
CodecUtil.writeFooter(indexOut);
} finally {
IOUtils.close(toCloses);
}
try (IndexOutput metaOut = state.directory.createOutput(metaFile(state.segmentInfo, state.segmentSuffix), state.context)) {
CodecUtil.writeIndexHeader(metaOut, BLOOM_CODEC_NAME, VERSION_CURRENT, state.segmentInfo.getId(), state.segmentSuffix);
// write postings formats
metaOut.writeVInt(fieldsGroups.size());
for (FieldsGroup group : fieldsGroups) {
group.writeTo(metaOut, state.fieldInfos);
}
// Write bloom filters
metaOut.writeVInt(bloomFilters.size());
for (BloomFilter bloomFilter : bloomFilters) {
bloomFilter.writeTo(metaOut, state.fieldInfos);
}
CodecUtil.writeFooter(metaOut);
}
}
}
}
|
FieldsWriter
|
java
|
resilience4j__resilience4j
|
resilience4j-ratelimiter/src/main/java/io/github/resilience4j/ratelimiter/internal/AtomicRateLimiter.java
|
{
"start": 17537,
"end": 18729
}
|
class ____ implements Metrics {
private AtomicRateLimiterMetrics() {
}
/**
* {@inheritDoc}
*/
@Override
public int getNumberOfWaitingThreads() {
return waitingThreads.get();
}
/**
* {@inheritDoc}
*/
@Override
public int getAvailablePermissions() {
State currentState = state.get();
State estimatedState = calculateNextState(1, -1, currentState);
return estimatedState.activePermissions;
}
/**
* @return estimated time duration in nanos to wait for the next permission
*/
public long getNanosToWait() {
State currentState = state.get();
State estimatedState = calculateNextState(1, -1, currentState);
return estimatedState.nanosToWait;
}
/**
* @return estimated current cycle
*/
public long getCycle() {
State currentState = state.get();
State estimatedState = calculateNextState(1, -1, currentState);
return estimatedState.activeCycle;
}
}
}
|
AtomicRateLimiterMetrics
|
java
|
alibaba__nacos
|
auth/src/main/java/com/alibaba/nacos/auth/parser/http/AiHttpResourceParser.java
|
{
"start": 1069,
"end": 3134
}
|
class ____ extends AbstractHttpResourceParser {
public static final String MCP_PATH = "/ai/mcp";
public static final String A2A_PATH = "/ai/a2a";
private static final String AGENT_CARD_PARAM = "agentCard";
@Override
protected String getNamespaceId(HttpServletRequest request) {
String namespaceId = request.getParameter(Constants.NAMESPACE_ID);
if (StringUtils.isBlank(namespaceId)) {
namespaceId = AiConstants.Mcp.MCP_DEFAULT_NAMESPACE;
}
return namespaceId;
}
@Override
protected String getGroup(HttpServletRequest request) {
return Constants.DEFAULT_GROUP;
}
@Override
protected String getResourceName(HttpServletRequest request) {
String url = request.getRequestURI();
if (url.contains(MCP_PATH)) {
return getMcpName(request);
} else if (url.contains(A2A_PATH)) {
return getAgentName(request);
}
return StringUtils.EMPTY;
}
private String getMcpName(HttpServletRequest request) {
String mcpName = request.getParameter("mcpName");
return StringUtils.isBlank(mcpName) ? StringUtils.EMPTY : mcpName;
}
private String getAgentName(HttpServletRequest request) {
String agentName = request.getParameter("agentName");
if (request.getParameterMap().containsKey(AGENT_CARD_PARAM)) {
agentName = deserializeAndGetAgentName(request.getParameter(AGENT_CARD_PARAM));
}
return StringUtils.isBlank(agentName) ? StringUtils.EMPTY : agentName;
}
private String deserializeAndGetAgentName(String agentCardJson) {
try {
AgentCard agentCard = JacksonUtils.toObj(agentCardJson, AgentCard.class);
return agentCard.getName();
} catch (Exception ignored) {
return StringUtils.EMPTY;
}
}
@Override
protected Properties getProperties(HttpServletRequest request) {
return new Properties();
}
}
|
AiHttpResourceParser
|
java
|
apache__hadoop
|
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/runtime/runc/RuncContainerExecutorConfig.java
|
{
"start": 21451,
"end": 22620
}
|
class ____ {
final private String type;
final private String path;
final private long major;
final private long minor;
final private int fileMode;
final private int uid;
final private int gid;
public String getType() {
return type;
}
public String getPath() {
return path;
}
public long getMajor() {
return major;
}
public long getMinor() {
return minor;
}
public int getFileMode() {
return fileMode;
}
public int getUid() {
return uid;
}
public int getGid() {
return gid;
}
public Device(String type, String path, long major, long minor,
int fileMode, int uid, int gid) {
this.type = type;
this.path = path;
this.major = major;
this.minor = minor;
this.fileMode = fileMode;
this.uid = uid;
this.gid = gid;
}
public Device() {
this(null, null, 0, 0, 0, 0, 0);
}
}
/**
* This
|
Device
|
java
|
spring-projects__spring-security
|
config/src/test/java/org/springframework/security/config/annotation/web/configurers/oauth2/client/OidcLogoutConfigurerTests.java
|
{
"start": 22352,
"end": 27054
}
|
class ____ {
private static final RSAKey key = key();
private static final JWKSource<SecurityContext> jwks = jwks(key);
private static RSAKey key() {
try {
KeyPair pair = KeyPairGenerator.getInstance("RSA").generateKeyPair();
return new RSAKey.Builder((RSAPublicKey) pair.getPublic()).privateKey(pair.getPrivate()).build();
}
catch (Exception ex) {
throw new RuntimeException(ex);
}
}
private static JWKSource<SecurityContext> jwks(RSAKey key) {
try {
return new ImmutableJWKSet<>(new JWKSet(key));
}
catch (Exception ex) {
throw new RuntimeException(ex);
}
}
private final String username = "user";
private final JwtEncoder encoder = new NimbusJwtEncoder(jwks);
private String nonce;
@Autowired
ClientRegistration registration;
@Autowired(required = false)
MockWebServer web;
@Bean
@Order(0)
SecurityFilterChain authorizationServer(HttpSecurity http, ClientRegistration registration) throws Exception {
// @formatter:off
http
.securityMatcher("/jwks", "/login/oauth/authorize", "/nonce", "/token", "/token/logout", "/user")
.authorizeHttpRequests((authorize) -> authorize
.requestMatchers("/jwks").permitAll()
.anyRequest().authenticated()
)
.httpBasic(Customizer.withDefaults())
.oauth2ResourceServer((oauth2) -> oauth2
.jwt((jwt) -> jwt.jwkSetUri(registration.getProviderDetails().getJwkSetUri()))
);
// @formatter:off
return http.build();
}
@Bean
UserDetailsService users(ClientRegistration registration) {
return new InMemoryUserDetailsManager(User.withUsername(registration.getClientId())
.password("{noop}" + registration.getClientSecret()).authorities("APP").build());
}
@GetMapping("/login/oauth/authorize")
String nonce(@RequestParam("nonce") String nonce, @RequestParam("state") String state) {
this.nonce = nonce;
return state;
}
@PostMapping("/token")
Map<String, Object> accessToken(HttpServletRequest request) {
HttpSession session = request.getSession();
JwtEncoderParameters parameters = JwtEncoderParameters
.from(JwtClaimsSet.builder().id("id").subject(this.username)
.issuer(getIssuerUri()).issuedAt(Instant.now())
.expiresAt(Instant.now().plusSeconds(86400)).claim("scope", "openid").build());
String token = this.encoder.encode(parameters).getTokenValue();
return new OIDCTokens(idToken(session.getId()), new BearerAccessToken(token, 86400, new Scope("openid")), null)
.toJSONObject();
}
String idToken(String sessionId) {
OidcIdToken token = TestOidcIdTokens.idToken().issuer(getIssuerUri())
.subject(this.username).expiresAt(Instant.now().plusSeconds(86400))
.audience(List.of(this.registration.getClientId())).nonce(this.nonce)
.claim(LogoutTokenClaimNames.SID, sessionId).build();
JwtEncoderParameters parameters = JwtEncoderParameters
.from(JwtClaimsSet.builder().claims((claims) -> claims.putAll(token.getClaims())).build());
return this.encoder.encode(parameters).getTokenValue();
}
private String getIssuerUri() {
if (this.web == null) {
return TestClientRegistrations.clientRegistration().build().getProviderDetails().getIssuerUri();
}
return this.web.url("/").toString();
}
@GetMapping("/user")
Map<String, Object> userinfo() {
return Map.of("sub", this.username, "id", this.username);
}
@GetMapping("/jwks")
String jwks() {
return new JWKSet(key).toString();
}
@GetMapping("/token/logout")
String logoutToken(@AuthenticationPrincipal OidcUser user) {
OidcLogoutToken token = TestOidcLogoutTokens.withUser(user)
.audience(List.of(this.registration.getClientId())).build();
JwsHeader header = JwsHeader.with(SignatureAlgorithm.RS256).type("logout+jwt").build();
JwtClaimsSet claims = JwtClaimsSet.builder().claims((c) -> c.putAll(token.getClaims())).build();
JwtEncoderParameters parameters = JwtEncoderParameters.from(header, claims);
return this.encoder.encode(parameters).getTokenValue();
}
@GetMapping("/token/logout/all")
String logoutTokenAll(@AuthenticationPrincipal OidcUser user) {
OidcLogoutToken token = TestOidcLogoutTokens.withUser(user)
.audience(List.of(this.registration.getClientId()))
.claims((claims) -> claims.remove(LogoutTokenClaimNames.SID)).build();
JwsHeader header = JwsHeader.with(SignatureAlgorithm.RS256).type("JWT").build();
JwtClaimsSet claims = JwtClaimsSet.builder().claims((c) -> c.putAll(token.getClaims())).build();
JwtEncoderParameters parameters = JwtEncoderParameters.from(header, claims);
return this.encoder.encode(parameters).getTokenValue();
}
}
@Configuration
static
|
OidcProviderConfig
|
java
|
apache__dubbo
|
dubbo-common/src/main/java/org/apache/dubbo/common/utils/AnnotationUtils.java
|
{
"start": 2268,
"end": 2380
}
|
class ____
*
* @param annotatedElement the annotated element
* @param annotationClassName the
|
name
|
java
|
micronaut-projects__micronaut-core
|
http-client-core/src/main/java/io/micronaut/http/client/ReactiveClientResultTransformer.java
|
{
"start": 931,
"end": 1325
}
|
interface ____ {
/**
* Transform the publisher result.
*
* @param publisherResult The publisher result that is an object that conforms to
* {@link io.micronaut.core.async.publisher.Publishers#isConvertibleToPublisher(Class)}
* @return The transformed result
*/
Object transform(Object publisherResult);
}
|
ReactiveClientResultTransformer
|
java
|
apache__avro
|
lang/java/tools/src/main/java/org/apache/avro/tool/TrevniCreateRandomTool.java
|
{
"start": 1188,
"end": 2102
}
|
class ____ implements Tool {
@Override
public String getName() {
return "trevni_random";
}
@Override
public String getShortDescription() {
return "Create a Trevni file filled with random instances of a schema.";
}
@Override
public int run(InputStream stdin, PrintStream out, PrintStream err, List<String> args) throws Exception {
if (args.size() != 3) {
err.println("Usage: schemaFile count outputFile");
return 1;
}
File schemaFile = new File(args.get(0));
int count = Integer.parseInt(args.get(1));
File outputFile = new File(args.get(2));
Schema schema = new Schema.Parser().parse(schemaFile);
AvroColumnWriter<Object> writer = new AvroColumnWriter<>(schema, new ColumnFileMetaData());
for (Object datum : new RandomData(schema, count))
writer.write(datum);
writer.writeTo(outputFile);
return 0;
}
}
|
TrevniCreateRandomTool
|
java
|
apache__kafka
|
clients/src/main/java/org/apache/kafka/clients/consumer/internals/StreamsRebalanceData.java
|
{
"start": 7105,
"end": 9663
}
|
class ____ {
private final Set<String> sourceTopics;
private final Set<String> repartitionSinkTopics;
private final Map<String, TopicInfo> stateChangelogTopics;
private final Map<String, TopicInfo> repartitionSourceTopics;
private final Collection<Set<String>> copartitionGroups;
public Subtopology(final Set<String> sourceTopics,
final Set<String> repartitionSinkTopics,
final Map<String, TopicInfo> repartitionSourceTopics,
final Map<String, TopicInfo> stateChangelogTopics,
final Collection<Set<String>> copartitionGroups
) {
this.sourceTopics = Set.copyOf(Objects.requireNonNull(sourceTopics, "Subtopology ID cannot be null"));
this.repartitionSinkTopics =
Set.copyOf(Objects.requireNonNull(repartitionSinkTopics, "Repartition sink topics cannot be null"));
this.repartitionSourceTopics =
Map.copyOf(Objects.requireNonNull(repartitionSourceTopics, "Repartition source topics cannot be null"));
this.stateChangelogTopics =
Map.copyOf(Objects.requireNonNull(stateChangelogTopics, "State changelog topics cannot be null"));
this.copartitionGroups =
Collections.unmodifiableCollection(Objects.requireNonNull(
copartitionGroups,
"Co-partition groups cannot be null"
)
);
}
public Set<String> sourceTopics() {
return sourceTopics;
}
public Set<String> repartitionSinkTopics() {
return repartitionSinkTopics;
}
public Map<String, TopicInfo> stateChangelogTopics() {
return stateChangelogTopics;
}
public Map<String, TopicInfo> repartitionSourceTopics() {
return repartitionSourceTopics;
}
public Collection<Set<String>> copartitionGroups() {
return copartitionGroups;
}
@Override
public String toString() {
return "Subtopology{" +
"sourceTopics=" + sourceTopics +
", repartitionSinkTopics=" + repartitionSinkTopics +
", stateChangelogTopics=" + stateChangelogTopics +
", repartitionSourceTopics=" + repartitionSourceTopics +
", copartitionGroups=" + copartitionGroups +
'}';
}
}
public static
|
Subtopology
|
java
|
mockito__mockito
|
mockito-core/src/test/java/org/mockito/internal/stubbing/defaultanswers/ReturnsSmartNullsTest.java
|
{
"start": 2141,
"end": 3915
}
|
interface ____ {
Foo get();
Foo withArgs(String oneArg, String otherArg);
}
@Test
public void should_return_an_object_that_fails_on_any_method_invocation_for_non_primitives()
throws Throwable {
Answer<Object> answer = new ReturnsSmartNulls();
Foo smartNull = (Foo) answer.answer(invocationOf(Foo.class, "get"));
try {
smartNull.get();
fail();
} catch (SmartNullPointerException expected) {
}
}
@Test
public void should_return_an_object_that_allows_object_methods() throws Throwable {
Answer<Object> answer = new ReturnsSmartNulls();
Foo smartNull = (Foo) answer.answer(invocationOf(Foo.class, "get"));
assertThat(smartNull.toString()).contains("SmartNull returned by").contains("foo.get()");
}
@Test
public void should_print_the_parameters_when_calling_a_method_with_args() throws Throwable {
Answer<Object> answer = new ReturnsSmartNulls();
Foo smartNull = (Foo) answer.answer(invocationOf(Foo.class, "withArgs", "oompa", "lumpa"));
assertThat(smartNull.toString())
.contains("foo.withArgs")
.contains("oompa")
.contains("lumpa");
}
@Test
public void should_print_the_parameters_on_SmartNullPointerException_message()
throws Throwable {
Answer<Object> answer = new ReturnsSmartNulls();
Foo smartNull = (Foo) answer.answer(invocationOf(Foo.class, "withArgs", "oompa", "lumpa"));
try {
smartNull.get();
fail();
} catch (SmartNullPointerException e) {
assertThat(e).hasMessageContaining("oompa").hasMessageContaining("lumpa");
}
}
|
Foo
|
java
|
micronaut-projects__micronaut-core
|
http/src/main/java/io/micronaut/http/ssl/PemParser.java
|
{
"start": 5956,
"end": 7445
}
|
class ____ implements Decoder {
@Override
public Collection<?> decode(byte[] der) throws GeneralSecurityException {
// we need to figure out which key algorithm is used, so we parse the DER a bit.
DerInput outer = new DerInput(der);
DerInput privateKeyInfo = outer.readSequence();
// Version
privateKeyInfo.expect(0x02);
privateKeyInfo.expect(0x01);
privateKeyInfo.expect(0x00);
DerInput privateKeyAlgorithm = privateKeyInfo.readSequence();
String algOid = privateKeyAlgorithm.readOid();
String alg = switch (algOid) {
case OID_RSA -> "RSA";
case OID_EC -> "EC";
case "1.3.101.112" -> "Ed25519";
case "1.3.101.113" -> "Ed448";
case "2.16.840.1.101.3.4.3.17", "2.16.840.1.101.3.4.3.18",
"2.16.840.1.101.3.4.3.19" -> "ML-DSA";
case "2.16.840.1.101.3.4.4.1", "2.16.840.1.101.3.4.4.2", "2.16.840.1.101.3.4.4.3" ->
"ML-KEM";
default ->
throw new IllegalArgumentException("Unrecognized PKCS#8 key algorithm " + algOid);
};
KeyFactory factory = provider == null ? KeyFactory.getInstance(alg) : KeyFactory.getInstance(alg, provider);
return List.of(factory.generatePrivate(new PKCS8EncodedKeySpec(der)));
}
}
private final
|
Pkcs8PrivateKey
|
java
|
micronaut-projects__micronaut-core
|
inject-java-test/src/test/groovy/io/micronaut/inject/visitor/beans/Message.java
|
{
"start": 51,
"end": 168
}
|
class ____ {
public Builder<?> getBuilder() {
return new Builder<>();
}
public static final
|
Message
|
java
|
quarkusio__quarkus
|
integration-tests/main/src/test/java/io/quarkus/it/main/QuarkusTestNestedTestCase.java
|
{
"start": 2614,
"end": 4463
}
|
class ____ {
String innerValue;
@BeforeEach
void beforeEach() {
COUNT_BEFORE_EACH.incrementAndGet();
innerValue = EXPECTED_INNER_VALUE;
}
@Test
@Order(1)
void testOne() {
assertEquals(1, COUNT_BEFORE_ALL.get(), "COUNT_BEFORE_ALL");
assertEquals(7, COUNT_BEFORE_EACH.get(), "COUNT_BEFORE_EACH");
assertEquals(2, COUNT_TEST.getAndIncrement(), "COUNT_TEST");
assertEquals(5, COUNT_AFTER_EACH.get(), "COUNT_AFTER_EACH");
assertEquals(0, COUNT_AFTER_ALL.get(), "COUNT_AFTER_ALL");
}
@Test
@Order(2)
void testTwo() {
assertEquals(1, COUNT_BEFORE_ALL.get(), "COUNT_BEFORE_ALL");
assertEquals(9, COUNT_BEFORE_EACH.get(), "COUNT_BEFORE_EACH");
assertEquals(3, COUNT_TEST.getAndIncrement(), "COUNT_TEST");
assertEquals(7, COUNT_AFTER_EACH.get(), "COUNT_AFTER_EACH");
assertEquals(0, COUNT_AFTER_ALL.get(), "COUNT_AFTER_ALL");
}
@Test
@Order(3)
void testOuterInstancesInBeforeEach() {
assertEquals(1, TestContextCheckerBeforeEachCallback.OUTER_INSTANCES.size());
}
@Test
@Order(4)
void testOuterInstancesInAfterEach() {
assertEquals(1, TestContextCheckerAfterEachCallback.OUTER_INSTANCES.size());
}
@Test
void testInnerAndOuterValues() {
assertEquals(EXPECTED_INNER_VALUE, innerValue);
assertEquals(EXPECTED_OUTER_VALUE, outerValue);
}
@AfterEach
void afterEach() {
COUNT_AFTER_EACH.incrementAndGet();
}
@Nested
@TestInstance(TestInstance.Lifecycle.PER_CLASS)
@TestMethodOrder(OrderAnnotation.class)
|
FirstNested
|
java
|
apache__spark
|
examples/src/main/java/org/apache/spark/examples/mllib/JavaSimpleFPGrowth.java
|
{
"start": 1208,
"end": 2238
}
|
class ____ {
public static void main(String[] args) {
SparkConf conf = new SparkConf().setAppName("FP-growth Example");
JavaSparkContext sc = new JavaSparkContext(conf);
// $example on$
JavaRDD<String> data = sc.textFile("data/mllib/sample_fpgrowth.txt");
JavaRDD<List<String>> transactions = data.map(line -> Arrays.asList(line.split(" ")));
FPGrowth fpg = new FPGrowth()
.setMinSupport(0.2)
.setNumPartitions(10);
FPGrowthModel<String> model = fpg.run(transactions);
for (FPGrowth.FreqItemset<String> itemset: model.freqItemsets().toJavaRDD().collect()) {
System.out.println("[" + itemset.javaItems() + "], " + itemset.freq());
}
double minConfidence = 0.8;
for (AssociationRules.Rule<String> rule
: model.generateAssociationRules(minConfidence).toJavaRDD().collect()) {
System.out.println(
rule.javaAntecedent() + " => " + rule.javaConsequent() + ", " + rule.confidence());
}
// $example off$
sc.stop();
}
}
|
JavaSimpleFPGrowth
|
java
|
apache__hadoop
|
hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azure/TestKeyPageBlobDirectories.java
|
{
"start": 1082,
"end": 6642
}
|
class ____ extends AbstractWasbTestBase{
@Override
protected AzureBlobStorageTestAccount createTestAccount() throws Exception {
return AzureBlobStorageTestAccount.create();
}
public void expectPageBlobKey(boolean expectedOutcome, AzureNativeFileSystemStore store, String path) {
assertEquals(expectedOutcome, store.isPageBlobKey(path),
"Unexpected result for isPageBlobKey(" + path + ")");
}
@Test
public void testKeySetWithoutAsterisk() throws Exception {
NativeAzureFileSystem azureFs = fs;
AzureNativeFileSystemStore store = azureFs.getStore();
Configuration conf = fs.getConf();
String dirList = "/service/WALs,/data/mypageblobfiles";
conf.set(AzureNativeFileSystemStore.KEY_PAGE_BLOB_DIRECTORIES, dirList);
URI uri = fs.getUri();
fs.initialize(uri, conf);
expectPageBlobKey(false, store, "/");
expectPageBlobKey(false, store, "service");
expectPageBlobKey(false, store, "service/dir/recovered.edits");
expectPageBlobKey(true, store, "service/WALs/recovered.edits");
expectPageBlobKey(false, store, "data/dir/recovered.txt");
expectPageBlobKey(true, store, "data/mypageblobfiles/recovered.txt");
}
@Test
public void testKeySetWithAsterisk() throws Exception {
NativeAzureFileSystem azureFs = fs;
AzureNativeFileSystemStore store = azureFs.getStore();
Configuration conf = fs.getConf();
String dirList = "/service/*/*/*/recovered.edits,/*/recovered.edits,/*/*/*/WALs, /*/*/oldWALs/*/*";
conf.set(AzureNativeFileSystemStore.KEY_PAGE_BLOB_DIRECTORIES, dirList);
URI uri = fs.getUri();
fs.initialize(uri, conf);
expectPageBlobKey(false, store, "/");
expectPageBlobKey(false, store, "service");
expectPageBlobKey(false, store, "service/dir/recovered.edits");
expectPageBlobKey(true, store, "service/dir1/dir2/dir3/recovered.edits");
expectPageBlobKey(false, store, "data/dir/recovered.edits");
expectPageBlobKey(true, store, "data/recovered.edits");
expectPageBlobKey(false, store, "dir1/dir2/WALs/data");
expectPageBlobKey(true, store, "dir1/dir2/dir3/WALs/data1");
expectPageBlobKey(true, store, "dir1/dir2/dir3/WALs/data2");
expectPageBlobKey(false, store, "dir1/oldWALs/data");
expectPageBlobKey(false, store, "dir1/dir2/oldWALs/data");
expectPageBlobKey(true, store, "dir1/dir2/oldWALs/dir3/dir4/data");
}
@Test
public void testKeySetUsingFullName() throws Exception {
NativeAzureFileSystem azureFs = fs;
AzureNativeFileSystemStore store = azureFs.getStore();
Configuration conf = fs.getConf();
String dirList = "/service/WALs,/data/mypageblobfiles,/*/*/WALs,/*/*/recover.edits";
conf.set(AzureNativeFileSystemStore.KEY_PAGE_BLOB_DIRECTORIES, dirList);
URI uri = fs.getUri();
fs.initialize(uri, conf);
final String defaultFS = FileSystem.getDefaultUri(conf).toString();
expectPageBlobKey(false, store, defaultFS + "service/recover.edits");
expectPageBlobKey(true, store, defaultFS + "service/WALs/recover.edits");
expectPageBlobKey(false, store, defaultFS + "data/mismatch/mypageblobfiles/data");
expectPageBlobKey(true, store, defaultFS + "data/mypageblobfiles/data");
expectPageBlobKey(false, store, defaultFS + "dir1/dir2/dir3/WALs/data");
expectPageBlobKey(true, store, defaultFS + "dir1/dir2/WALs/data");
expectPageBlobKey(false, store, defaultFS + "dir1/dir2/dir3/recover.edits");
expectPageBlobKey(true, store, defaultFS + "dir1/dir2/recover.edits");
}
@Test
public void testKeyContainsAsterisk() throws IOException {
NativeAzureFileSystem azureFs = fs;
AzureNativeFileSystemStore store = azureFs.getStore();
Configuration conf = fs.getConf();
// Test dir name which contains *
String dirList = "/service/*/*/*/d*ir,/*/fi**le.data,/*/*/*/WALs*, /*/*/oldWALs";
conf.set(AzureNativeFileSystemStore.KEY_PAGE_BLOB_DIRECTORIES, dirList);
URI uri = fs.getUri();
fs.initialize(uri, conf);
expectPageBlobKey(false, store, "/");
expectPageBlobKey(false, store, "service");
expectPageBlobKey(false, store, "service/d*ir/data");
expectPageBlobKey(true, store, "service/dir1/dir2/dir3/d*ir/data");
expectPageBlobKey(false, store, "dir/fi*le.data");
expectPageBlobKey(true, store, "dir/fi**le.data");
expectPageBlobKey(false, store, "dir1/dir2/WALs/data");
expectPageBlobKey(false, store, "dir1/dir2/dir3/WALs/data");
expectPageBlobKey(true, store, "dir1/dir2/dir3/WALs*/data1");
expectPageBlobKey(true, store, "dir1/dir2/dir3/WALs*/data2");
expectPageBlobKey(false, store, "dir1/oldWALs/data");
expectPageBlobKey(true, store, "dir1/dir2/oldWALs/data1");
expectPageBlobKey(true, store, "dir1/dir2/oldWALs/data2");
}
@Test
public void testKeyWithCommonPrefix() throws IOException {
NativeAzureFileSystem azureFs = fs;
AzureNativeFileSystemStore store = azureFs.getStore();
Configuration conf = fs.getConf();
// Test dir name which contains *
String dirList = "/service/WALs,/*/*/WALs";
conf.set(AzureNativeFileSystemStore.KEY_PAGE_BLOB_DIRECTORIES, dirList);
URI uri = fs.getUri();
fs.initialize(uri, conf);
expectPageBlobKey(false, store, "/");
expectPageBlobKey(false, store, "service");
expectPageBlobKey(false, store, "service/WALsssssss/dir");
expectPageBlobKey(true, store, "service/WALs/dir");
expectPageBlobKey(false, store, "service/dir/WALsss/data");
expectPageBlobKey(true, store, "service/dir/WALs/data");
}
}
|
TestKeyPageBlobDirectories
|
java
|
google__guava
|
android/guava/src/com/google/common/util/concurrent/ServiceManager.java
|
{
"start": 6787,
"end": 18980
}
|
class ____ {
/** Constructor for use by subclasses. */
public Listener() {}
/**
* Called when the service initially becomes healthy.
*
* <p>This will be called at most once after all the services have entered the {@linkplain
* State#RUNNING running} state. If any services fail during start up or {@linkplain
* State#FAILED fail}/{@linkplain State#TERMINATED terminate} before all other services have
* started {@linkplain State#RUNNING running} then this method will not be called.
*/
public void healthy() {}
/**
* Called when the all of the component services have reached a terminal state, either
* {@linkplain State#TERMINATED terminated} or {@linkplain State#FAILED failed}.
*/
public void stopped() {}
/**
* Called when a component service has {@linkplain State#FAILED failed}.
*
* @param service The service that failed.
*/
public void failure(Service service) {}
}
/**
* An encapsulation of all of the state that is accessed by the {@linkplain ServiceListener
* service listeners}. This is extracted into its own object so that {@link ServiceListener} could
* be made {@code static} and its instances can be safely constructed and added in the {@link
* ServiceManager} constructor without having to close over the partially constructed {@link
* ServiceManager} instance (i.e. avoid leaking a pointer to {@code this}).
*/
private final ServiceManagerState state;
private final ImmutableList<Service> services;
/**
* Constructs a new instance for managing the given services.
*
* @param services The services to manage
* @throws IllegalArgumentException if not all services are {@linkplain State#NEW new} or if there
* are any duplicate services.
*/
public ServiceManager(Iterable<? extends Service> services) {
ImmutableList<Service> copy = ImmutableList.copyOf(services);
if (copy.isEmpty()) {
// Having no services causes the manager to behave strangely. Notably, listeners are never
// fired. To avoid this we substitute a placeholder service.
logger
.get()
.log(
Level.WARNING,
"ServiceManager configured with no services. Is your application configured"
+ " properly?",
new EmptyServiceManagerWarning());
copy = ImmutableList.of(new NoOpService());
}
this.state = new ServiceManagerState(copy);
this.services = copy;
WeakReference<ServiceManagerState> stateReference = new WeakReference<>(state);
for (Service service : copy) {
service.addListener(new ServiceListener(service, stateReference), directExecutor());
// We check the state after adding the listener as a way to ensure that our listener was added
// to a NEW service.
checkArgument(service.state() == NEW, "Can only manage NEW services, %s", service);
}
// We have installed all of our listeners and after this point any state transition should be
// correct.
this.state.markReady();
}
/**
* Registers a {@link Listener} to be {@linkplain Executor#execute executed} on the given
* executor. The listener will not have previous state changes replayed, so it is suggested that
* listeners are added before any of the managed services are {@linkplain Service#startAsync
* started}.
*
* <p>{@code addListener} guarantees execution ordering across calls to a given listener but not
* across calls to multiple listeners. Specifically, a given listener will have its callbacks
* invoked in the same order as the underlying service enters those states. Additionally, at most
* one of the listener's callbacks will execute at once. However, multiple listeners' callbacks
* may execute concurrently, and listeners may execute in an order different from the one in which
* they were registered.
*
* <p>RuntimeExceptions thrown by a listener will be caught and logged. Any exception thrown
* during {@code Executor.execute} (e.g., a {@code RejectedExecutionException}) will be caught and
* logged.
*
* <p>When selecting an executor, note that {@code directExecutor} is dangerous in some cases. See
* the discussion in the {@link ListenableFuture#addListener ListenableFuture.addListener}
* documentation.
*
* @param listener the listener to run when the manager changes state
* @param executor the executor in which the listeners callback methods will be run.
*/
  public void addListener(Listener listener, Executor executor) {
    // All listener bookkeeping (ordering, replay, and exception-logging guarantees described in
    // the Javadoc above) is implemented by the shared ServiceManagerState; just delegate.
    state.addListener(listener, executor);
  }
/**
* Initiates service {@linkplain Service#startAsync startup} on all the services being managed. It
* is only valid to call this method if all of the services are {@linkplain State#NEW new}.
*
* @return this
* @throws IllegalStateException if any of the Services are not {@link State#NEW new} when the
* method is called.
*/
@CanIgnoreReturnValue
public ServiceManager startAsync() {
for (Service service : services) {
checkState(service.state() == NEW, "Not all services are NEW, cannot start %s", this);
}
for (Service service : services) {
try {
state.tryStartTiming(service);
service.startAsync();
} catch (IllegalStateException e) {
// This can happen if the service has already been started or stopped (e.g. by another
// service or listener). Our contract says it is safe to call this method if
// all services were NEW when it was called, and this has already been verified above, so we
// don't propagate the exception.
logger.get().log(Level.WARNING, "Unable to start Service " + service, e);
}
}
return this;
}
/**
* Waits for the {@link ServiceManager} to become {@linkplain #isHealthy() healthy}. The manager
* will become healthy after all the component services have reached the {@linkplain State#RUNNING
* running} state.
*
* @throws IllegalStateException if the service manager reaches a state from which it cannot
* become {@linkplain #isHealthy() healthy}.
*/
  public void awaitHealthy() {
    // Blocks on the shared state's monitor until all services are RUNNING, or throws
    // IllegalStateException if the manager can no longer become healthy.
    state.awaitHealthy();
  }
/**
* Waits for the {@link ServiceManager} to become {@linkplain #isHealthy() healthy} for no more
* than the given time. The manager will become healthy after all the component services have
* reached the {@linkplain State#RUNNING running} state.
*
* @param timeout the maximum time to wait
* @throws TimeoutException if not all of the services have finished starting within the deadline
* @throws IllegalStateException if the service manager reaches a state from which it cannot
* become {@linkplain #isHealthy() healthy}.
* @since 33.4.0 (but since 28.0 in the JRE flavor)
*/
  @IgnoreJRERequirement // Users will use this only if they're already using Duration.
  public void awaitHealthy(Duration timeout) throws TimeoutException {
    // Convert the Duration to nanos (saturating at Long.MAX_VALUE) and reuse the long/TimeUnit
    // overload so both entry points share one implementation.
    awaitHealthy(toNanosSaturated(timeout), TimeUnit.NANOSECONDS);
  }
/**
* Waits for the {@link ServiceManager} to become {@linkplain #isHealthy() healthy} for no more
* than the given time. The manager will become healthy after all the component services have
* reached the {@linkplain State#RUNNING running} state.
*
* @param timeout the maximum time to wait
* @param unit the time unit of the timeout argument
* @throws TimeoutException if not all of the services have finished starting within the deadline
* @throws IllegalStateException if the service manager reaches a state from which it cannot
* become {@linkplain #isHealthy() healthy}.
*/
  @SuppressWarnings("GoodTime") // should accept a java.time.Duration
  public void awaitHealthy(long timeout, TimeUnit unit) throws TimeoutException {
    // The shared state object owns the monitor and the "healthy" guard condition.
    state.awaitHealthy(timeout, unit);
  }
/**
* Initiates service {@linkplain Service#stopAsync shutdown} if necessary on all the services
* being managed.
*
* @return this
*/
@CanIgnoreReturnValue
public ServiceManager stopAsync() {
for (Service service : services) {
service.stopAsync();
}
return this;
}
/**
* Waits for the all the services to reach a terminal state. After this method returns all
* services will either be {@linkplain Service.State#TERMINATED terminated} or {@linkplain
* Service.State#FAILED failed}.
*/
  public void awaitStopped() {
    // Blocks until every service has reached a terminal state (TERMINATED or FAILED).
    state.awaitStopped();
  }
/**
* Waits for the all the services to reach a terminal state for no more than the given time. After
* this method returns all services will either be {@linkplain Service.State#TERMINATED
* terminated} or {@linkplain Service.State#FAILED failed}.
*
* @param timeout the maximum time to wait
* @throws TimeoutException if not all of the services have stopped within the deadline
* @since 33.4.0 (but since 28.0 in the JRE flavor)
*/
  @IgnoreJRERequirement // Users will use this only if they're already using Duration.
  public void awaitStopped(Duration timeout) throws TimeoutException {
    // Convert the Duration to nanos (saturating) and reuse the long/TimeUnit overload.
    awaitStopped(toNanosSaturated(timeout), TimeUnit.NANOSECONDS);
  }
/**
* Waits for the all the services to reach a terminal state for no more than the given time. After
* this method returns all services will either be {@linkplain Service.State#TERMINATED
* terminated} or {@linkplain Service.State#FAILED failed}.
*
* @param timeout the maximum time to wait
* @param unit the time unit of the timeout argument
* @throws TimeoutException if not all of the services have stopped within the deadline
*/
  @SuppressWarnings("GoodTime") // should accept a java.time.Duration
  public void awaitStopped(long timeout, TimeUnit unit) throws TimeoutException {
    // The shared state object owns the monitor and the "stopped" guard condition.
    state.awaitStopped(timeout, unit);
  }
/**
* Returns true if all services are currently in the {@linkplain State#RUNNING running} state.
*
* <p>Users who want more detailed information should use the {@link #servicesByState} method to
* get detailed information about which services are not running.
*/
public boolean isHealthy() {
for (Service service : services) {
if (!service.isRunning()) {
return false;
}
}
return true;
}
/**
* Provides a snapshot of the current state of all the services under management.
*
* <p>N.B. This snapshot is guaranteed to be consistent, i.e. the set of states returned will
* correspond to a point in time view of the services.
*
* @since 29.0 (present with return type {@code ImmutableMultimap} since 14.0)
*/
  @Override
  public ImmutableSetMultimap<State, Service> servicesByState() {
    // The state object takes the snapshot under its monitor, so the returned multimap is a
    // consistent point-in-time view across all services.
    return state.servicesByState();
  }
/**
* Returns the service load times. This value will only return startup times for services that
* have finished starting.
*
* @return Map of services and their corresponding startup time in millis, the map entries will be
* ordered by startup time.
*/
  public ImmutableMap<Service, Long> startupTimes() {
    // Only services that have finished starting have entries; per the Javadoc, entries are
    // ordered by startup time.
    return state.startupTimes();
  }
/**
* Returns the service load times. This value will only return startup times for services that
* have finished starting.
*
* @return Map of services and their corresponding startup time, the map entries will be ordered
* by startup time.
* @since 33.4.0 (but since 31.0 in the JRE flavor)
*/
  @J2ObjCIncompatible
  // If users use this when they shouldn't, we hope that NewApi will catch subsequent Duration calls
  @IgnoreJRERequirement
  public ImmutableMap<Service, Duration> startupDurations() {
    // View the millisecond startup times as Durations, then copy into an ImmutableMap; copyOf
    // preserves the (startup-time-ordered) iteration order of startupTimes().
    return ImmutableMap.copyOf(
        Maps.<Service, Long, Duration>transformValues(startupTimes(), Duration::ofMillis));
  }
@Override
public String toString() {
return MoreObjects.toStringHelper(ServiceManager.class)
.add("services", Collections2.filter(services, not(instanceOf(NoOpService.class))))
.toString();
}
/**
* An encapsulation of all the mutable state of the {@link ServiceManager} that needs to be
* accessed by instances of {@link ServiceListener}.
*/
private static final
|
Listener
|
java
|
elastic__elasticsearch
|
x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/expression/function/scalar/math/ToNumberFunctionPipeTests.java
|
{
"start": 1029,
"end": 5292
}
|
class ____ extends AbstractNodeTestCase<ToNumberFunctionPipe, Pipe> {
@Override
protected ToNumberFunctionPipe randomInstance() {
return randomToNumberFunctionPipe();
}
private Expression randomToNumberFunctionExpression() {
return randomToNumberFunctionPipe().expression();
}
public static ToNumberFunctionPipe randomToNumberFunctionPipe() {
return (ToNumberFunctionPipe) (new ToNumber(
randomSource(),
randomStringLiteral(),
randomFrom(true, false) ? randomIntLiteral() : null
).makePipe());
}
@Override
public void testTransform() {
// test transforming only the properties (source, expression),
// skipping the children (string and base) which are tested separately
ToNumberFunctionPipe b1 = randomInstance();
Expression newExpression = randomValueOtherThan(b1.expression(), () -> randomToNumberFunctionExpression());
ToNumberFunctionPipe newB = new ToNumberFunctionPipe(b1.source(), newExpression, b1.value(), b1.base());
assertEquals(newB, b1.transformPropertiesOnly(Expression.class, v -> Objects.equals(v, b1.expression()) ? newExpression : v));
ToNumberFunctionPipe b2 = randomInstance();
Source newLoc = randomValueOtherThan(b2.source(), () -> randomSource());
newB = new ToNumberFunctionPipe(newLoc, b2.expression(), b2.value(), b2.base());
assertEquals(newB, b2.transformPropertiesOnly(Source.class, v -> Objects.equals(v, b2.source()) ? newLoc : v));
}
@Override
public void testReplaceChildren() {
ToNumberFunctionPipe b = randomInstance();
Pipe newValue = randomValueOtherThan(b.value(), () -> pipe(randomStringLiteral()));
Pipe newBase = b.base() == null ? null : randomValueOtherThan(b.base(), () -> pipe(randomIntLiteral()));
ToNumberFunctionPipe newB = new ToNumberFunctionPipe(b.source(), b.expression(), b.value(), b.base());
ToNumberFunctionPipe transformed = newB.replaceChildren(newValue, b.base());
assertEquals(transformed.value(), newValue);
assertEquals(transformed.source(), b.source());
assertEquals(transformed.expression(), b.expression());
assertEquals(transformed.base(), b.base());
transformed = newB.replaceChildren(b.value(), newBase);
assertEquals(transformed.value(), b.value());
assertEquals(transformed.source(), b.source());
assertEquals(transformed.expression(), b.expression());
assertEquals(transformed.base(), newBase);
transformed = newB.replaceChildren(newValue, newBase);
assertEquals(transformed.value(), newValue);
assertEquals(transformed.source(), b.source());
assertEquals(transformed.expression(), b.expression());
assertEquals(transformed.base(), newBase);
}
@Override
protected ToNumberFunctionPipe mutate(ToNumberFunctionPipe instance) {
List<Function<ToNumberFunctionPipe, ToNumberFunctionPipe>> randoms = new ArrayList<>();
randoms.add(
f -> new ToNumberFunctionPipe(
f.source(),
f.expression(),
pipe(((Expression) randomValueOtherThan(f.value(), () -> randomStringLiteral()))),
f.base()
)
);
randoms.add(
f -> new ToNumberFunctionPipe(
f.source(),
f.expression(),
f.value(),
f.base() == null ? null : randomValueOtherThan(f.base(), () -> pipe(randomIntLiteral()))
)
);
randoms.add(
f -> new ToNumberFunctionPipe(
f.source(),
f.expression(),
pipe(((Expression) randomValueOtherThan(f.value(), () -> randomStringLiteral()))),
f.base() == null ? null : randomValueOtherThan(f.base(), () -> pipe(randomIntLiteral()))
)
);
return randomFrom(randoms).apply(instance);
}
@Override
protected ToNumberFunctionPipe copy(ToNumberFunctionPipe instance) {
return new ToNumberFunctionPipe(instance.source(), instance.expression(), instance.value(), instance.base());
}
}
|
ToNumberFunctionPipeTests
|
java
|
dropwizard__dropwizard
|
dropwizard-testing/src/test/java/io/dropwizard/testing/DropwizardTestSupportTest.java
|
{
"start": 6918,
"end": 7555
}
|
class ____<T extends Configuration> extends DropwizardTestSupport.ServiceListener<T> {
volatile boolean executedOnRun = false;
volatile boolean executedOnStop = false;
@Override
public void onRun(T configuration, Environment environment, DropwizardTestSupport<T> rule) throws Exception {
super.onRun(configuration, environment, rule);
executedOnRun = true;
}
@Override
public void onStop(DropwizardTestSupport<T> rule) throws Exception {
super.onStop(rule);
executedOnStop = true;
}
}
public static
|
TestServiceListener
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/ReturnAtTheEndOfVoidFunctionTest.java
|
{
"start": 1913,
"end": 2324
}
|
class ____ {
public int stuff() {
int x = 5;
return x;
}
}
""")
.expectUnchanged()
.doTest();
}
@Test
public void returnAtDifferentPositionIsNotDeleted() {
helper
.addInputLines(
"Builder.java",
"""
package com.google.gporeba;
public final
|
Builder
|
java
|
micronaut-projects__micronaut-core
|
core-processor/src/main/java/io/micronaut/inject/beans/visitor/IntrospectedToBeanPropertiesTransformer.java
|
{
"start": 1376,
"end": 3707
}
|
class ____ implements TypedAnnotationTransformer<Introspected> {
@Override
public List<AnnotationValue<?>> transform(AnnotationValue<Introspected> annotation, VisitorContext visitorContext) {
// Mapping bellow should only set the members when the value is different then the default one
Introspected.AccessKind[] accessKinds = annotation.enumValues(BeanProperties.MEMBER_ACCESS_KIND, Introspected.AccessKind.class);
AnnotationValueBuilder<Annotation> beanPropertiesBuilder = AnnotationValue.builder(BeanProperties.class.getName(), RetentionPolicy.CLASS);
if (accessKinds.length != 0 && !Arrays.equals(accessKinds, Introspected.DEFAULT_ACCESS_KIND)) {
beanPropertiesBuilder = beanPropertiesBuilder.member(BeanProperties.MEMBER_ACCESS_KIND, Arrays.stream(accessKinds).map(Enum::name).toArray(String[]::new));
}
Introspected.Visibility[] visibilities = annotation.enumValues(BeanProperties.MEMBER_VISIBILITY, Introspected.Visibility.class);
if (visibilities.length != 0 && !Arrays.equals(visibilities, Introspected.DEFAULT_VISIBILITY)) {
beanPropertiesBuilder = beanPropertiesBuilder.member(BeanProperties.MEMBER_VISIBILITY, Arrays.stream(visibilities).map(Enum::name).toArray(String[]::new));
}
String[] includes = annotation.stringValues(BeanProperties.MEMBER_INCLUDES);
if (includes.length > 0) {
beanPropertiesBuilder = beanPropertiesBuilder.member(BeanProperties.MEMBER_INCLUDES, includes);
}
String[] excludes = annotation.stringValues(BeanProperties.MEMBER_EXCLUDES);
if (excludes.length > 0) {
beanPropertiesBuilder = beanPropertiesBuilder.member(BeanProperties.MEMBER_EXCLUDES, excludes);
}
String[] excludedAnnotations = annotation.stringValues(BeanProperties.MEMBER_EXCLUDED_ANNOTATIONS);
if (excludedAnnotations.length > 0) {
beanPropertiesBuilder = beanPropertiesBuilder.member(BeanProperties.MEMBER_EXCLUDED_ANNOTATIONS, excludedAnnotations);
}
return List.of(
annotation.mutate().stereotype(beanPropertiesBuilder.build()).build()
);
}
@Override
public Class<Introspected> annotationType() {
return Introspected.class;
}
}
|
IntrospectedToBeanPropertiesTransformer
|
java
|
apache__camel
|
components/camel-salesforce/camel-salesforce-component/src/main/java/org/apache/camel/component/salesforce/internal/processor/CompositeApiProcessor.java
|
{
"start": 2115,
"end": 2211
}
|
class ____ extends AbstractSalesforceProcessor {
@FunctionalInterface
|
CompositeApiProcessor
|
java
|
playframework__playframework
|
persistence/play-java-jdbc/src/main/java/play/db/HikariCPComponents.java
|
{
"start": 312,
"end": 548
}
|
interface ____ extends ConnectionPoolComponents {
Environment environment();
default ConnectionPool connectionPool() {
return new DefaultConnectionPool(new HikariCPConnectionPool(environment().asScala()));
}
}
|
HikariCPComponents
|
java
|
apache__flink
|
flink-table/flink-sql-jdbc-driver/src/test/java/org/apache/flink/table/jdbc/FlinkConnectionTest.java
|
{
"start": 1297,
"end": 4371
}
|
class ____ extends FlinkJdbcDriverTestBase {
@Test
public void testCatalogSchema() throws Exception {
try (FlinkConnection connection = new FlinkConnection(getDriverUri())) {
assertEquals("default_catalog", connection.getCatalog());
assertEquals("default_database", connection.getSchema());
assertThrowsExactly(
SQLException.class,
() -> connection.setCatalog("invalid_catalog"),
"Set catalog[invalid_catalog] fail");
assertThrowsExactly(
SQLException.class,
() -> connection.setSchema("invalid_database"),
"Set schema[invalid_database] fail");
assertEquals("default_catalog", connection.getCatalog());
assertEquals("default_database", connection.getSchema());
// Create new catalog and database
Executor executor = connection.getExecutor();
StatementResult result =
executor.executeStatement(
"CREATE CATALOG test_catalog WITH ('type'='generic_in_memory');");
assertTrue(result.hasNext());
assertEquals("OK", result.next().getString(0).toString());
connection.setCatalog("test_catalog");
result = executor.executeStatement("CREATE DATABASE test_database;");
assertTrue(result.hasNext());
assertEquals("OK", result.next().getString(0).toString());
connection.setSchema("test_database");
assertEquals("test_catalog", connection.getCatalog());
assertEquals("test_database", connection.getSchema());
}
}
@Test
public void testClientInfo() throws Exception {
Properties properties = new Properties();
properties.setProperty("key3", "val3");
DriverUri driverUri = getDriverUri("jdbc:flink://%s:%s?key1=val1&key2=val2", properties);
try (FlinkConnection connection = new FlinkConnection(driverUri)) {
assertEquals("val1", connection.getClientInfo("key1"));
assertEquals("val2", connection.getClientInfo("key2"));
assertEquals("val3", connection.getClientInfo("key3"));
connection.setClientInfo("key1", "val11");
Properties resetProp = new Properties();
resetProp.setProperty("key2", "val22");
resetProp.setProperty("key3", "val33");
resetProp.setProperty("key4", "val44");
connection.setClientInfo(resetProp);
assertEquals("val11", connection.getClientInfo("key1"));
assertEquals("val44", connection.getClientInfo("key4"));
Properties clientInfo = connection.getClientInfo();
assertEquals("val11", clientInfo.getProperty("key1"));
assertEquals("val22", clientInfo.getProperty("key2"));
assertEquals("val33", clientInfo.getProperty("key3"));
assertEquals("val44", clientInfo.getProperty("key4"));
}
}
}
|
FlinkConnectionTest
|
java
|
spring-projects__spring-boot
|
module/spring-boot-devtools/src/main/java/org/springframework/boot/devtools/filewatch/DirectorySnapshot.java
|
{
"start": 1184,
"end": 5102
}
|
class ____ {
private static final Set<String> DOTS = Collections.unmodifiableSet(new HashSet<>(Arrays.asList(".", "..")));
private final File directory;
private final Date time;
private final Set<FileSnapshot> files;
/**
* Create a new {@link DirectorySnapshot} for the given directory.
* @param directory the source directory
*/
DirectorySnapshot(File directory) {
Assert.notNull(directory, "'directory' must not be null");
Assert.isTrue(!directory.isFile(), () -> "'directory' [%s] must not be a file".formatted(directory));
this.directory = directory;
this.time = new Date();
Set<FileSnapshot> files = new LinkedHashSet<>();
collectFiles(directory, files);
this.files = Collections.unmodifiableSet(files);
}
private void collectFiles(File source, Set<FileSnapshot> result) {
File[] children = source.listFiles();
if (children != null) {
for (File child : children) {
if (child.isDirectory() && !DOTS.contains(child.getName())) {
collectFiles(child, result);
}
else if (child.isFile()) {
result.add(new FileSnapshot(child));
}
}
}
}
ChangedFiles getChangedFiles(DirectorySnapshot snapshot, @Nullable FileFilter triggerFilter) {
Assert.notNull(snapshot, "'snapshot' must not be null");
File directory = this.directory;
Assert.isTrue(snapshot.directory.equals(directory),
() -> "'snapshot' source directory must be '" + directory + "'");
Set<ChangedFile> changes = new LinkedHashSet<>();
Map<File, FileSnapshot> previousFiles = getFilesMap();
for (FileSnapshot currentFile : snapshot.files) {
if (acceptChangedFile(triggerFilter, currentFile)) {
FileSnapshot previousFile = previousFiles.remove(currentFile.getFile());
if (previousFile == null) {
changes.add(new ChangedFile(directory, currentFile.getFile(), Type.ADD));
}
else if (!previousFile.equals(currentFile)) {
changes.add(new ChangedFile(directory, currentFile.getFile(), Type.MODIFY));
}
}
}
for (FileSnapshot previousFile : previousFiles.values()) {
if (acceptChangedFile(triggerFilter, previousFile)) {
changes.add(new ChangedFile(directory, previousFile.getFile(), Type.DELETE));
}
}
return new ChangedFiles(directory, changes);
}
private boolean acceptChangedFile(@Nullable FileFilter triggerFilter, FileSnapshot file) {
return (triggerFilter == null || !triggerFilter.accept(file.getFile()));
}
private Map<File, FileSnapshot> getFilesMap() {
Map<File, FileSnapshot> files = new LinkedHashMap<>();
for (FileSnapshot file : this.files) {
files.put(file.getFile(), file);
}
return files;
}
@Override
public boolean equals(@Nullable Object obj) {
if (this == obj) {
return true;
}
if (obj == null) {
return false;
}
if (obj instanceof DirectorySnapshot other) {
return equals(other, null);
}
return super.equals(obj);
}
boolean equals(DirectorySnapshot other, @Nullable FileFilter filter) {
if (this.directory.equals(other.directory)) {
Set<FileSnapshot> ourFiles = filter(this.files, filter);
Set<FileSnapshot> otherFiles = filter(other.files, filter);
return ourFiles.equals(otherFiles);
}
return false;
}
private Set<FileSnapshot> filter(Set<FileSnapshot> source, @Nullable FileFilter filter) {
if (filter == null) {
return source;
}
Set<FileSnapshot> filtered = new LinkedHashSet<>();
for (FileSnapshot file : source) {
if (filter.accept(file.getFile())) {
filtered.add(file);
}
}
return filtered;
}
@Override
public int hashCode() {
int hashCode = this.directory.hashCode();
hashCode = 31 * hashCode + this.files.hashCode();
return hashCode;
}
/**
* Return the source directory of this snapshot.
* @return the source directory
*/
File getDirectory() {
return this.directory;
}
@Override
public String toString() {
return this.directory + " snapshot at " + this.time;
}
}
|
DirectorySnapshot
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/refaster/TemplatingTest.java
|
{
"start": 8178,
"end": 9051
}
|
class ____ {",
" public char example(String str) {",
" return str.charAt(Refaster.anyOf(1, 3, 5));",
" }",
"}");
assertThat(UTemplater.createTemplate(context, getMethodDeclaration("example")))
.isEqualTo(
ExpressionTemplate.create(
ImmutableMap.of("str", UClassType.create("java.lang.String")),
UMethodInvocation.create(
UMemberSelect.create(
UFreeIdent.create("str"),
"charAt",
UMethodType.create(UPrimitiveType.CHAR, UPrimitiveType.INT)),
UAnyOf.create(ULiteral.intLit(1), ULiteral.intLit(3), ULiteral.intLit(5))),
UPrimitiveType.CHAR));
}
@Test
public void unary() {
compile(
"import java.util.Arrays;",
"
|
InstanceMethodExample
|
java
|
apache__kafka
|
clients/src/main/java/org/apache/kafka/common/telemetry/internals/SinglePointMetric.java
|
{
"start": 1247,
"end": 1434
}
|
class ____ a telemetry metric that does not yet contain resource tags.
* These additional resource tags will be added before emitting metrics by the telemetry reporter.
*/
public
|
represents
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/legacy/MoreStuff.java
|
{
"start": 200,
"end": 1494
}
|
class ____ implements Serializable {
private String stringId;
private int intId;
private Collection stuffs;
private String name;
public boolean equals(Object other) {
return ( (MoreStuff) other ).getIntId()==intId && ( (MoreStuff) other ).getStringId().equals(stringId);
}
public int hashCode() {
return stringId.hashCode();
}
/**
* Returns the stuffs.
* @return Collection
*/
public Collection getStuffs() {
return stuffs;
}
/**
* Sets the stuffs.
* @param stuffs The stuffs to set
*/
public void setStuffs(Collection stuffs) {
this.stuffs = stuffs;
}
/**
* Returns the name.
* @return String
*/
public String getName() {
return name;
}
/**
* Sets the name.
* @param name The name to set
*/
public void setName(String name) {
this.name = name;
}
/**
* Returns the intId.
* @return int
*/
public int getIntId() {
return intId;
}
/**
* Returns the stringId.
* @return String
*/
public String getStringId() {
return stringId;
}
/**
* Sets the intId.
* @param intId The intId to set
*/
public void setIntId(int intId) {
this.intId = intId;
}
/**
* Sets the stringId.
* @param stringId The stringId to set
*/
public void setStringId(String stringId) {
this.stringId = stringId;
}
}
|
MoreStuff
|
java
|
micronaut-projects__micronaut-core
|
http-server/src/main/java/io/micronaut/http/server/util/ProxyHeaderParser.java
|
{
"start": 1125,
"end": 5994
}
|
class ____ {
private static final String FOR = "for";
private static final String BY = "by";
private static final String HOST = "host";
private static final String PROTO = "proto";
private static final String PARAM_DELIMITER = ";";
private static final String ELEMENT_DELIMITER = ",";
private static final String PAIR_DELIMITER = "=";
private static final String X_FORWARDED_PROTO = "X-Forwarded-Proto";
private static final String X_FORWARDED_HOST = "X-Forwarded-Host";
private static final String X_FORWARDED_FOR = "X-Forwarded-For";
private static final String X_FORWARDED_PORT = "X-Forwarded-Port";
private List<String> forwardedFor = new ArrayList<>();
private String forwardedBy = null;
private String forwardedHost = null;
private String forwardedProto = null;
private Integer forwardedPort = null;
/**
* @param request The request
*/
public ProxyHeaderParser(HttpRequest request) {
HttpHeaders headers = request.getHeaders();
if (headers.contains(HttpHeaders.FORWARDED)) {
headers.getAll(HttpHeaders.FORWARDED)
.stream()
.flatMap(header -> {
if (header.contains(ELEMENT_DELIMITER)) {
return Arrays.stream(header.split(ELEMENT_DELIMITER));
} else {
return Stream.of(header);
}
}).forEach(forwarded -> {
String[] params = forwarded.split(PARAM_DELIMITER);
for (String param: params) {
String[] parts = param.split(PAIR_DELIMITER);
if (parts.length == 2) {
String key = parts[0].trim();
String value = trimQuotes(parts[1].trim());
if (key.equalsIgnoreCase(FOR)) {
forwardedFor.add(value);
} else if (key.equalsIgnoreCase(BY) && forwardedBy == null) {
forwardedBy = value;
} else if (key.equalsIgnoreCase(PROTO) && forwardedProto == null) {
forwardedProto = value;
} else if (key.equalsIgnoreCase(HOST) && forwardedHost == null) {
if (value.contains(":")) {
String[] host = value.split(":");
forwardedHost = host[0];
forwardedPort = Integer.valueOf(host[1]);
} else {
forwardedHost = value;
}
}
}
}
});
} else {
forwardedProto = StringUtils.trimToNull(headers.get(X_FORWARDED_PROTO));
forwardedHost = headers.get(X_FORWARDED_HOST);
try {
if (forwardedHost != null && forwardedHost.contains(":")) {
String[] parts = forwardedHost.split(":");
forwardedHost = parts[0];
forwardedPort = Integer.valueOf(parts[1]);
} else {
String portHeader = headers.get(X_FORWARDED_PORT);
if (portHeader != null) {
forwardedPort = Integer.valueOf(portHeader);
}
}
} catch (NumberFormatException ignored) { }
String forwardedForHeader = headers.get(X_FORWARDED_FOR);
if (forwardedForHeader != null) {
forwardedFor = Arrays.stream(forwardedForHeader.split(ELEMENT_DELIMITER))
.map(String::trim)
.collect(Collectors.toList());
}
}
}
/**
* @return The client addresses
*/
@NonNull
public List<String> getFor() {
return forwardedFor;
}
/**
* @return The proxy
*/
public String getBy() {
return forwardedBy;
}
/**
* @return The host
*/
public String getHost() {
return forwardedHost;
}
/**
* @return The scheme or protocol
*/
public String getScheme() {
return forwardedProto;
}
/**
* @return The port
*/
public Integer getPort() {
return forwardedPort;
}
private String trimQuotes(String value) {
if (value != null && value.startsWith("\"")) {
return value.substring(1, value.length() - 1);
} else {
return value;
}
}
}
|
ProxyHeaderParser
|
java
|
quarkusio__quarkus
|
test-framework/common/src/main/java/io/quarkus/test/common/TestInstantiator.java
|
{
"start": 1399,
"end": 2234
}
|
class ____ {
public static Object instantiate(Class<?> clazz) {
CDI<Object> cdi = CDI.current();
Instance<?> instance = cdi.select(clazz);
if (instance.isResolvable()) {
return instance.get();
}
if (clazz.getTypeParameters().length > 0) {
// fallback for generic test classes, whose set of bean types
// does not contain a `Class` but a `ParameterizedType` instead
for (Instance.Handle<Object> handle : cdi.select(Object.class).handles()) {
if (clazz.equals(handle.getBean().getBeanClass())) {
return handle.get();
}
}
}
throw new IllegalStateException("No bean: " + clazz);
}
}
}
|
Delegate
|
java
|
apache__maven
|
impl/maven-logging/src/test/java/org/apache/maven/slf4j/MavenBaseLoggerTimestampTest.java
|
{
"start": 1229,
"end": 5310
}
|
class ____ {
private ByteArrayOutputStream logOutput;
private PrintStream originalErr;
private static final String LOGGER_NAME = "test.logger";
private MavenBaseLogger logger;
@BeforeEach
void setUp() {
// Reset configuration before each test
System.clearProperty(Constants.MAVEN_LOGGER_SHOW_DATE_TIME);
System.clearProperty(Constants.MAVEN_LOGGER_DATE_TIME_FORMAT);
// Reset static initialization flag
MavenBaseLogger.initialized = false;
// Capture System.err
logOutput = new ByteArrayOutputStream();
originalErr = System.err;
System.setErr(new PrintStream(logOutput));
}
@AfterEach
void tearDown() {
System.setErr(originalErr);
System.clearProperty(Constants.MAVEN_LOGGER_SHOW_DATE_TIME);
System.clearProperty(Constants.MAVEN_LOGGER_DATE_TIME_FORMAT);
MavenBaseLogger.initialized = false;
}
@Test
void whenShowDateTimeIsFalseShouldNotIncludeTimestamp() {
// Given
System.setProperty(Constants.MAVEN_LOGGER_SHOW_DATE_TIME, "false");
initializeLogger();
// When
logger.info("Test message");
String output = getLastLine(logOutput.toString());
// Then
assertTrue(
output.matches("^\\[main\\] INFO test.logger - Test message$"),
"Should not include timestamp but was: " + output);
}
@Test
void whenShowDateTimeIsTrueWithoutFormatShouldShowElapsedTime() { // Changed test name and expectation
// Given
System.setProperty(Constants.MAVEN_LOGGER_SHOW_DATE_TIME, "true");
initializeLogger();
// When
logger.info("Test message");
String output = getLastLine(logOutput.toString());
// Then
assertTrue(
output.matches("^\\d+ \\[main\\] INFO test.logger - Test message$"),
"Should show elapsed time when no format specified but was: " + output);
}
@ParameterizedTest
@ValueSource(strings = {"yyyy-MM-dd HH:mm:ss", "dd/MM/yyyy HH:mm:ss.SSS", "HH:mm:ss"})
void whenCustomDateFormatShouldFormatCorrectly(String dateFormat) {
// Given
System.setProperty(Constants.MAVEN_LOGGER_SHOW_DATE_TIME, "true");
System.setProperty(Constants.MAVEN_LOGGER_DATE_TIME_FORMAT, dateFormat);
initializeLogger();
// When
logger.info("Test message");
String output = getLastLine(logOutput.toString());
// Then
String patternStr = dateFormat
.replace("yyyy", "\\d{4}")
.replace("MM", "\\d{2}")
.replace("dd", "\\d{2}")
.replace("HH", "\\d{2}")
.replace("mm", "\\d{2}")
.replace("ss", "\\d{2}")
.replace("SSS", "\\d{3}")
.replace("/", "\\/")
.replace(".", "\\.");
assertTrue(
output.matches("^" + patternStr + " \\[main\\] INFO test.logger - Test message$"),
"Should match custom date format but was: " + output);
}
@Test
void whenInvalidDateFormatShouldUseElapsedMillis() {
// Given
System.setProperty(Constants.MAVEN_LOGGER_SHOW_DATE_TIME, "true");
System.setProperty(Constants.MAVEN_LOGGER_DATE_TIME_FORMAT, "invalid-format");
initializeLogger();
// When
logger.info("Test message");
String output = getLastLine(logOutput.toString());
// Then
assertTrue(
output.matches("^\\d+ \\[main\\] INFO test.logger - Test message$"),
"Should show elapsed milliseconds when format is invalid but was: " + output);
}
private void initializeLogger() {
MavenBaseLogger.CONFIG_PARAMS.init();
logger = new MavenBaseLogger(LOGGER_NAME);
logOutput.reset();
}
private String getLastLine(String output) {
String[] lines = output.split("\\R");
return lines[lines.length - 1].trim();
}
}
|
MavenBaseLoggerTimestampTest
|
java
|
quarkusio__quarkus
|
core/deployment/src/main/java/io/quarkus/deployment/execannotations/ExecutionModelAnnotationsProcessor.java
|
{
"start": 894,
"end": 5514
}
|
class ____ {
private static final Logger log = Logger.getLogger(ExecutionModelAnnotationsProcessor.class);
private static final DotName BLOCKING = DotName.createSimple(Blocking.class);
private static final DotName NON_BLOCKING = DotName.createSimple(NonBlocking.class);
private static final DotName RUN_ON_VIRTUAL_THREAD = DotName.createSimple(RunOnVirtualThread.class);
@BuildStep
@Produce(GeneratedClassBuildItem.class) // only to make sure this build step is executed
void check(ExecutionModelAnnotationsConfig config, CombinedIndexBuildItem index,
List<ExecutionModelAnnotationsAllowedBuildItem> predicates) {
if (config.detectionMode() == ExecutionModelAnnotationsConfig.Mode.DISABLED) {
return;
}
StringBuilder message = new StringBuilder("\n");
doCheck(message, index.getIndex(), predicates, BLOCKING);
doCheck(message, index.getIndex(), predicates, NON_BLOCKING);
doCheck(message, index.getIndex(), predicates, RUN_ON_VIRTUAL_THREAD);
if (message.length() > 1) {
message.append("The @Blocking, @NonBlocking and @RunOnVirtualThread annotations may only be used "
+ "on \"entrypoint\" methods (methods invoked by various frameworks in Quarkus)\n");
message.append("Using the @Blocking, @NonBlocking and @RunOnVirtualThread annotations on methods "
+ "that can only be invoked by application code is invalid");
if (config.detectionMode() == ExecutionModelAnnotationsConfig.Mode.WARN) {
log.warn(message);
} else {
throw new IllegalStateException(message.toString());
}
}
}
private void doCheck(StringBuilder message, IndexView index,
List<ExecutionModelAnnotationsAllowedBuildItem> predicates, DotName annotationName) {
List<String> badMethods = new ArrayList<>();
for (AnnotationInstance annotation : index.getAnnotations(annotationName)) {
// these annotations may be put on classes too, but we'll ignore that for now
if (annotation.target() != null && annotation.target().kind() == AnnotationTarget.Kind.METHOD) {
MethodInfo method = annotation.target().asMethod();
boolean allowed = false;
for (ExecutionModelAnnotationsAllowedBuildItem predicate : predicates) {
if (predicate.matches(method)) {
allowed = true;
break;
}
}
if (!allowed) {
badMethods.add(methodToString(method));
}
}
}
if (!badMethods.isEmpty()) {
message.append("Wrong usage(s) of @").append(annotationName.withoutPackagePrefix()).append(" found:\n");
for (String method : badMethods) {
message.append("\t- ").append(method).append("\n");
}
}
}
/**
* @deprecated this method will be removed in Quarkus 3.24, which gives extensions 2 releases
* to start producing {@code JsonRPCProvidersBuildItem} always, not just in dev mode
*/
@Deprecated(since = "3.22", forRemoval = true)
@BuildStep
ExecutionModelAnnotationsAllowedBuildItem devuiJsonRpcServices() {
return new ExecutionModelAnnotationsAllowedBuildItem(new Predicate<MethodInfo>() {
@Override
public boolean test(MethodInfo method) {
// gross hack to allow methods declared in Dev UI JSON RPC service classes,
// as the proper way (consuming `JsonRPCProvidersBuildItem`) only works in dev mode
String clazz = method.declaringClass().name().toString().toLowerCase(Locale.ROOT);
return clazz.startsWith("io.quarkus.")
|| clazz.startsWith("io.quarkiverse.")
|| clazz.endsWith("jsonrpcservice");
}
});
}
@SuppressForbidden(reason = "Using Type.toString() to build an informative message")
private String methodToString(MethodInfo method) {
StringBuilder result = new StringBuilder();
result.append(method.declaringClass().name()).append('.').append(method.name());
StringJoiner joiner = new StringJoiner(", ", "(", ")");
for (Type parameter : method.parameterTypes()) {
joiner.add(parameter.toString());
}
result.append(joiner);
return result.toString();
}
}
|
ExecutionModelAnnotationsProcessor
|
java
|
spring-projects__spring-boot
|
build-plugin/spring-boot-maven-plugin/src/test/java/org/springframework/boot/maven/CommandLineBuilderTests.java
|
{
"start": 1468,
"end": 5318
}
|
class ____ {
public static final String CLASS_NAME = ClassWithMainMethod.class.getName();
@Test
@SuppressWarnings("NullAway") // Maven can't deal with @Nullable arrays / varargs
void buildWithNullJvmArgumentsIsIgnored() {
assertThat(CommandLineBuilder.forMainClass(CLASS_NAME).withJvmArguments((String[]) null).build())
.containsExactly(CLASS_NAME);
}
@Test
void buildWithNullIntermediateJvmArgumentIsIgnored() {
assertThat(CommandLineBuilder.forMainClass(CLASS_NAME)
.withJvmArguments("-verbose:class", null, "-verbose:gc")
.build()).containsExactly("-verbose:class", "-verbose:gc", CLASS_NAME);
}
@Test
void buildWithJvmArgument() {
assertThat(CommandLineBuilder.forMainClass(CLASS_NAME).withJvmArguments("-verbose:class").build())
.containsExactly("-verbose:class", CLASS_NAME);
}
@Test
void buildWithNullSystemPropertyIsIgnored() {
assertThat(CommandLineBuilder.forMainClass(CLASS_NAME).withSystemProperties(null).build())
.containsExactly(CLASS_NAME);
}
@Test
void buildWithSystemProperty() {
assertThat(CommandLineBuilder.forMainClass(CLASS_NAME).withSystemProperties(Map.of("flag", "enabled")).build())
.containsExactly("-Dflag=enabled", CLASS_NAME);
}
@Test
@SuppressWarnings("NullAway") // Maven can't deal with @Nullable arrays / varargs
void buildWithNullArgumentsIsIgnored() {
assertThat(CommandLineBuilder.forMainClass(CLASS_NAME).withArguments((String[]) null).build())
.containsExactly(CLASS_NAME);
}
@Test
void buildWithNullIntermediateArgumentIsIgnored() {
assertThat(CommandLineBuilder.forMainClass(CLASS_NAME).withArguments("--test", null, "--another").build())
.containsExactly(CLASS_NAME, "--test", "--another");
}
@Test
@DisabledOnOs(OS.WINDOWS)
void buildWithClassPath(@TempDir Path tempDir) throws Exception {
Path file = tempDir.resolve("test.jar");
Path file1 = tempDir.resolve("test1.jar");
assertThat(CommandLineBuilder.forMainClass(CLASS_NAME)
.withClasspath(file.toUri().toURL(), file1.toUri().toURL())
.build()).containsExactly("-cp", file + File.pathSeparator + file1, CLASS_NAME);
}
@Test
@EnabledOnOs(OS.WINDOWS)
void buildWithClassPathOnWindows(@TempDir Path tempDir) throws Exception {
Path file = tempDir.resolve("test.jar");
Path file1 = tempDir.resolve("test1.jar");
List<String> args = CommandLineBuilder.forMainClass(CLASS_NAME)
.withClasspath(file.toUri().toURL(), file1.toUri().toURL())
.build();
assertThat(args).hasSize(3);
assertThat(args.get(0)).isEqualTo("-cp");
assertThat(args.get(1)).startsWith("@");
assertThat(args.get(2)).isEqualTo(CLASS_NAME);
assertThat(Paths.get(args.get(1).substring(1)))
.hasContent("\"" + (file + File.pathSeparator + file1).replace("\\", "\\\\") + "\"");
}
@Test
void buildAndRunWithLongClassPath() throws IOException, InterruptedException {
StringBuilder classPath = new StringBuilder(ManagementFactory.getRuntimeMXBean().getClassPath());
// Simulates [CreateProcess error=206, The filename or extension is too long]
while (classPath.length() < 35000) {
classPath.append(File.pathSeparator).append(classPath);
}
URL[] urls = Arrays.stream(classPath.toString().split(File.pathSeparator)).map(this::toURL).toArray(URL[]::new);
List<String> command = CommandLineBuilder.forMainClass(ClassWithMainMethod.class.getName())
.withClasspath(urls)
.build();
ProcessBuilder pb = new JavaExecutable().processBuilder(command.toArray(new String[0]));
Process process = pb.start();
assertThat(process.waitFor()).isEqualTo(0);
try (InputStream inputStream = process.getInputStream()) {
assertThat(inputStream).hasContent("Hello World");
}
}
private URL toURL(String path) {
try {
return Paths.get(path).toUri().toURL();
}
catch (MalformedURLException ex) {
throw new RuntimeException(ex);
}
}
}
|
CommandLineBuilderTests
|
java
|
elastic__elasticsearch
|
qa/multi-cluster-search/src/test/java/org/elasticsearch/search/CCSDuelIT.java
|
{
"start": 6181,
"end": 6908
}
|
class ____ extends ESRestTestCase {
private static final String INDEX_NAME = "ccs_duel_index";
private static final String REMOTE_INDEX_NAME = "my_remote_cluster:" + INDEX_NAME;
private static final String[] TAGS = new String[] { "java", "xml", "sql", "html", "php", "ruby", "python", "perl" };
private static boolean init = false;
@Before
public void init() throws Exception {
super.initClient();
if (init == false) {
init = true;
String destinationCluster = System.getProperty("tests.rest.suite");
// we index docs with private randomness otherwise the two clusters end up with exactly the same documents
// given that this test
|
CCSDuelIT
|
java
|
apache__camel
|
core/camel-core-processor/src/main/java/org/apache/camel/processor/RollbackProcessor.java
|
{
"start": 1169,
"end": 3486
}
|
class ____ extends BaseProcessorSupport implements Traceable, IdAware, RouteIdAware {
private String id;
private String routeId;
private boolean markRollbackOnly;
private boolean markRollbackOnlyLast;
private String message;
public RollbackProcessor() {
}
public RollbackProcessor(String message) {
this.message = message;
}
@Override
public boolean process(Exchange exchange, AsyncCallback callback) {
if (isMarkRollbackOnlyLast()) {
// only mark the last route (current) as rollback
// this is needed when you have multiple transactions in play
exchange.setRollbackOnlyLast(true);
} else {
// default to mark the entire route as rollback
exchange.setRollbackOnly(true);
}
if (markRollbackOnly || markRollbackOnlyLast) {
// do not do anything more as we should only mark the rollback
callback.done(true);
return true;
}
// throw exception to rollback
if (message != null) {
exchange.setException(new RollbackExchangeException(message, exchange));
} else {
exchange.setException(new RollbackExchangeException(exchange));
}
callback.done(true);
return true;
}
@Override
public String toString() {
return id;
}
@Override
public String getTraceLabel() {
return "rollback";
}
@Override
public String getId() {
return id;
}
@Override
public void setId(String id) {
this.id = id;
}
@Override
public String getRouteId() {
return routeId;
}
@Override
public void setRouteId(String routeId) {
this.routeId = routeId;
}
public String getMessage() {
return message;
}
public boolean isMarkRollbackOnly() {
return markRollbackOnly;
}
public void setMarkRollbackOnly(boolean markRollbackOnly) {
this.markRollbackOnly = markRollbackOnly;
}
public boolean isMarkRollbackOnlyLast() {
return markRollbackOnlyLast;
}
public void setMarkRollbackOnlyLast(boolean markRollbackOnlyLast) {
this.markRollbackOnlyLast = markRollbackOnlyLast;
}
}
|
RollbackProcessor
|
java
|
apache__camel
|
core/camel-core/src/test/java/org/apache/camel/component/file/FileConsumerFileFilterTest.java
|
{
"start": 1151,
"end": 2665
}
|
class ____ extends ContextTestSupport {
@Override
protected Registry createCamelRegistry() throws Exception {
Registry jndi = super.createCamelRegistry();
jndi.bind("myFilter", new MyFileFilter<>());
return jndi;
}
@Test
public void testFilterFiles() throws Exception {
MockEndpoint mock = getMockEndpoint("mock:result");
mock.expectedMessageCount(0);
template.sendBodyAndHeader(fileUri(), "This is a file to be filtered",
Exchange.FILE_NAME,
"skipme.txt");
mock.setResultWaitTime(100);
mock.assertIsSatisfied();
}
@Test
public void testFilterFilesWithARegularFile() throws Exception {
MockEndpoint mock = getMockEndpoint("mock:result");
mock.expectedBodiesReceived("Hello World");
template.sendBodyAndHeader(fileUri(), "This is a file to be filtered",
Exchange.FILE_NAME,
"skipme.txt");
template.sendBodyAndHeader(fileUri(), "Hello World", Exchange.FILE_NAME,
"hello.txt");
mock.assertIsSatisfied();
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
public void configure() {
from(fileUri("?initialDelay=0&delay=10&filter=#myFilter"))
.convertBodyTo(String.class).to("mock:result");
}
};
}
// START SNIPPET: e1
public static
|
FileConsumerFileFilterTest
|
java
|
apache__logging-log4j2
|
log4j-api/src/main/java/org/apache/logging/log4j/util/LoaderUtil.java
|
{
"start": 9164,
"end": 9275
}
|
class ____ to load
* @return the loaded class
* @throws NoClassDefFoundError if the specified
|
name
|
java
|
apache__flink
|
flink-core-api/src/main/java/org/apache/flink/api/java/tuple/builder/Tuple7Builder.java
|
{
"start": 1560,
"end": 2063
}
|
class ____<T0, T1, T2, T3, T4, T5, T6> {
private List<Tuple7<T0, T1, T2, T3, T4, T5, T6>> tuples = new ArrayList<>();
public Tuple7Builder<T0, T1, T2, T3, T4, T5, T6> add(
T0 f0, T1 f1, T2 f2, T3 f3, T4 f4, T5 f5, T6 f6) {
tuples.add(new Tuple7<>(f0, f1, f2, f3, f4, f5, f6));
return this;
}
@SuppressWarnings("unchecked")
public Tuple7<T0, T1, T2, T3, T4, T5, T6>[] build() {
return tuples.toArray(new Tuple7[tuples.size()]);
}
}
|
Tuple7Builder
|
java
|
reactor__reactor-core
|
reactor-core/src/main/java/reactor/core/publisher/FluxFlatMap.java
|
{
"start": 1762,
"end": 3686
}
|
class ____<T, R> extends InternalFluxOperator<T, R> {
final Function<? super T, ? extends Publisher<? extends R>> mapper;
final boolean delayError;
final int maxConcurrency;
final Supplier<? extends Queue<R>> mainQueueSupplier;
final int prefetch;
final Supplier<? extends Queue<R>> innerQueueSupplier;
FluxFlatMap(Flux<? extends T> source,
Function<? super T, ? extends Publisher<? extends R>> mapper,
boolean delayError,
int maxConcurrency,
Supplier<? extends Queue<R>> mainQueueSupplier,
int prefetch,
Supplier<? extends Queue<R>> innerQueueSupplier) {
super(source);
if (prefetch <= 0) {
throw new IllegalArgumentException("prefetch > 0 required but it was " + prefetch);
}
if (maxConcurrency <= 0) {
throw new IllegalArgumentException("maxConcurrency > 0 required but it was " + maxConcurrency);
}
this.mapper = Objects.requireNonNull(mapper, "mapper");
this.delayError = delayError;
this.prefetch = prefetch;
this.maxConcurrency = maxConcurrency;
this.mainQueueSupplier =
Objects.requireNonNull(mainQueueSupplier, "mainQueueSupplier");
this.innerQueueSupplier =
Objects.requireNonNull(innerQueueSupplier, "innerQueueSupplier");
}
@Override
public int getPrefetch() {
return prefetch;
}
@Override
public @Nullable CoreSubscriber<? super T> subscribeOrReturn(CoreSubscriber<? super R> actual) {
if (trySubscribeScalarMap(source, actual, mapper, false, true)) {
return null;
}
return new FlatMapMain<>(actual,
mapper,
delayError,
maxConcurrency,
mainQueueSupplier,
prefetch, innerQueueSupplier);
}
/**
* Checks if the source is a Supplier and if the mapper's publisher output is also
* a supplier, thus avoiding subscribing to any of them.
*
* @param source the source publisher
* @param s the end consumer
* @param mapper the mapper function
* @param fuseableExpected if true, the parent
|
FluxFlatMap
|
java
|
apache__camel
|
components/camel-azure/camel-azure-files/src/main/java/org/apache/camel/component/file/azure/FilesURIStrings.java
|
{
"start": 1251,
"end": 2551
}
|
class ____ {
public static final char QUERY_SEPARATOR = '?';
private FilesURIStrings() {
}
/**
* Get the base uri part before the options as they can be non URI valid such as the expression using $ chars and
* the URI constructor will regard $ as an illegal character and we don't want to enforce end users to to escape the
* $ for the expression (file language)
*/
static URI getBaseURI(String uri) throws URISyntaxException {
String baseUri = StringHelper.before(uri, QUERY_SEPARATOR, uri);
return new URI(baseUri);
}
static String reconstructBase64EncodedValue(String value) {
// base64 allows + and =, URI encoded as %2B and %3D
// Camel URI configurers decode both + and %2B to a space
return value.replace(" ", "+");
}
/**
* Uses encoding style expected by the files service: it preserves time separator ':' and encodes base64 plus '+',
* slash '/' and padding '='.
*/
static String encodeTokenValue(String value) throws URISyntaxException {
return URISupport.createQueryString(Collections.singletonMap("x", value)).substring(2)
.replace("+", "%2B") // sig is base64
.replace("%3A", ":"); // se has time separator
}
}
|
FilesURIStrings
|
java
|
netty__netty
|
codec-memcache/src/main/java/io/netty/handler/codec/memcache/binary/BinaryMemcacheResponseStatus.java
|
{
"start": 847,
"end": 1505
}
|
class ____ {
private BinaryMemcacheResponseStatus() {
// disallow construction
}
public static final short SUCCESS = 0x00;
public static final short KEY_ENOENT = 0x01;
public static final short KEY_EEXISTS = 0x02;
public static final short E2BIG = 0x03;
public static final short EINVA = 0x04;
public static final short NOT_STORED = 0x05;
public static final short DELTA_BADVAL = 0x06;
public static final short AUTH_ERROR = 0x20;
public static final short AUTH_CONTINUE = 0x21;
public static final short UNKNOWN_COMMAND = 0x81;
public static final short ENOMEM = 0x82;
}
|
BinaryMemcacheResponseStatus
|
java
|
apache__flink
|
flink-metrics/flink-metrics-dropwizard/src/main/java/org/apache/flink/dropwizard/metrics/DropwizardHistogramStatistics.java
|
{
"start": 1066,
"end": 1153
}
|
class ____ a {@link Snapshot} instance and forwards the method calls accordingly.
*/
|
wraps
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/UnnecessaryQualifierTest.java
|
{
"start": 1318,
"end": 1695
}
|
interface ____ {}
""")
.addSourceLines(
"ProvidesSomething.java",
"""
import static java.lang.annotation.RetentionPolicy.RUNTIME;
import java.lang.annotation.Retention;
import javax.inject.Qualifier;
@Qualifier
@Retention(RUNTIME)
public @
|
Qual
|
java
|
apache__camel
|
core/camel-api/src/main/java/org/apache/camel/Experimental.java
|
{
"start": 1195,
"end": 1252
}
|
interface ____ {
int revision() default 1;
}
|
Experimental
|
java
|
google__guice
|
core/test/com/google/inject/BoundProviderTest.java
|
{
"start": 761,
"end": 2343
}
|
class ____ extends TestCase {
public void testFooProvider() throws CreationException {
Injector injector =
Guice.createInjector(
new AbstractModule() {
@Override
protected void configure() {
bind(Foo.class).toProvider(FooProvider.class);
}
});
Foo a = injector.getInstance(Foo.class);
Foo b = injector.getInstance(Foo.class);
assertEquals(0, a.i);
assertEquals(0, b.i);
assertNotNull(a.bar);
assertNotNull(b.bar);
assertNotSame(a.bar, b.bar);
}
public void testSingletonFooProvider() throws CreationException {
Injector injector =
Guice.createInjector(
new AbstractModule() {
@Override
protected void configure() {
bind(Foo.class).toProvider(SingletonFooProvider.class);
bind(Foo.class)
.annotatedWith(named("javax"))
.toProvider(JavaxInjectSingletonFooProvider.class);
}
});
Foo a = injector.getInstance(Foo.class);
Foo b = injector.getInstance(Foo.class);
assertEquals(0, a.i);
assertEquals(1, b.i);
assertNotNull(a.bar);
assertNotNull(b.bar);
assertSame(a.bar, b.bar);
var javaxKey = Key.get(Foo.class, named("javax"));
a = injector.getInstance(javaxKey);
b = injector.getInstance(javaxKey);
assertEquals(0, a.i);
assertEquals(1, b.i);
assertNotNull(a.bar);
assertNotNull(b.bar);
assertSame(a.bar, b.bar);
}
static
|
BoundProviderTest
|
java
|
apache__hadoop
|
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/lib/MultipleOutputs.java
|
{
"start": 9253,
"end": 9367
}
|
class ____ a named output.
*
* @param conf job conf
* @param namedOutput named output
* @return
|
for
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/common/network/IfConfig.java
|
{
"start": 848,
"end": 926
}
|
class ____ log {@code ifconfig}-style output at DEBUG logging.
*/
public final
|
to
|
java
|
alibaba__fastjson
|
src/test/java/com/alibaba/json/bvt/parser/deser/DefaultObjectDeserializerTest_collection.java
|
{
"start": 238,
"end": 1044
}
|
class ____ extends TestCase {
public void test_0() throws Exception {
String input = "[{}]";
List<HashMap> map = JSON.parseObject(input,
new TypeReference<List<HashMap>>() {
}.getType());
Assert.assertEquals(HashMap.class, map.get(0).getClass());
}
public void test_1() throws Exception {
String input = "{}";
BO<HashMap> map = JSON.parseObject(input,
new TypeReference<BO<HashMap>>() {
}.getType());
}
public void test_2() throws Exception {
Exception error = null;
try {
String input = "{'map':{}}";
MyMap<String, HashMap> map = JSON.parseObject(input,
new TypeReference<MyMap<String, HashMap>>() {
}.getType());
} catch (Exception ex) {
error = ex;
}
Assert.assertNotNull(error);
}
public static
|
DefaultObjectDeserializerTest_collection
|
java
|
alibaba__nacos
|
core/src/main/java/com/alibaba/nacos/core/utils/GlobalExecutor.java
|
{
"start": 1207,
"end": 4057
}
|
class ____ {
private static final ScheduledExecutorService COMMON_EXECUTOR = ExecutorFactory.Managed
.newScheduledExecutorService(ClassUtils.getCanonicalName(GlobalExecutor.class), 4,
new NameThreadFactory("com.alibaba.nacos.core.common"));
private static final ScheduledExecutorService DISTRO_EXECUTOR = ExecutorFactory.Managed
.newScheduledExecutorService(ClassUtils.getCanonicalName(GlobalExecutor.class),
EnvUtil.getAvailableProcessors(2), new NameThreadFactory("com.alibaba.nacos.core.protocal.distro"));
public static final ThreadPoolExecutor sdkRpcExecutor = new ThreadPoolExecutor(
EnvUtil.getAvailableProcessors(RemoteUtils.getRemoteExecutorTimesOfProcessors()),
EnvUtil.getAvailableProcessors(RemoteUtils.getRemoteExecutorTimesOfProcessors()), 60L, TimeUnit.SECONDS,
new LinkedBlockingQueue<>(RemoteUtils.getRemoteExecutorQueueSize()),
new ThreadFactoryBuilder().daemon(true).nameFormat("nacos-grpc-executor-%d").build());
public static final ThreadPoolExecutor clusterRpcExecutor = new ThreadPoolExecutor(
EnvUtil.getAvailableProcessors(RemoteUtils.getRemoteExecutorTimesOfProcessors()),
EnvUtil.getAvailableProcessors(RemoteUtils.getRemoteExecutorTimesOfProcessors()), 60L, TimeUnit.SECONDS,
new LinkedBlockingQueue<>(RemoteUtils.getRemoteExecutorQueueSize()),
new ThreadFactoryBuilder().daemon(true).nameFormat("nacos-cluster-grpc-executor-%d").build());
public static void runWithoutThread(Runnable runnable) {
runnable.run();
}
public static void executeByCommon(Runnable runnable) {
if (COMMON_EXECUTOR.isShutdown()) {
return;
}
COMMON_EXECUTOR.execute(runnable);
}
public static void scheduleByCommon(Runnable runnable, long delayMs) {
if (COMMON_EXECUTOR.isShutdown()) {
return;
}
COMMON_EXECUTOR.schedule(runnable, delayMs, TimeUnit.MILLISECONDS);
}
public static void scheduleWithFixDelayByCommon(Runnable runnable, long delayMs) {
if (COMMON_EXECUTOR.isShutdown()) {
return;
}
COMMON_EXECUTOR.scheduleWithFixedDelay(runnable, delayMs, delayMs, TimeUnit.MILLISECONDS);
}
public static void submitLoadDataTask(Runnable runnable) {
DISTRO_EXECUTOR.submit(runnable);
}
public static void submitLoadDataTask(Runnable runnable, long delay) {
DISTRO_EXECUTOR.schedule(runnable, delay, TimeUnit.MILLISECONDS);
}
public static void schedulePartitionDataTimedSync(Runnable runnable, long interval) {
DISTRO_EXECUTOR.scheduleWithFixedDelay(runnable, interval, interval, TimeUnit.MILLISECONDS);
}
}
|
GlobalExecutor
|
java
|
apache__camel
|
tooling/maven/camel-package-maven-plugin/src/main/java/org/apache/camel/maven/packaging/ModelWriterGeneratorMojo.java
|
{
"start": 17507,
"end": 22034
}
|
class ____ {
private final Member field;
private final Member getter;
private final Member setter;
private final String name;
private final Type type;
public Property(Member field, Member getter, Member setter, String name, Type type) {
this.field = field;
this.getter = getter;
this.setter = setter;
this.name = name;
this.type = type;
}
@Override
public String toString() {
return "Property{" +
"name='" + name + '\'' +
", type=" + type +
", field=" + field +
", getter=" + getter +
", setter=" + setter +
'}';
}
private Stream<Member> members() {
return Stream.of(field, getter, setter).filter(Objects::nonNull);
}
public String getName() {
return name;
}
public Type getType() {
return type;
}
public GenericType getGenericType() {
return new GenericType(type);
}
public String getGetter() {
return Optional.ofNullable(getter)
.orElseThrow(() -> new IllegalArgumentException("No getter for property defined by " + members().toList()))
.getName();
}
@SuppressWarnings("unchecked")
public <T extends Annotation> T getAnnotation(Class<T> annotationClass) {
return (T) annotations().filter(annotationClass::isInstance).findFirst().orElse(null);
}
public <T extends Annotation> boolean hasAnnotation(Class<T> annotationClass) {
return getAnnotation(annotationClass) != null;
}
private Stream<? extends Annotation> annotations() {
return members().flatMap(m -> Stream.of(((AnnotatedElement) m).getAnnotations()));
}
public boolean isAttribute() {
return hasAnnotation(XmlAttribute.class);
}
public boolean isAnyAttribute() {
return hasAnnotation(XmlAnyAttribute.class);
}
public boolean isValue() {
return hasAnnotation(XmlValue.class);
}
public boolean isElement() {
return !isAttribute() && !isAnyAttribute() && !isValue();
}
public boolean isElementRefs() {
return hasAnnotation(XmlElementRefs.class);
}
public boolean isElementRef() {
return hasAnnotation(XmlElementRef.class);
// || member.getDeclaringClass() == outputDefinitionClass && "setOutputs".equals(member.getName());
}
public XmlElementRefs getXmlElementRefs() {
return getAnnotation(XmlElementRefs.class);
}
public XmlElementRef getXmlElementRef() {
return getAnnotation(XmlElementRef.class);
}
public XmlElements getXmlElements() {
return getAnnotation(XmlElements.class);
}
public XmlElement getXmlElement() {
return getAnnotation(XmlElement.class);
}
public XmlAnyElement getXmlAnyElement() {
return getAnnotation(XmlAnyElement.class);
}
public XmlRootElement getXmlRootElement() {
return getAnnotation(XmlRootElement.class);
}
public XmlElementWrapper getXmlElementWrapper() {
return getAnnotation(XmlElementWrapper.class);
}
public XmlJavaTypeAdapter getXmlJavaTypeAdapter() {
return getAnnotation(XmlJavaTypeAdapter.class);
}
public String getAttributeName() {
String an = getAnnotation(XmlAttribute.class).name();
if ("##default".equals(an)) {
an = getName();
}
return an;
}
public String getDefaultValue() {
String answer = null;
Metadata m = getAnnotation(Metadata.class);
if (m != null) {
answer = m.defaultValue();
}
if (answer == null || answer.isBlank()) {
answer = "null";
} else {
if (answer.equals("\"")) {
answer = "\\\"";
}
if (answer.equals("\\")) {
answer = "\\\\";
}
answer = "\"" + answer + "\"";
}
return answer;
}
}
}
|
Property
|
java
|
apache__camel
|
dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/RefEndpointBuilderFactory.java
|
{
"start": 12818,
"end": 13120
}
|
class ____ extends AbstractEndpointBuilder implements RefEndpointBuilder, AdvancedRefEndpointBuilder {
public RefEndpointBuilderImpl(String path) {
super(componentName, path);
}
}
return new RefEndpointBuilderImpl(path);
}
}
|
RefEndpointBuilderImpl
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.