language stringclasses 1 value | repo stringclasses 60 values | path stringlengths 22 294 | class_span dict | source stringlengths 13 1.16M | target stringlengths 1 113 |
|---|---|---|---|---|---|
java | bumptech__glide | annotation/compiler/src/main/java/com/bumptech/glide/annotation/compiler/AppModuleGenerator.java | {
"start": 1453,
"end": 1518
} | class ____ something like this:
*
* <pre>
* <code>
* final | looks |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/ordered/UseReservedKeywordInOrderByTest.java | {
"start": 1333,
"end": 1537
} | class ____ {
@Id
private Integer id;
private String name;
@Column(name = "update_date")
private Date update;
@OneToOne
Location location;
}
@Entity(name = "Location")
public static | Person |
java | apache__rocketmq | proxy/src/test/java/org/apache/rocketmq/proxy/common/AddressTest.java | {
"start": 1006,
"end": 2285
} | class ____ {
@Test
public void testConstructorWithIPv4() {
HostAndPort hostAndPort = HostAndPort.fromString("192.168.1.1:8080");
Address address = new Address(hostAndPort);
assertEquals(Address.AddressScheme.IPv4, address.getAddressScheme());
assertEquals(hostAndPort, address.getHostAndPort());
}
@Test
public void testConstructorWithIPv6() {
HostAndPort hostAndPort = HostAndPort.fromString("[2001:db8::1]:8080");
Address address = new Address(hostAndPort);
assertEquals(Address.AddressScheme.IPv6, address.getAddressScheme());
assertEquals(hostAndPort, address.getHostAndPort());
}
@Test
public void testConstructorWithDomainName() {
HostAndPort hostAndPort = HostAndPort.fromString("example.com:8080");
Address address = new Address(hostAndPort);
assertEquals(Address.AddressScheme.DOMAIN_NAME, address.getAddressScheme());
assertEquals(hostAndPort, address.getHostAndPort());
}
@Test
public void testConstructorWithNullHostAndPort() {
Address address = new Address(null);
assertEquals(Address.AddressScheme.UNRECOGNIZED, address.getAddressScheme());
assertNull(address.getHostAndPort());
}
} | AddressTest |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/mapping/collections/classification/set/EntityWithNaturallySortedSet.java | {
"start": 551,
"end": 1256
} | class ____ {
// ...
//end::collections-sortedset-natural-ex[]
@Id
private Integer id;
@Basic
private String name;
//tag::collections-sortedset-natural-ex[]
@ElementCollection
@SortNatural
private SortedSet<Name> names;
//end::collections-sortedset-natural-ex[]
private EntityWithNaturallySortedSet() {
// for Hibernate use
}
public EntityWithNaturallySortedSet(Integer id, String name) {
this.id = id;
this.name = name;
}
public Integer getId() {
return id;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
//tag::collections-sortedset-natural-ex[]
}
//end::collections-sortedset-natural-ex[]
| EntityWithNaturallySortedSet |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/ClientBaseWithFixes.java | {
"start": 3693,
"end": 6880
} | class ____ implements Watcher {
// XXX this doesn't need to be volatile! (Should probably be final)
volatile CountDownLatch clientConnected;
volatile boolean connected;
protected ZooKeeper client;
public void initializeWatchedClient(ZooKeeper zk) {
if (client != null) {
throw new RuntimeException("Watched Client was already set");
}
client = zk;
}
public CountdownWatcher() {
reset();
}
synchronized public void reset() {
clientConnected = new CountDownLatch(1);
connected = false;
}
@Override
synchronized public void process(WatchedEvent event) {
if (event.getState() == KeeperState.SyncConnected ||
event.getState() == KeeperState.ConnectedReadOnly) {
connected = true;
notifyAll();
clientConnected.countDown();
} else {
connected = false;
notifyAll();
}
}
synchronized boolean isConnected() {
return connected;
}
@VisibleForTesting
public synchronized void waitForConnected(long timeout)
throws InterruptedException, TimeoutException {
long expire = Time.now() + timeout;
long left = timeout;
while(!connected && left > 0) {
wait(left);
left = expire - Time.now();
}
if (!connected) {
throw new TimeoutException("Did not connect");
}
}
@VisibleForTesting
public synchronized void waitForDisconnected(long timeout)
throws InterruptedException, TimeoutException {
long expire = Time.now() + timeout;
long left = timeout;
while(connected && left > 0) {
wait(left);
left = expire - Time.now();
}
if (connected) {
throw new TimeoutException("Did not disconnect");
}
}
}
protected TestableZooKeeper createClient()
throws IOException, InterruptedException {
return createClient(hostPort);
}
protected TestableZooKeeper createClient(String hp)
throws IOException, InterruptedException {
CountdownWatcher watcher = new CountdownWatcher();
return createClient(watcher, hp);
}
private LinkedList<ZooKeeper> allClients;
private boolean allClientsSetup = false;
protected TestableZooKeeper createClient(CountdownWatcher watcher, String hp)
throws IOException, InterruptedException {
return createClient(watcher, hp, CONNECTION_TIMEOUT);
}
protected TestableZooKeeper createClient(CountdownWatcher watcher,
String hp, int timeout) throws IOException, InterruptedException {
watcher.reset();
TestableZooKeeper zk = new TestableZooKeeper(hp, timeout, watcher);
if (!watcher.clientConnected.await(timeout, TimeUnit.MILLISECONDS)) {
fail("Unable to connect to server");
}
synchronized (this) {
if (!allClientsSetup) {
LOG.error("allClients never setup");
fail("allClients never setup");
}
if (allClients != null) {
allClients.add(zk);
} else {
// test done - close the zk, not needed
zk.close();
}
}
watcher.initializeWatchedClient(zk);
return zk;
}
public static | CountdownWatcher |
java | apache__camel | core/camel-core/src/test/java/org/apache/camel/component/bean/BeanWithVariablesAndBodyInjectionTest.java | {
"start": 1410,
"end": 2830
} | class ____ extends ContextTestSupport {
private static final Logger LOG = LoggerFactory.getLogger(BeanWithVariablesAndBodyInjectionTest.class);
protected final MyBean myBean = new MyBean();
@Test
public void testSendMessage() {
template.send("direct:in", new Processor() {
public void process(Exchange exchange) {
exchange.setProperty("p1", "abc");
exchange.setProperty("p2", 123);
Message in = exchange.getIn();
exchange.setVariable("h1", "xyz");
exchange.setVariable("h2", 456);
in.setBody("TheBody");
}
});
Map<String, Object> foo = myBean.variables;
assertNotNull(foo, "myBean.foo");
assertEquals("xyz", foo.get("h1"), "foo.h1");
assertEquals(456, foo.get("h2"), "foo.h2");
assertEquals("TheBody", myBean.body, "body");
}
@Override
protected Registry createCamelRegistry() throws Exception {
Registry answer = super.createCamelRegistry();
answer.bind("myBean", myBean);
return answer;
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
public void configure() {
from("direct:in").to("bean:myBean?method=myMethod");
}
};
}
public static | BeanWithVariablesAndBodyInjectionTest |
java | square__javapoet | src/test/java/com/squareup/javapoet/JavaFileTest.java | {
"start": 20410,
"end": 21010
} | class ____<T extends com.taco.bell.Taco> {\n"
+ "}\n");
}
@Test public void superclassReferencesSelf() throws Exception {
String source = JavaFile.builder("com.squareup.tacos",
TypeSpec.classBuilder("Taco")
.superclass(ParameterizedTypeName.get(
ClassName.get(Comparable.class), ClassName.get("com.squareup.tacos", "Taco")))
.build())
.build()
.toString();
assertThat(source).isEqualTo(""
+ "package com.squareup.tacos;\n"
+ "\n"
+ "import java.lang.Comparable;\n"
+ "\n"
+ " | Taco |
java | elastic__elasticsearch | test/framework/src/main/java/org/elasticsearch/index/mapper/MapperServiceTestCase.java | {
"start": 5554,
"end": 11861
} | class ____ extends FieldTypeTestCase {
protected static final Settings SETTINGS = Settings.builder()
.put(IndexMetadata.SETTING_VERSION_CREATED, IndexVersion.current())
.build();
protected static final ToXContent.Params INCLUDE_DEFAULTS = new ToXContent.MapParams(Map.of("include_defaults", "true"));
protected Collection<? extends Plugin> getPlugins() {
return emptyList();
}
protected Settings getIndexSettings() {
return SETTINGS;
}
protected final Settings.Builder getIndexSettingsBuilder() {
return Settings.builder().put(getIndexSettings());
}
protected IndexAnalyzers createIndexAnalyzers(IndexSettings indexSettings) {
return createIndexAnalyzers();
}
protected static IndexAnalyzers createIndexAnalyzers() {
return IndexAnalyzers.of(Map.of("default", new NamedAnalyzer("default", AnalyzerScope.INDEX, new StandardAnalyzer())));
}
protected static String randomIndexOptions() {
return randomFrom("docs", "freqs", "positions", "offsets");
}
protected final DocumentMapper createDocumentMapper(XContentBuilder mappings, IndexMode indexMode) throws IOException {
return switch (indexMode) {
case STANDARD, LOOKUP -> createDocumentMapper(mappings);
case TIME_SERIES -> createTimeSeriesModeDocumentMapper(mappings);
case LOGSDB -> createLogsModeDocumentMapper(mappings);
};
}
protected final DocumentMapper createDocumentMapper(XContentBuilder mappings) throws IOException {
return createMapperService(mappings).documentMapper();
}
protected final DocumentMapper createTimeSeriesModeDocumentMapper(XContentBuilder mappings) throws IOException {
Settings settings = Settings.builder()
.put(IndexSettings.MODE.getKey(), "time_series")
.put(IndexMetadata.INDEX_ROUTING_PATH.getKey(), "uid")
.build();
return createMapperService(settings, mappings).documentMapper();
}
protected final DocumentMapper createLogsModeDocumentMapper(XContentBuilder mappings) throws IOException {
Settings settings = Settings.builder().put(IndexSettings.MODE.getKey(), IndexMode.LOGSDB.getName()).build();
return createMapperService(settings, mappings).documentMapper();
}
protected final DocumentMapper createDocumentMapper(IndexVersion version, XContentBuilder mappings) throws IOException {
return createMapperService(version, mappings).documentMapper();
}
protected final DocumentMapper createDocumentMapper(String mappings) throws IOException {
var mapperService = createMapperService(mapping(b -> {}));
merge(mapperService, mappings);
return mapperService.documentMapper();
}
public final MapperService createMapperService(XContentBuilder mappings) throws IOException {
return createMapperService(getVersion(), mappings);
}
public final MapperService createSytheticSourceMapperService(XContentBuilder mappings) throws IOException {
var settings = Settings.builder().put("index.mapping.source.mode", "synthetic").build();
return createMapperService(getVersion(), settings, () -> true, mappings);
}
protected IndexVersion getVersion() {
return IndexVersion.current();
}
protected final MapperService createMapperService(Settings settings, XContentBuilder mappings) throws IOException {
return createMapperService(getVersion(), settings, () -> true, mappings);
}
protected final MapperService createMapperService(BooleanSupplier idFieldEnabled, XContentBuilder mappings) throws IOException {
return createMapperService(getVersion(), getIndexSettings(), idFieldEnabled, mappings);
}
public final MapperService createMapperService(String mappings) throws IOException {
MapperService mapperService = createMapperService(mapping(b -> {}));
merge(mapperService, mappings);
return mapperService;
}
protected final MapperService createMapperService(Settings settings, String mappings) throws IOException {
MapperService mapperService = createMapperService(IndexVersion.current(), settings, () -> true, mapping(b -> {}));
merge(mapperService, mappings);
return mapperService;
}
protected final MapperService createMapperService(IndexVersion indexVersion, Settings settings, XContentBuilder mappings)
throws IOException {
MapperService mapperService = createMapperService(indexVersion, settings, () -> true, mappings);
return mapperService;
}
protected final MapperService createMapperService(IndexVersion version, XContentBuilder mapping) throws IOException {
return createMapperService(version, getIndexSettings(), () -> true, mapping);
}
/**
* Create a {@link MapperService} like we would for an index.
*/
protected final MapperService createMapperService(
IndexVersion version,
Settings settings,
BooleanSupplier idFieldDataEnabled,
XContentBuilder mapping
) throws IOException {
MapperService mapperService = createMapperService(version, settings, idFieldDataEnabled);
return withMapping(mapperService, mapping);
}
protected final MapperService createMapperService(IndexVersion version, Settings settings, BooleanSupplier idFieldDataEnabled) {
return new TestMapperServiceBuilder().indexVersion(version).settings(settings).idFieldDataEnabled(idFieldDataEnabled).build();
}
public MapperService createMapperServiceWithNamespaceValidator(String mappings, RootObjectMapperNamespaceValidator validator)
throws IOException {
MapperService mapperService = new TestMapperServiceBuilder().indexVersion(getVersion())
.settings(getIndexSettings())
.idFieldDataEnabled(() -> true)
.namespaceValidator(validator)
.build();
merge(mapperService, mappings);
return mapperService;
}
protected final MapperService withMapping(MapperService mapperService, XContentBuilder mapping) throws IOException {
merge(mapperService, mapping);
return mapperService;
};
protected | MapperServiceTestCase |
java | processing__processing4 | app/src/processing/app/ui/Splash.java | {
"start": 2673,
"end": 5233
} | class ____.
* @param args the command line arguments
*/
@SuppressWarnings("SameParameterValue")
static void invokeMain(String className, String[] args) {
try {
Class.forName(className)
.getMethod("main", String[].class)
.invoke(null, new Object[] { args });
} catch (Exception e) {
throw new InternalError("Failed to invoke main method", e);
}
}
// /**
// * Load the optional properties.txt file from the 'lib' sub-folder
// * that can be used to pass entries to System.properties.
// */
// static private void initProperties() {
// try {
// File propsFile = Platform.getContentFile("properties.txt");
// if (propsFile != null && propsFile.exists()) {
// Settings props = new Settings(propsFile);
// for (Map.Entry<String, String> entry : props.getMap().entrySet()) {
// System.setProperty(entry.getKey(), entry.getValue());
// }
// }
// } catch (Exception e) {
// // No crying over spilt milk, but...
// e.printStackTrace();
// }
// }
static public boolean getDisableHiDPI() {
File propsFile = Platform.getContentFile("disable_hidpi");
return propsFile != null && propsFile.exists();
}
// Should only be called from Windows, but not restricted to Windows
// so not enforced. Unlikely to work on macOS because it modifies
// a file inside the .app, but may be useful on Linux.
static public void setDisableHiDPI(boolean disabled) {
try {
File propsFile = Platform.getContentFile("disable_hidpi");
if (propsFile != null) {
if (disabled) {
if (!propsFile.exists()) { // don't recreate if exists
new FileOutputStream(propsFile).close();
}
} else if (propsFile.exists()) {
boolean success = propsFile.delete();
if (!success) {
System.err.println("Could not delete disable_hidpi");
}
}
}
} catch (Exception e) {
// No crying over spilt milk, but...
e.printStackTrace();
}
}
static public void main(String[] args) {
// Has to be done before AWT is initialized, so the hack lives here
// instead of Base or anywhere else that might make more sense.
if (getDisableHiDPI()) {
System.setProperty("sun.java2d.uiScale.enabled", "false");
}
try {
showSplash(Platform.getContentFile("lib/about-2x.png"));
invokeMain("processing.app.Base", args);
disposeSplash();
} catch (Exception e) {
e.printStackTrace();
}
}
}
| name |
java | elastic__elasticsearch | modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/HtmlStripProcessorTests.java | {
"start": 518,
"end": 1091
} | class ____ extends AbstractStringProcessorTestCase<String> {
@Override
protected AbstractStringProcessor<String> newProcessor(String field, boolean ignoreMissing, String targetField) {
return new HtmlStripProcessor(randomAlphaOfLength(10), null, field, ignoreMissing, targetField);
}
@Override
protected String modifyInput(String input) {
return "<p><b>test</b>" + input + "<p><b>test</b>";
}
@Override
protected String expectedResult(String input) {
return "\ntest" + input + "\ntest";
}
}
| HtmlStripProcessorTests |
java | apache__hadoop | hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/security/AbfsDtFetcher.java | {
"start": 1470,
"end": 2925
} | class ____ implements DtFetcher {
private static final String FETCH_FAILED =
"Filesystem not generating Delegation Tokens";
/**
* Returns the service name for the scheme..
*/
public Text getServiceName() {
return new Text(getScheme());
}
/**
* Get the scheme for this specific fetcher.
* @return a scheme.
*/
protected String getScheme() {
return FileSystemUriSchemes.ABFS_SCHEME;
}
public boolean isTokenRequired() {
return UserGroupInformation.isSecurityEnabled();
}
/**
* Returns Token object via FileSystem, null if bad argument.
* @param conf - a Configuration object used with FileSystem.get()
* @param creds - a Credentials object to which token(s) will be added
* @param renewer - the renewer to send with the token request
* @param url - the URL to which the request is sent
* @return a Token, or null if fetch fails.
*/
public Token<?> addDelegationTokens(Configuration conf,
Credentials creds,
String renewer,
String url) throws Exception {
if (!url.startsWith(getServiceName().toString())) {
url = getServiceName().toString() + "://" + url;
}
FileSystem fs = FileSystem.get(URI.create(url), conf);
Token<?> token = fs.getDelegationToken(renewer);
if (token == null) {
throw new IOException(FETCH_FAILED + ": " + url);
}
creds.addToken(token.getService(), token);
return token;
}
}
| AbfsDtFetcher |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/security/ReservationsACLsManager.java | {
"start": 1515,
"end": 2618
} | class ____ {
private boolean isReservationACLsEnable;
Map<String, Map<ReservationACL, AccessControlList>> reservationAcls =
new HashMap<>();
public ReservationsACLsManager(Configuration conf) throws YarnException {
this.isReservationACLsEnable = conf.getBoolean(
YarnConfiguration.YARN_RESERVATION_ACL_ENABLE,
YarnConfiguration.DEFAULT_YARN_RESERVATION_ACL_ENABLE)
&& conf.getBoolean(YarnConfiguration.YARN_ACL_ENABLE,
YarnConfiguration.DEFAULT_YARN_ACL_ENABLE);
}
public boolean checkAccess(UserGroupInformation callerUGI,
ReservationACL acl, String queueName) {
if (!isReservationACLsEnable) {
return true;
}
if (this.reservationAcls.containsKey(queueName)) {
Map<ReservationACL, AccessControlList> acls = this.reservationAcls.get(
queueName);
if (acls != null && acls.containsKey(acl)) {
return acls.get(acl).isUserAllowed(callerUGI);
} else {
// Give access if acl is undefined for queue.
return true;
}
}
return false;
}
}
| ReservationsACLsManager |
java | mapstruct__mapstruct | processor/src/test/java/org/mapstruct/ap/test/bugs/_373/Branch.java | {
"start": 233,
"end": 501
} | class ____ {
private BranchLocation branchLocation;
public BranchLocation getBranchLocation() {
return branchLocation;
}
public void setBranchLocation(BranchLocation branchLocation) {
this.branchLocation = branchLocation;
}
}
| Branch |
java | playframework__playframework | documentation/manual/working/javaGuide/main/async/code/javaguide/async/JavaAsync.java | {
"start": 676,
"end": 777
} | class ____ {
@Test
public void promiseWithTimeout() throws Exception {
// #timeout
| JavaAsync |
java | quarkusio__quarkus | extensions/resteasy-reactive/rest-client/deployment/src/test/java/io/quarkus/rest/client/reactive/registerclientheaders/MyHeadersFactory.java | {
"start": 372,
"end": 783
} | class ____ implements ClientHeadersFactory {
@Inject
BeanManager beanManager;
@Override
public MultivaluedMap<String, String> update(MultivaluedMap<String, String> incomingHeaders,
MultivaluedMap<String, String> clientOutgoingHeaders) {
assertNotNull(beanManager);
clientOutgoingHeaders.add("foo", "bar");
return clientOutgoingHeaders;
}
}
| MyHeadersFactory |
java | apache__camel | core/camel-core/src/test/java/org/apache/camel/component/bean/ClassComponentWithPropertiesSetFromEndpointTest.java | {
"start": 978,
"end": 1628
} | class ____ extends ContextTestSupport {
@Test
public void testClassComponent() throws Exception {
getMockEndpoint("mock:result").expectedBodiesReceived("Bye World");
template.sendBody("direct:start", "World");
assertMockEndpointsSatisfied();
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
from("direct:start").to("class:org.apache.camel.component.bean.MyPrefixBean?bean.prefix=Bye").to("mock:result");
}
};
}
}
| ClassComponentWithPropertiesSetFromEndpointTest |
java | apache__camel | components/camel-base64/src/test/java/org/apache/camel/dataformat/base64/SpringBase64DataFormatTestBase.java | {
"start": 1092,
"end": 2061
} | class ____ extends CamelSpringTestSupport {
protected Base64DataFormat format = new Base64DataFormat();
@EndpointInject("mock:result")
private MockEndpoint result;
protected void runEncoderTest(byte[] raw, byte[] expected) throws Exception {
result.setExpectedMessageCount(1);
template.sendBody("direct:startEncode", raw);
MockEndpoint.assertIsSatisfied(context);
byte[] encoded = result.getReceivedExchanges().get(0).getIn().getBody(byte[].class);
assertArrayEquals(expected, encoded);
}
protected void runDecoderTest(byte[] encoded, byte[] expected) throws Exception {
result.setExpectedMessageCount(1);
template.sendBody("direct:startDecode", encoded);
MockEndpoint.assertIsSatisfied(context);
byte[] decoded = result.getReceivedExchanges().get(0).getIn().getBody(byte[].class);
assertArrayEquals(expected, decoded);
}
}
| SpringBase64DataFormatTestBase |
java | google__auto | value/src/main/java/com/google/auto/value/extension/memoized/processor/MemoizeExtension.java | {
"start": 3714,
"end": 5273
} | class ____ extends AutoValueExtension {
private static final ImmutableSet<String> DO_NOT_PULL_DOWN_ANNOTATIONS =
ImmutableSet.of(Override.class.getCanonicalName(), MEMOIZED_NAME);
// Maven is configured to shade (rewrite) com.google packages to prevent dependency conflicts.
// Split up the package here with a call to concat to prevent Maven from finding and rewriting it,
// so that this will be able to find the LazyInit annotation if it's on the classpath.
private static final ClassName LAZY_INIT =
ClassName.get("com".concat(".google.errorprone.annotations.concurrent"), "LazyInit");
private static final AnnotationSpec SUPPRESS_WARNINGS =
AnnotationSpec.builder(SuppressWarnings.class).addMember("value", "$S", "Immutable").build();
@Override
public IncrementalExtensionType incrementalType(ProcessingEnvironment processingEnvironment) {
return IncrementalExtensionType.ISOLATING;
}
@Override
public boolean applicable(Context context) {
return !memoizedMethods(context).isEmpty();
}
@Override
public String generateClass(
Context context, String className, String classToExtend, boolean isFinal) {
return new Generator(context, className, classToExtend, isFinal).generate();
}
private static ImmutableSet<ExecutableElement> memoizedMethods(Context context) {
return methodsIn(context.autoValueClass().getEnclosedElements()).stream()
.filter(m -> getAnnotationMirror(m, MEMOIZED_NAME).isPresent())
.collect(toImmutableSet());
}
static final | MemoizeExtension |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/threadsafety/SynchronizeOnNonFinalFieldTest.java | {
"start": 3682,
"end": 3863
} | class ____ extends Writer {
void m() {
synchronized (lock) {
}
}
}
""")
.doTest();
}
}
| Test |
java | micronaut-projects__micronaut-core | core/src/main/java/io/micronaut/core/annotation/NonNull.java | {
"start": 1586,
"end": 1845
} | interface ____ {
/**
* Whether the nullable behaviour is inherited by subclasses or implementors in interfaces.
* @return True if it should be inherited (defaults to false)
* @since 4.1.0
*/
boolean inherited() default false;
}
| NonNull |
java | apache__flink | flink-table/flink-sql-gateway-api/src/main/java/org/apache/flink/table/gateway/api/endpoint/SqlGatewayEndpointFactoryUtils.java | {
"start": 4576,
"end": 4962
} | class ____ extends FactoryHelper<SqlGatewayEndpointFactory> {
private EndpointFactoryHelper(
SqlGatewayEndpointFactory factory, Map<String, String> configOptions) {
super(factory, configOptions, PROPERTY_VERSION);
}
}
/** The default context of {@link SqlGatewayEndpointFactory}. */
@Internal
public static | EndpointFactoryHelper |
java | apache__flink | flink-runtime/src/test/java/org/apache/flink/runtime/jobmaster/slotpool/DefaultDeclarativeSlotPoolTest.java | {
"start": 37352,
"end": 38110
} | class ____
implements Consumer<Collection<ResourceRequirement>> {
private final BlockingQueue<Collection<ResourceRequirement>> resourceRequirementsQueue =
new ArrayBlockingQueue<>(2);
@Override
public void accept(Collection<ResourceRequirement> resourceRequirements) {
resourceRequirementsQueue.offer(resourceRequirements);
}
private Collection<ResourceRequirement> takeResourceRequirements()
throws InterruptedException {
return resourceRequirementsQueue.take();
}
public boolean hasNextResourceRequirements() {
return !resourceRequirementsQueue.isEmpty();
}
}
static final | NewResourceRequirementsService |
java | apache__flink | flink-table/flink-table-common/src/main/java/org/apache/flink/table/connector/source/abilities/SupportsWatermarkPushDown.java | {
"start": 3107,
"end": 3838
} | interface ____ {
/**
* Provides a {@link WatermarkStrategy} which defines how to generate {@link Watermark}s in the
* stream source.
*
* <p>The {@link WatermarkStrategy} is a builder/factory for the actual runtime implementation
* consisting of {@link TimestampAssigner} (assigns the event-time timestamps to each record)
* and the {@link WatermarkGenerator} (generates the watermarks).
*
* <p>Note: If necessary, the watermark strategy will contain required computed column
* expressions and consider metadata columns (if {@link SupportsReadingMetadata} is
* implemented).
*/
void applyWatermark(WatermarkStrategy<RowData> watermarkStrategy);
}
| SupportsWatermarkPushDown |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/events/ListenerTest.java | {
"start": 3564,
"end": 4194
} | class ____ {
@Id
private Long id;
private String name;
private Date dateOfBirth;
@Transient
private long age;
private Date lastUpdate;
public void setLastUpdate(Date lastUpdate) {
this.lastUpdate = lastUpdate;
}
/**
* Set the transient property at load time based on a calculation.
* Note that a native Hibernate formula mapping is better for this purpose.
*/
@PostLoad
public void calculateAge() {
age = ChronoUnit.YEARS.between( LocalDateTime.ofInstant(
Instant.ofEpochMilli( dateOfBirth.getTime() ), ZoneOffset.UTC ),
LocalDateTime.now()
);
}
}
public static | Person |
java | quarkusio__quarkus | integration-tests/jpa/src/main/java/io/quarkus/it/jpa/util/MyCdiContext.java | {
"start": 172,
"end": 908
} | class ____ {
public static void checkAvailable(MyCdiContext injected, BeanInstantiator beanInstantiator) {
assertThat(injected)
.as("CDI context should be available")
.isNotNull()
.returns(true, MyCdiContext::worksProperly);
assertThat(beanInstantiator).isEqualTo(BeanInstantiator.ARC);
}
public static void checkNotAvailable(MyCdiContext injected, BeanInstantiator beanInstantiator) {
assertThat(injected)
.as("CDI context should not be available")
.isNull();
assertThat(beanInstantiator).isEqualTo(BeanInstantiator.HIBERNATE);
}
public boolean worksProperly() {
return true;
}
}
| MyCdiContext |
java | micronaut-projects__micronaut-core | inject-java/src/test/groovy/io/micronaut/inject/beans/concopy/Message.java | {
"start": 4645,
"end": 5039
} | class ____ extends PayloadMessage<Map<String, Object>> {
/**
* Get the required value of the message's <code>type</code> field.
*
* @return The message's type
*/
@Override
@JsonIgnore
@NonNull
String getMessageType() {
return Types.CONNECTION_INIT;
}
}
/**
* A graphql-ws message for connection acknowledgement.
*/
final | ConnectionInitMessage |
java | apache__camel | dsl/camel-componentdsl/src/generated/java/org/apache/camel/builder/component/dsl/DebeziumPostgresComponentBuilderFactory.java | {
"start": 8885,
"end": 22077
} | class ____ is responsible for persistence of
* connector offsets.
*
* The option is a: <code>java.lang.String</code> type.
*
* Default: org.apache.kafka.connect.storage.FileOffsetBackingStore
* Group: consumer
*
* @param offsetStorage the value to set
* @return the dsl builder
*/
default DebeziumPostgresComponentBuilder offsetStorage(java.lang.String offsetStorage) {
doSetProperty("offsetStorage", offsetStorage);
return this;
}
/**
* Path to file where offsets are to be stored. Required when
* offset.storage is set to the FileOffsetBackingStore.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: consumer
*
* @param offsetStorageFileName the value to set
* @return the dsl builder
*/
default DebeziumPostgresComponentBuilder offsetStorageFileName(java.lang.String offsetStorageFileName) {
doSetProperty("offsetStorageFileName", offsetStorageFileName);
return this;
}
/**
* The number of partitions used when creating the offset storage topic.
* Required when offset.storage is set to the 'KafkaOffsetBackingStore'.
*
* The option is a: <code>int</code> type.
*
* Group: consumer
*
* @param offsetStoragePartitions the value to set
* @return the dsl builder
*/
default DebeziumPostgresComponentBuilder offsetStoragePartitions(int offsetStoragePartitions) {
doSetProperty("offsetStoragePartitions", offsetStoragePartitions);
return this;
}
/**
* Replication factor used when creating the offset storage topic.
* Required when offset.storage is set to the KafkaOffsetBackingStore.
*
* The option is a: <code>int</code> type.
*
* Group: consumer
*
* @param offsetStorageReplicationFactor the value to set
* @return the dsl builder
*/
default DebeziumPostgresComponentBuilder offsetStorageReplicationFactor(int offsetStorageReplicationFactor) {
doSetProperty("offsetStorageReplicationFactor", offsetStorageReplicationFactor);
return this;
}
/**
* The name of the Kafka topic where offsets are to be stored. Required
* when offset.storage is set to the KafkaOffsetBackingStore.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: consumer
*
* @param offsetStorageTopic the value to set
* @return the dsl builder
*/
default DebeziumPostgresComponentBuilder offsetStorageTopic(java.lang.String offsetStorageTopic) {
doSetProperty("offsetStorageTopic", offsetStorageTopic);
return this;
}
/**
* Whether autowiring is enabled. This is used for automatic autowiring
* options (the option must be marked as autowired) by looking up in the
* registry to find if there is a single instance of matching type,
* which then gets configured on the component. This can be used for
* automatic configuring JDBC data sources, JMS connection factories,
* AWS Clients, etc.
*
* The option is a: <code>boolean</code> type.
*
* Default: true
* Group: advanced
*
* @param autowiredEnabled the value to set
* @return the dsl builder
*/
default DebeziumPostgresComponentBuilder autowiredEnabled(boolean autowiredEnabled) {
doSetProperty("autowiredEnabled", autowiredEnabled);
return this;
}
/**
* Specify how binary (blob, binary, etc.) columns should be represented
* in change events, including: 'bytes' represents binary data as byte
* array (default); 'base64' represents binary data as base64-encoded
* string; 'base64-url-safe' represents binary data as
* base64-url-safe-encoded string; 'hex' represents binary data as
* hex-encoded (base16) string.
*
* The option is a: <code>java.lang.String</code> type.
*
* Default: bytes
* Group: postgres
*
* @param binaryHandlingMode the value to set
* @return the dsl builder
*/
default DebeziumPostgresComponentBuilder binaryHandlingMode(java.lang.String binaryHandlingMode) {
doSetProperty("binaryHandlingMode", binaryHandlingMode);
return this;
}
/**
* Regular expressions matching columns to exclude from change events.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: postgres
*
* @param columnExcludeList the value to set
* @return the dsl builder
*/
default DebeziumPostgresComponentBuilder columnExcludeList(java.lang.String columnExcludeList) {
doSetProperty("columnExcludeList", columnExcludeList);
return this;
}
/**
* Regular expressions matching columns to include in change events.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: postgres
*
* @param columnIncludeList the value to set
* @return the dsl builder
*/
default DebeziumPostgresComponentBuilder columnIncludeList(java.lang.String columnIncludeList) {
doSetProperty("columnIncludeList", columnIncludeList);
return this;
}
/**
* A comma-separated list of regular expressions matching
* fully-qualified names of columns that adds the columns original type
* and original length as parameters to the corresponding field schemas
* in the emitted change records.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: postgres
*
* @param columnPropagateSourceType the value to set
* @return the dsl builder
*/
default DebeziumPostgresComponentBuilder columnPropagateSourceType(java.lang.String columnPropagateSourceType) {
doSetProperty("columnPropagateSourceType", columnPropagateSourceType);
return this;
}
/**
* The maximum time in milliseconds to wait for connection validation to
* complete. Defaults to 60 seconds.
*
* The option is a: <code>long</code> type.
*
* Default: 1m
* Group: postgres
*
* @param connectionValidationTimeoutMs the value to set
* @return the dsl builder
*/
default DebeziumPostgresComponentBuilder connectionValidationTimeoutMs(long connectionValidationTimeoutMs) {
doSetProperty("connectionValidationTimeoutMs", connectionValidationTimeoutMs);
return this;
}
/**
* Optional list of custom converters that would be used instead of
* default ones. The converters are defined using '.type' config option
* and configured using options '.'.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: postgres
*
* @param converters the value to set
* @return the dsl builder
*/
default DebeziumPostgresComponentBuilder converters(java.lang.String converters) {
doSetProperty("converters", converters);
return this;
}
/**
* The custom metric tags will accept key-value pairs to customize the
* MBean object name which should be appended the end of regular name,
* each key would represent a tag for the MBean object name, and the
* corresponding value would be the value of that tag the key is. For
* example: k1=v1,k2=v2.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: postgres
*
* @param customMetricTags the value to set
* @return the dsl builder
*/
default DebeziumPostgresComponentBuilder customMetricTags(java.lang.String customMetricTags) {
doSetProperty("customMetricTags", customMetricTags);
return this;
}
/**
* The name of the database from which the connector should capture
* changes.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: postgres
*
* @param databaseDbname the value to set
* @return the dsl builder
*/
default DebeziumPostgresComponentBuilder databaseDbname(java.lang.String databaseDbname) {
doSetProperty("databaseDbname", databaseDbname);
return this;
}
/**
* Resolvable hostname or IP address of the database server.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: postgres
*
* @param databaseHostname the value to set
* @return the dsl builder
*/
default DebeziumPostgresComponentBuilder databaseHostname(java.lang.String databaseHostname) {
doSetProperty("databaseHostname", databaseHostname);
return this;
}
/**
* A semicolon separated list of SQL statements to be executed when a
* JDBC connection to the database is established. Note that the
* connector may establish JDBC connections at its own discretion, so
* this should typically be used for configuration of session parameters
* only, but not for executing DML statements. Use doubled semicolon
* (';;') to use a semicolon as a character and not as a delimiter.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: postgres
*
* @param databaseInitialStatements the value to set
* @return the dsl builder
*/
default DebeziumPostgresComponentBuilder databaseInitialStatements(java.lang.String databaseInitialStatements) {
doSetProperty("databaseInitialStatements", databaseInitialStatements);
return this;
}
/**
* Password of the database user to be used when connecting to the
* database.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: postgres
*
* @param databasePassword the value to set
* @return the dsl builder
*/
default DebeziumPostgresComponentBuilder databasePassword(java.lang.String databasePassword) {
doSetProperty("databasePassword", databasePassword);
return this;
}
/**
* Port of the database server.
*
* The option is a: <code>int</code> type.
*
* Default: 5432
* Group: postgres
*
* @param databasePort the value to set
* @return the dsl builder
*/
default DebeziumPostgresComponentBuilder databasePort(int databasePort) {
doSetProperty("databasePort", databasePort);
return this;
}
/**
* Time to wait for a query to execute, given in milliseconds. Defaults
* to 600 seconds (600,000 ms); zero means there is no limit.
*
* The option is a: <code>int</code> type.
*
* Default: 10m
* Group: postgres
*
* @param databaseQueryTimeoutMs the value to set
* @return the dsl builder
*/
default DebeziumPostgresComponentBuilder databaseQueryTimeoutMs(int databaseQueryTimeoutMs) {
doSetProperty("databaseQueryTimeoutMs", databaseQueryTimeoutMs);
return this;
}
/**
* File containing the SSL Certificate for the client. See the Postgres
* SSL docs for further information.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: postgres
*
* @param databaseSslcert the value to set
* @return the dsl builder
*/
default DebeziumPostgresComponentBuilder databaseSslcert(java.lang.String databaseSslcert) {
doSetProperty("databaseSslcert", databaseSslcert);
return this;
}
/**
* A name of | that |
java | netty__netty | example/src/main/java/io/netty/example/spdy/client/SpdyClientStreamIdHandler.java | {
"start": 1076,
"end": 1799
} | class ____ extends ChannelOutboundHandlerAdapter {
private int currentStreamId = 1;
public boolean acceptOutboundMessage(Object msg) {
return msg instanceof HttpMessage;
}
@Override
public void write(ChannelHandlerContext ctx, Object msg, ChannelPromise promise) {
if (acceptOutboundMessage(msg)) {
HttpMessage httpMsg = (HttpMessage) msg;
if (!httpMsg.headers().contains(SpdyHttpHeaders.Names.STREAM_ID)) {
httpMsg.headers().setInt(Names.STREAM_ID, currentStreamId);
// Client stream IDs are always odd
currentStreamId += 2;
}
}
ctx.write(msg, promise);
}
}
| SpdyClientStreamIdHandler |
java | quarkusio__quarkus | independent-projects/qute/core/src/test/java/io/quarkus/qute/TraceListenerTest.java | {
"start": 618,
"end": 6494
} | class ____ {
private static final Comparator<String> NODE_COMPARATOR = (o1, o2) -> o1.compareTo(o2);
@Test
public void trackTemplate() {
Engine engine = Engine.builder()
.addDefaults()
.addValueResolver(new ReflectionValueResolver())
.enableTracing(true)
.build();
String templateId = "hello";
Template template = engine.parse("""
<html>
{#for item in items}
{#if item_count > 0}FOO{/if}
{item} {item_count}
{/for}
</html>
""", null, templateId);
StringBuilder trace = new StringBuilder();
engine.addTraceListener(new TraceListener() {
@Override
public void onStartTemplate(TemplateEvent event) {
String templateId = event.getTemplateInstance().getTemplate().getId();
trace.append("<").append(templateId).append(">");
}
@Override
public void onEndTemplate(TemplateEvent event) {
String templateId = event.getTemplateInstance().getTemplate().getId();
trace.append("</").append(templateId).append(">");
}
});
List<String> items = List.of("foo", "bar", "baz");
template.data("items", items).render();
assertEquals("<hello></hello>", trace.toString());
}
@Test
public void trackNodes() {
Engine engine = Engine.builder()
.addDefaults()
.addValueResolver(new ReflectionValueResolver())
.enableTracing(true)
.build();
Template template = engine.parse("""
<html>
{#for item in items}
{#if item_count > 0}FOO{/if}
{item} {item_count}
{/for}
</html>
""");
List<String> actualBeforeResolve = new ArrayList<>();
List<String> actualAfterResolve = new ArrayList<>();
engine.addTraceListener(new TraceListener() {
@Override
public void onBeforeResolve(ResolveEvent event) {
actualBeforeResolve.add(toStringNode(event.getTemplateNode()));
}
@Override
public void onAfterResolve(ResolveEvent event) {
actualAfterResolve.add(toStringNode(event.getTemplateNode()));
}
});
List<String> items = List.of("foo", "bar", "baz");
template.data("items", items).render();
// Before and after resolve, nodes are equal but not necessarily in the same
// order, as some nodes may take longer to resolve.
List<String> sortedBeforeResolve = new ArrayList<>(actualBeforeResolve);
List<String> sortedAfterResolve = new ArrayList<>(actualAfterResolve);
Collections.sort(sortedBeforeResolve, NODE_COMPARATOR);
Collections.sort(sortedAfterResolve, NODE_COMPARATOR);
assertEquals(sortedAfterResolve, sortedAfterResolve);
assertArrayEquals(expectedBeforeResolve().toArray(), actualBeforeResolve.toArray());
}
@Test
public void testRegistration() {
Engine engine = Engine.builder()
.addDefaults()
.addValueResolver(new ReflectionValueResolver())
.enableTracing(true)
.build();
TraceListener empty = new TraceListener() {
};
engine.addTraceListener(empty);
assertTrue(engine.getTraceManager().hasTraceListeners());
engine.removeTraceListener(empty);
assertFalse(engine.getTraceManager().hasTraceListeners());
assertNull(Engine.builder().addDefaults().enableTracing(false).build().getTraceManager());
}
private static List<String> expectedBeforeResolve() {
List<String> actual = new ArrayList<String>();
actual.add("TextNode [value=<html>]");
actual.add("SectionNode [helper=LoopSectionHelper, origin= ]");
actual.add("TextNode [value= ]");
actual.add("SectionNode [helper=IfSectionHelper, origin= ]");
actual.add("TextNode [value=FOO]");
actual.add("TextNode [value= ]");
actual.add("ExpressionNode [expression=Expression [namespace=null, parts=[item], literal=null]]");
actual.add("TextNode [value= ]");
actual.add("ExpressionNode [expression=Expression [namespace=null, parts=[item_count], literal=null]]");
actual.add("TextNode [value=]");
actual.add("TextNode [value= ]");
actual.add("SectionNode [helper=IfSectionHelper, origin= ]");
actual.add("TextNode [value=FOO]");
actual.add("TextNode [value= ]");
actual.add("ExpressionNode [expression=Expression [namespace=null, parts=[item], literal=null]]");
actual.add("TextNode [value= ]");
actual.add("ExpressionNode [expression=Expression [namespace=null, parts=[item_count], literal=null]]");
actual.add("TextNode [value=]");
actual.add("TextNode [value= ]");
actual.add("SectionNode [helper=IfSectionHelper, origin= ]");
actual.add("TextNode [value=FOO]");
actual.add("TextNode [value= ]");
actual.add("ExpressionNode [expression=Expression [namespace=null, parts=[item], literal=null]]");
actual.add("TextNode [value= ]");
actual.add("ExpressionNode [expression=Expression [namespace=null, parts=[item_count], literal=null]]");
actual.add("TextNode [value=]");
actual.add("TextNode [value=</html>]");
return actual;
}
private static String toStringNode(TemplateNode templateNode) {
return templateNode.toString().replace("\r", "").replace("\n", "");
}
}
| TraceListenerTest |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/protocol/BlockRecoveryCommand.java | {
"start": 1793,
"end": 2239
} | class ____ extends DatanodeCommand {
final Collection<RecoveringBlock> recoveringBlocks;
/**
* This is a block with locations from which it should be recovered
* and the new generation stamp, which the block will have after
* successful recovery.
*
* The new generation stamp of the block, also plays role of the recovery id.
*/
@InterfaceAudience.Private
@InterfaceStability.Evolving
public static | BlockRecoveryCommand |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/engine/jdbc/mutation/internal/MutationExecutorSingleNonBatched.java | {
"start": 620,
"end": 2294
} | class ____ extends AbstractSingleMutationExecutor {
private final PreparedStatementGroupSingleTable statementGroup;
private final GeneratedValuesMutationDelegate generatedValuesDelegate;
public MutationExecutorSingleNonBatched(
PreparableMutationOperation mutationOperation,
GeneratedValuesMutationDelegate generatedValuesDelegate,
SharedSessionContractImplementor session) {
super( mutationOperation, session );
this.generatedValuesDelegate = generatedValuesDelegate;
this.statementGroup = new PreparedStatementGroupSingleTable( mutationOperation, generatedValuesDelegate, session );
prepareForNonBatchedWork( null, session );
}
@Override
protected PreparedStatementGroupSingleTable getStatementGroup() {
return statementGroup;
}
@Override
protected GeneratedValues performNonBatchedOperations(
Object modelReference,
ValuesAnalysis valuesAnalysis,
TableInclusionChecker inclusionChecker,
OperationResultChecker resultChecker,
SharedSessionContractImplementor session) {
if ( generatedValuesDelegate != null ) {
return generatedValuesDelegate.performMutation(
statementGroup.getSingleStatementDetails(),
getJdbcValueBindings(),
modelReference,
session
);
}
else {
performNonBatchedMutation(
statementGroup.getSingleStatementDetails(),
null,
getJdbcValueBindings(),
inclusionChecker,
resultChecker,
session
);
return null;
}
}
@Override
public void release() {
// nothing to do - `#performNonBatchedMutation` already releases the statement
assert statementGroup.getSingleStatementDetails().getStatement() == null;
}
}
| MutationExecutorSingleNonBatched |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/jpa/lock/QueryLockingTest.java | {
"start": 1723,
"end": 11479
} | class ____ {
@Test
public void testOverallLockMode(EntityManagerFactoryScope scope) {
scope.inTransaction( em -> {
org.hibernate.query.Query query = em.createQuery( "from Lockable l" )
.unwrap( org.hibernate.query.Query.class );
assertEquals( LockMode.NONE, query.getLockOptions().getLockMode() );
// NOTE : LockModeType.READ should map to LockMode.OPTIMISTIC
query.setLockMode( LockModeType.READ );
assertEquals( LockMode.OPTIMISTIC, query.getLockOptions().getLockMode() );
query.setHint( HINT_NATIVE_LOCK_MODE, LockModeType.PESSIMISTIC_WRITE );
assertEquals( LockMode.PESSIMISTIC_WRITE, query.getLockOptions().getLockMode() );
} );
}
@Test
@JiraKey( value = "HHH-8756" )
public void testNoneLockModeForNonSelectQueryAllowed(EntityManagerFactoryScope scope) {
scope.inTransaction( em -> {
org.hibernate.query.Query query = em.createQuery( "delete from Lockable l" )
.unwrap( org.hibernate.query.Query.class );
assertEquals( LockMode.NONE, query.getLockOptions().getLockMode() );
query.setLockMode( LockModeType.NONE );
} );
// ensure other modes still throw the exception
scope.inTransaction( em -> {
org.hibernate.query.Query query = em.createQuery( "delete from Lockable l" ).unwrap( org.hibernate.query.Query.class );
assertEquals( LockMode.NONE, query.getLockOptions().getLockMode() );
Assertions.assertThrows(
IllegalStateException.class,
() -> {
// Throws IllegalStateException
query.setLockMode( LockModeType.PESSIMISTIC_WRITE );
},
"IllegalStateException should have been thrown."
);
} );
}
@Test
public void testNativeSql(EntityManagerFactoryScope scope) {
scope.inTransaction( em -> {
NativeQuery query = em.createNativeQuery( "select * from lockable l" ).unwrap( NativeQuery.class );
// the spec disallows calling setLockMode() and getLockMode()
// on a native SQL query and requires that an IllegalStateException
// be thrown
Assertions.assertThrows(
IllegalStateException.class,
() -> query.setLockMode( LockModeType.READ ),
"Should have thrown IllegalStateException"
);
Assertions.assertThrows(
IllegalStateException.class,
() -> query.getLockMode(),
"Should have thrown IllegalStateException"
);
// however, we should be able to set it using hints
query.setHint( HINT_NATIVE_LOCK_MODE, LockModeType.READ );
// NOTE : LockModeType.READ should map to LockMode.OPTIMISTIC
assertEquals( LockMode.OPTIMISTIC, query.getLockOptions().getLockMode() );
query.setHint( HINT_NATIVE_LOCK_MODE, LockModeType.PESSIMISTIC_WRITE );
assertEquals( LockMode.PESSIMISTIC_WRITE, query.getLockOptions().getLockMode() );
} );
}
@Test
@SkipForDialect( dialectClass = CockroachDialect.class )
public void testPessimisticForcedIncrementOverall(EntityManagerFactoryScope scope) {
Lockable lock = new Lockable( "name" );
scope.inTransaction( em -> em.persist( lock ) );
Integer initial = lock.getVersion();
assertNotNull( initial );
Integer id = scope.fromTransaction( em -> {
Lockable reread = em.createQuery( "from Lockable", Lockable.class ).setLockMode( LockModeType.PESSIMISTIC_FORCE_INCREMENT ).getSingleResult();
assertFalse( reread.getVersion().equals( initial ) );
return reread.getId();
} );
scope.inTransaction( em -> em.remove( em.getReference( Lockable.class, id ) ) );
}
@Test
@SkipForDialect( dialectClass = CockroachDialect.class )
public void testPessimisticForcedIncrementSpecific(EntityManagerFactoryScope scope) {
Lockable lock = new Lockable( "name" );
scope.inTransaction( em -> em.persist( lock ) );
Integer initial = lock.getVersion();
assertNotNull( initial );
Integer id = scope.fromTransaction( em -> {
Lockable reread = em.createQuery( "from Lockable l", Lockable.class )
.setHint( HINT_NATIVE_LOCK_MODE + ".l", LockModeType.PESSIMISTIC_FORCE_INCREMENT )
.getSingleResult();
assertFalse( reread.getVersion().equals( initial ) );
return reread.getId();
} );
scope.inTransaction( em -> em.remove( em.getReference( Lockable.class, id ) ) );
}
@Test
public void testOptimisticForcedIncrementOverall(EntityManagerFactoryScope scope) {
Lockable lock = new Lockable( "name" );
scope.inTransaction( em -> em.persist( lock ) );
Integer initial = lock.getVersion();
assertNotNull( initial );
Integer id = scope.fromTransaction( em -> {
Lockable reread = em.createQuery( "from Lockable", Lockable.class ).setLockMode( LockModeType.OPTIMISTIC_FORCE_INCREMENT ).getSingleResult();
assertEquals( initial, reread.getVersion() );
return reread.getId();
} );
scope.inTransaction( em -> {
Lockable reread = em.createQuery( "from Lockable", Lockable.class ).getSingleResult();
assertFalse( reread.getVersion().equals( initial ) );
} );
scope.inTransaction( em -> em.remove( em.getReference( Lockable.class, id ) ) );
}
@Test
public void testOptimisticForcedIncrementSpecific(EntityManagerFactoryScope scope) {
Lockable lock = new Lockable( "name" );
scope.inTransaction( em -> em.persist( lock ) );
Integer initial = lock.getVersion();
assertNotNull( initial );
Integer id = scope.fromTransaction( em -> {
Lockable reread = em.createQuery( "from Lockable l", Lockable.class )
.setHint( HINT_NATIVE_LOCK_MODE, LockModeType.OPTIMISTIC_FORCE_INCREMENT )
.getSingleResult();
assertEquals( initial, reread.getVersion() );
return reread.getId();
} );
scope.inTransaction( em -> {
Lockable reread = em.createQuery( "from Lockable", Lockable.class ).getSingleResult();
assertFalse( reread.getVersion().equals( initial ) );
} );
scope.inTransaction( em -> em.remove( em.getReference( Lockable.class, id ) ) );
}
@Test
public void testOptimisticOverall(EntityManagerFactoryScope scope) {
Lockable lock = new Lockable( "name" );
scope.inTransaction( em -> em.persist( lock ) );
Integer initial = lock.getVersion();
assertNotNull( initial );
Integer id = scope.fromTransaction( em -> {
Lockable reread = em.createQuery( "from Lockable", Lockable.class )
.setLockMode( LockModeType.OPTIMISTIC )
.getSingleResult();
assertEquals( initial, reread.getVersion() );
assertTrue( em.unwrap( SessionImplementor.class ).getActionQueue().hasBeforeTransactionActions() );
return reread.getId();
} );
scope.inTransaction( em -> {
Lockable reread = em.createQuery( "from Lockable", Lockable.class ).getSingleResult();
assertEquals( initial, reread.getVersion() );
} );
scope.inTransaction( em -> em.remove( em.getReference( Lockable.class, id ) ) );
}
@Test
public void testOptimisticSpecific(EntityManagerFactoryScope scope) {
Lockable lock = new Lockable( "name" );
scope.inTransaction( em -> em.persist( lock ) );
Integer initial = lock.getVersion();
assertNotNull( initial );
Integer id = scope.fromTransaction( em -> {
Lockable reread = em.createQuery( "from Lockable l", Lockable.class )
.setHint( HINT_NATIVE_LOCK_MODE, LockModeType.OPTIMISTIC )
.getSingleResult();
assertEquals( initial, reread.getVersion() );
assertTrue( em.unwrap( SessionImplementor.class ).getActionQueue().hasBeforeTransactionActions() );
return reread.getId();
} );
scope.inTransaction( em -> {
Lockable reread = em.createQuery( "from Lockable", Lockable.class ).getSingleResult();
assertEquals( initial, reread.getVersion() );
} );
scope.inTransaction( em -> em.remove( em.getReference( Lockable.class, id ) ) );
}
/**
* lock some entities via a query and check the resulting lock mode type via EntityManager
*/
@Test
@RequiresDialectFeature( feature = DialectFeatureChecks.SupportFollowOnLocking.class, reverse = true)
public void testEntityLockModeStateAfterQueryLocking(EntityManagerFactoryScope scope) {
// Create some test data
scope.inEntityManager( em -> {
em.getTransaction().begin();
em.persist( new LocalEntity( 1, "test" ) );
em.getTransaction().commit();
// em.close();
// issue the query with locking
// em = getOrCreateEntityManager();
em.getTransaction().begin();
Query query = em.createQuery( "select l from LocalEntity l" );
assertEquals( LockModeType.NONE, query.getLockMode() );
query.setLockMode( LockModeType.PESSIMISTIC_READ );
assertEquals( LockModeType.PESSIMISTIC_READ, query.getLockMode() );
List<LocalEntity> results = query.getResultList();
// and check the lock mode for each result
for ( LocalEntity e : results ) {
assertEquals( LockModeType.PESSIMISTIC_READ, em.getLockMode( e ) );
}
em.getTransaction().commit();
} );
// clean up test data
scope.inTransaction( em -> em.createQuery( "delete from LocalEntity" ).executeUpdate() );
}
@Test
@JiraKey(value = "HHH-11376")
@RequiresDialect( SQLServerDialect.class )
public void testCriteriaWithPessimisticLock(EntityManagerFactoryScope scope) {
scope.inTransaction( entityManager -> {
CriteriaBuilder builder = entityManager.getCriteriaBuilder();
CriteriaQuery<Person> criteria = builder.createQuery( Person.class );
Root<Person> personRoot = criteria.from( Person.class );
ParameterExpression<Long> personIdParameter = builder.parameter( Long.class );
// Eagerly fetch the parent
personRoot.fetch( "parent", JoinType.LEFT );
criteria.select( personRoot )
.where( builder.equal( personRoot.get( "id" ), personIdParameter ) );
final List<Person> resultList = entityManager.createQuery( criteria )
.setParameter( personIdParameter, 1L )
.setLockMode( LockModeType.PESSIMISTIC_WRITE )
.getResultList();
assertTrue( resultList.isEmpty() );
} );
}
@Entity(name = "LocalEntity")
@Table(name = "LocalEntity")
public static | QueryLockingTest |
java | spring-projects__spring-framework | spring-webmvc/src/test/java/org/springframework/web/servlet/mvc/method/annotation/ServletAnnotationControllerHandlerMethodTests.java | {
"start": 145947,
"end": 147334
} | class ____ {
@PostMapping("/foo")
public ResponseEntity<String> foo(HttpEntity<byte[]> requestEntity) {
assertThat(requestEntity).isNotNull();
assertThat(requestEntity.getHeaders().getFirst("MyRequestHeader")).isEqualTo("MyValue");
String body = new String(requestEntity.getBody(), StandardCharsets.UTF_8);
assertThat(body).isEqualTo("Hello World");
URI location = URI.create("/foo");
return ResponseEntity.created(location).header("MyResponseHeader", "MyValue").body(body);
}
@GetMapping("/bar")
public ResponseEntity<Void> bar() {
return ResponseEntity.notFound().header("MyResponseHeader", "MyValue").build();
}
@GetMapping("/baz")
public ResponseEntity<String> baz() {
return ResponseEntity.ok().header("MyResponseHeader", "MyValue").body("body");
}
@RequestMapping(path = "/stores", method = RequestMethod.HEAD)
public ResponseEntity<Void> headResource() {
return ResponseEntity.ok().header("h1", "v1").build();
}
@GetMapping("/stores")
public ResponseEntity<String> getResource() {
return ResponseEntity.ok().body("body");
}
@GetMapping("/test-entity")
public ResponseEntity<TestEntity> testEntity() {
TestEntity entity = new TestEntity();
entity.setName("Foo Bar");
return ResponseEntity.ok().contentType(MediaType.APPLICATION_XML).body(entity);
}
}
@XmlRootElement
static | ResponseEntityController |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/query/results/internal/dynamic/DynamicResultBuilderBasicConverted.java | {
"start": 1094,
"end": 5294
} | class ____<O,R> implements DynamicResultBuilderBasic {
private final String columnAlias;
private final BasicValueConverter<O,R> basicValueConverter;
public DynamicResultBuilderBasicConverted(
String columnAlias,
Class<O> domainJavaType,
Class<R> jdbcJavaType,
AttributeConverter<O, R> converter,
SessionFactoryImplementor sessionFactory) {
this.columnAlias = columnAlias;
final var javaTypeRegistry = sessionFactory.getTypeConfiguration().getJavaTypeRegistry();
@SuppressWarnings("unchecked")
final var converterClass = (Class<AttributeConverter<O, R>>) converter.getClass();
this.basicValueConverter = new AttributeConverterBean<>(
new ProvidedInstanceManagedBeanImpl<>( converter ),
javaTypeRegistry.resolveDescriptor( converterClass ),
javaTypeRegistry.resolveDescriptor( domainJavaType ),
javaTypeRegistry.resolveDescriptor( jdbcJavaType )
);
}
public DynamicResultBuilderBasicConverted(
String columnAlias,
Class<O> domainJavaType,
Class<R> jdbcJavaType,
Class<? extends AttributeConverter<O, R>> converterJavaType,
SessionFactoryImplementor sessionFactory) {
this.columnAlias = columnAlias;
final var beans = sessionFactory.getManagedBeanRegistry();
final var javaTypeRegistry = sessionFactory.getTypeConfiguration().getJavaTypeRegistry();
this.basicValueConverter = new AttributeConverterBean<>(
beans.getBean( converterJavaType ),
javaTypeRegistry.resolveDescriptor( converterJavaType ),
javaTypeRegistry.resolveDescriptor( domainJavaType ),
javaTypeRegistry.resolveDescriptor( jdbcJavaType )
);
}
@Override
public Class<?> getJavaType() {
return basicValueConverter.getDomainJavaType().getJavaTypeClass();
}
@Override
public DynamicResultBuilderBasicConverted<?,?> cacheKeyInstance() {
return this;
}
@Override
public BasicResult<?> buildResult(
JdbcValuesMetadata jdbcResultsMetadata,
int resultPosition,
DomainResultCreationState domainResultCreationState) {
final var sqlAstCreationState = domainResultCreationState.getSqlAstCreationState();
final var typeConfiguration = sqlAstCreationState.getCreationContext().getTypeConfiguration();
final var sqlExpressionResolver = sqlAstCreationState.getSqlExpressionResolver();
final String columnName =
columnAlias != null
? columnAlias
: jdbcResultsMetadata.resolveColumnName( resultPosition + 1 );
final var sqlSelection = sqlExpressionResolver.resolveSqlSelection(
sqlExpressionResolver.resolveSqlExpression(
SqlExpressionResolver.createColumnReferenceKey( columnName ),
state -> resultSetMappingSqlSelection( jdbcResultsMetadata, resultPosition, typeConfiguration )
),
basicValueConverter.getRelationalJavaType(),
null,
typeConfiguration
);
return new BasicResult<>(
sqlSelection.getValuesArrayPosition(),
columnAlias,
basicValueConverter.getDomainJavaType(),
basicValueConverter,
null,
false,
false
);
}
private ResultSetMappingSqlSelection resultSetMappingSqlSelection(
JdbcValuesMetadata jdbcResultsMetadata, int resultPosition, TypeConfiguration typeConfiguration) {
final int jdbcPosition =
columnAlias != null
? jdbcResultsMetadata.resolveColumnPosition( columnAlias )
: resultPosition + 1;
final var basicType = jdbcResultsMetadata.resolveType(
jdbcPosition,
basicValueConverter.getRelationalJavaType(),
typeConfiguration
);
final int valuesArrayPosition = ResultsHelper.jdbcPositionToValuesArrayPosition( jdbcPosition );
return new ResultSetMappingSqlSelection( valuesArrayPosition, (BasicValuedMapping) basicType );
}
@Override
public boolean equals(Object o) {
if ( this == o ) {
return true;
}
if ( o == null || getClass() != o.getClass() ) {
return false;
}
final var that = (DynamicResultBuilderBasicConverted<?, ?>) o;
return Objects.equals( columnAlias, that.columnAlias )
&& basicValueConverter.equals( that.basicValueConverter );
}
@Override
public int hashCode() {
int result = columnAlias != null ? columnAlias.hashCode() : 0;
result = 31 * result + basicValueConverter.hashCode();
return result;
}
}
| DynamicResultBuilderBasicConverted |
java | apache__kafka | streams/src/main/java/org/apache/kafka/streams/kstream/internals/AbstractStream.java | {
"start": 1435,
"end": 1949
} | class ____ follow the serde specification precedence ordering as:
*
* 1) Overridden values via control objects (e.g. Materialized, Serialized, Consumed, etc)
* 2) Serdes that can be inferred from the operator itself (e.g. groupBy().count(), where value serde can default to `LongSerde`).
* 3) Serde inherited from parent operator if possible (note if the key / value types have been changed, then the corresponding serde cannot be inherited).
* 4) Default serde specified in the config.
*/
public abstract | should |
java | apache__flink | flink-table/flink-table-runtime/src/main/java/org/apache/flink/table/runtime/operators/join/HashJoinOperator.java | {
"start": 21873,
"end": 22769
} | class ____ extends HashJoinOperator {
BuildLeftSemiOrAntiHashJoinOperator(HashJoinParameter parameter) {
super(parameter);
}
@Override
public void join(RowIterator<BinaryRowData> buildIter, RowData probeRow) throws Exception {
if (buildIter.advanceNext()) {
if (probeRow != null) { // Probe phase
// we must iterator to set probedSet.
//noinspection StatementWithEmptyBody
while (buildIter.advanceNext()) {}
} else { // End Probe phase, iterator build side elements.
collector.collect(buildIter.getRow());
while (buildIter.advanceNext()) {
collector.collect(buildIter.getRow());
}
}
}
}
}
}
| BuildLeftSemiOrAntiHashJoinOperator |
java | mapstruct__mapstruct | processor/src/test/java/org/mapstruct/ap/test/bugs/_2393/Country.java | {
"start": 232,
"end": 407
} | class ____ {
private final String name;
public Country(String name) {
this.name = name;
}
public String getName() {
return name;
}
}
| Country |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/checkreturnvalue/CanIgnoreReturnValueSuggesterTest.java | {
"start": 18000,
"end": 18518
} | class ____ {
private String name;
public Client setName(String name) {
this.name = name;
return this;
}
public Client getValue2() {
return new Client();
}
}
""")
.addOutputLines(
"Client.java",
"""
package com.google.frobber;
import com.google.errorprone.annotations.CanIgnoreReturnValue;
public final | Client |
java | apache__dubbo | dubbo-common/src/test/java/org/apache/dubbo/common/utils/NetUtilsInterfaceDisplayNameHasMetaCharactersTest.java | {
"start": 1702,
"end": 5525
} | class ____
try (MockedStatic<NetworkInterface> mockedStaticNetif = Mockito.mockStatic(NetworkInterface.class)) {
NetworkInterface mockIgnoredNetif = Mockito.mock(NetworkInterface.class);
NetworkInterface mockSelectedNetif = Mockito.mock(NetworkInterface.class);
NetworkInterface[] mockNetifs = {mockIgnoredNetif, mockSelectedNetif};
Enumeration<NetworkInterface> mockEnumIfs = new Enumeration<NetworkInterface>() {
private int i = 0;
public NetworkInterface nextElement() {
if (mockNetifs != null && i < mockNetifs.length) {
NetworkInterface netif = mockNetifs[i++];
return netif;
} else {
throw new NoSuchElementException();
}
}
public boolean hasMoreElements() {
return (mockNetifs != null && i < mockNetifs.length);
}
};
InetAddress mockSelectedAddr = Mockito.mock(InetAddress.class);
InetAddress[] mockAddrs = {mockSelectedAddr};
Enumeration<InetAddress> mockEnumAddrs = new Enumeration<InetAddress>() {
private int i = 0;
public InetAddress nextElement() {
if (mockAddrs != null && i < mockAddrs.length) {
InetAddress addr = mockAddrs[i++];
return addr;
} else {
throw new NoSuchElementException();
}
}
public boolean hasMoreElements() {
return (mockAddrs != null && i < mockAddrs.length);
}
};
// mock static method getNetworkInterfaces
mockedStaticNetif
.when(() -> {
NetworkInterface.getNetworkInterfaces();
})
.thenReturn(mockEnumIfs);
Mockito.when(mockIgnoredNetif.isUp()).thenReturn(true);
Mockito.when(mockIgnoredNetif.getDisplayName()).thenReturn(IGNORED_DISPLAY_NAME_HAS_METACHARACTERS);
Mockito.when(mockSelectedNetif.isUp()).thenReturn(true);
Mockito.when(mockSelectedNetif.getDisplayName()).thenReturn(SELECTED_DISPLAY_NAME);
Mockito.when(mockSelectedNetif.getInetAddresses()).thenReturn(mockEnumAddrs);
Mockito.when(mockSelectedAddr.isLoopbackAddress()).thenReturn(false);
Mockito.when(mockSelectedAddr.getHostAddress()).thenReturn(SELECTED_HOST_ADDR);
Mockito.when(mockSelectedAddr.isReachable(Mockito.anyInt())).thenReturn(true);
this.setIgnoredInterfaces(IGNORED_DISPLAY_NAME_HAS_METACHARACTERS);
NetworkInterface newNetworkInterface = NetUtils.findNetworkInterface();
assertTrue(!IGNORED_DISPLAY_NAME_HAS_METACHARACTERS.equals(newNetworkInterface.getDisplayName()));
} finally {
// recover the origin ignored interfaces
this.setIgnoredInterfaces(originIgnoredInterfaces);
}
}
private String getIgnoredInterfaces() {
return SystemPropertyConfigUtils.getSystemProperty(
CommonConstants.DubboProperty.DUBBO_NETWORK_IGNORED_INTERFACE);
}
private void setIgnoredInterfaces(String ignoredInterfaces) {
if (ignoredInterfaces != null) {
SystemPropertyConfigUtils.setSystemProperty(
CommonConstants.DubboProperty.DUBBO_NETWORK_IGNORED_INTERFACE, ignoredInterfaces);
} else {
SystemPropertyConfigUtils.setSystemProperty(
CommonConstants.DubboProperty.DUBBO_NETWORK_IGNORED_INTERFACE, "");
}
}
}
| NetworkInterface |
java | quarkusio__quarkus | extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/ArcConfig.java | {
"start": 3914,
"end": 5244
} | class ____ as defined by {@link Class#getSimpleName()}, i.e. {@code Foo}</li>
* <li>a package name with suffix {@code .*}, i.e. {@code org.acme.*}, matches a package</li>
* <li>a package name with suffix {@code .**}, i.e. {@code org.acme.**}, matches a package that starts with the value</li>
* </ul>
* Each element value is used to match an alternative bean class, an alternative stereotype annotation type or a bean class
* that declares an alternative producer. If any value matches then the priority of {@link Integer#MAX_VALUE} is used for
* the relevant bean. The priority declared via {@link jakarta.annotation.Priority} is overridden.
*/
Optional<List<String>> selectedAlternatives();
/**
* If set to true then {@code jakarta.enterprise.inject.Produces} is automatically added to all non-void methods that are
* annotated with a scope annotation, a stereotype or a qualifier, and are not annotated with {@code Inject} or
* {@code Produces}, and no parameter is annotated with {@code Disposes}, {@code Observes} or {@code ObservesAsync}.
*/
@WithDefault("true")
boolean autoProducerMethods();
/**
* The list of types that should be excluded from discovery.
* <p>
* An element value can be:
* <ul>
* <li>a fully qualified | name |
java | FasterXML__jackson-databind | src/test/java/tools/jackson/databind/HandlerInstantiationTest.java | {
"start": 2524,
"end": 2778
} | class ____ extends KeyDeserializer
{
public MyKeyDeserializer() { }
@Override
public Object deserializeKey(String key, DeserializationContext ctxt)
{
return "KEY";
}
}
static | MyKeyDeserializer |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/runtime/scheduler/DefaultExecutionGraphFactory.java | {
"start": 2754,
"end": 10555
} | class ____ implements ExecutionGraphFactory {
private final Configuration configuration;
private final ClassLoader userCodeClassLoader;
private final ExecutionDeploymentTracker executionDeploymentTracker;
private final ScheduledExecutorService futureExecutor;
private final Executor ioExecutor;
private final Duration rpcTimeout;
private final JobManagerJobMetricGroup jobManagerJobMetricGroup;
private final BlobWriter blobWriter;
private final ShuffleMaster<?> shuffleMaster;
private final JobMasterPartitionTracker jobMasterPartitionTracker;
private final boolean isDynamicGraph;
private final ExecutionJobVertex.Factory executionJobVertexFactory;
private final boolean nonFinishedHybridPartitionShouldBeUnknown;
public DefaultExecutionGraphFactory(
Configuration configuration,
ClassLoader userCodeClassLoader,
ExecutionDeploymentTracker executionDeploymentTracker,
ScheduledExecutorService futureExecutor,
Executor ioExecutor,
Duration rpcTimeout,
JobManagerJobMetricGroup jobManagerJobMetricGroup,
BlobWriter blobWriter,
ShuffleMaster<?> shuffleMaster,
JobMasterPartitionTracker jobMasterPartitionTracker) {
this(
configuration,
userCodeClassLoader,
executionDeploymentTracker,
futureExecutor,
ioExecutor,
rpcTimeout,
jobManagerJobMetricGroup,
blobWriter,
shuffleMaster,
jobMasterPartitionTracker,
false,
new ExecutionJobVertex.Factory(),
false);
}
public DefaultExecutionGraphFactory(
Configuration configuration,
ClassLoader userCodeClassLoader,
ExecutionDeploymentTracker executionDeploymentTracker,
ScheduledExecutorService futureExecutor,
Executor ioExecutor,
Duration rpcTimeout,
JobManagerJobMetricGroup jobManagerJobMetricGroup,
BlobWriter blobWriter,
ShuffleMaster<?> shuffleMaster,
JobMasterPartitionTracker jobMasterPartitionTracker,
boolean isDynamicGraph,
ExecutionJobVertex.Factory executionJobVertexFactory,
boolean nonFinishedHybridPartitionShouldBeUnknown) {
this.configuration = configuration;
this.userCodeClassLoader = userCodeClassLoader;
this.executionDeploymentTracker = executionDeploymentTracker;
this.futureExecutor = futureExecutor;
this.ioExecutor = ioExecutor;
this.rpcTimeout = rpcTimeout;
this.jobManagerJobMetricGroup = jobManagerJobMetricGroup;
this.blobWriter = blobWriter;
this.shuffleMaster = shuffleMaster;
this.jobMasterPartitionTracker = jobMasterPartitionTracker;
this.isDynamicGraph = isDynamicGraph;
this.executionJobVertexFactory = checkNotNull(executionJobVertexFactory);
this.nonFinishedHybridPartitionShouldBeUnknown = nonFinishedHybridPartitionShouldBeUnknown;
}
@Override
public ExecutionGraph createAndRestoreExecutionGraph(
JobGraph jobGraph,
CompletedCheckpointStore completedCheckpointStore,
CheckpointsCleaner checkpointsCleaner,
CheckpointIDCounter checkpointIdCounter,
CheckpointStatsTracker checkpointStatsTracker,
TaskDeploymentDescriptorFactory.PartitionLocationConstraint partitionLocationConstraint,
long initializationTimestamp,
VertexAttemptNumberStore vertexAttemptNumberStore,
VertexParallelismStore vertexParallelismStore,
ExecutionStateUpdateListener executionStateUpdateListener,
MarkPartitionFinishedStrategy markPartitionFinishedStrategy,
ExecutionPlanSchedulingContext executionPlanSchedulingContext,
Logger log)
throws Exception {
ExecutionDeploymentListener executionDeploymentListener =
new ExecutionDeploymentTrackerDeploymentListenerAdapter(executionDeploymentTracker);
ExecutionStateUpdateListener combinedExecutionStateUpdateListener =
(execution, previousState, newState) -> {
executionStateUpdateListener.onStateUpdate(execution, previousState, newState);
if (newState.isTerminal()) {
executionDeploymentTracker.stopTrackingDeploymentOf(execution);
}
};
final ExecutionGraph newExecutionGraph =
DefaultExecutionGraphBuilder.buildGraph(
jobGraph,
configuration,
futureExecutor,
ioExecutor,
userCodeClassLoader,
completedCheckpointStore,
checkpointsCleaner,
checkpointIdCounter,
rpcTimeout,
blobWriter,
log,
shuffleMaster,
jobMasterPartitionTracker,
partitionLocationConstraint,
executionDeploymentListener,
combinedExecutionStateUpdateListener,
initializationTimestamp,
vertexAttemptNumberStore,
vertexParallelismStore,
checkpointStatsTracker,
isDynamicGraph,
executionJobVertexFactory,
markPartitionFinishedStrategy,
nonFinishedHybridPartitionShouldBeUnknown,
jobManagerJobMetricGroup,
executionPlanSchedulingContext);
final CheckpointCoordinator checkpointCoordinator =
newExecutionGraph.getCheckpointCoordinator();
if (checkpointCoordinator != null) {
// check whether we find a valid checkpoint
if (!checkpointCoordinator.restoreInitialCheckpointIfPresent(
new HashSet<>(newExecutionGraph.getAllVertices().values()))) {
// check whether we can restore from a savepoint
tryRestoreExecutionGraphFromSavepoint(
newExecutionGraph, jobGraph.getSavepointRestoreSettings());
}
}
return newExecutionGraph;
}
/**
* Tries to restore the given {@link ExecutionGraph} from the provided {@link
* SavepointRestoreSettings}, iff checkpointing is enabled.
*
* @param executionGraphToRestore {@link ExecutionGraph} which is supposed to be restored
* @param savepointRestoreSettings {@link SavepointRestoreSettings} containing information about
* the savepoint to restore from
* @throws Exception if the {@link ExecutionGraph} could not be restored
*/
private void tryRestoreExecutionGraphFromSavepoint(
ExecutionGraph executionGraphToRestore,
SavepointRestoreSettings savepointRestoreSettings)
throws Exception {
if (savepointRestoreSettings.restoreSavepoint()) {
final CheckpointCoordinator checkpointCoordinator =
executionGraphToRestore.getCheckpointCoordinator();
if (checkpointCoordinator != null) {
checkpointCoordinator.restoreSavepoint(
savepointRestoreSettings,
executionGraphToRestore.getAllVertices(),
userCodeClassLoader);
}
}
}
}
| DefaultExecutionGraphFactory |
java | eclipse-vertx__vert.x | vertx-core/src/test/java/io/vertx/tests/cluster/NodeInfoSerializationTest.java | {
"start": 875,
"end": 1766
} | class ____ {
@Parameterized.Parameters
public static Collection<Object[]> data() {
return Arrays.asList(new Object[][]{
{new NodeInfo("foo", 13004, null)},
{new NodeInfo("bar", 59500, new JsonObject())},
{new NodeInfo("baz", 30120, new JsonObject().put("foo", "bar"))}
});
}
private final NodeInfo expected;
public NodeInfoSerializationTest(NodeInfo expected) {
this.expected = expected;
}
@Test
public void testSerialization() {
Buffer padding = TestUtils.randomBuffer(TestUtils.randomShort());
Buffer buffer = Buffer.buffer();
buffer.appendBuffer(padding);
expected.writeToBuffer(buffer);
NodeInfo registrationInfo = new NodeInfo();
int pos = registrationInfo.readFromBuffer(padding.length(), buffer);
assertEquals(expected, registrationInfo);
assertEquals(buffer.length(), pos);
}
}
| NodeInfoSerializationTest |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/records/impl/pb/SchedulingRequestPBImpl.java | {
"start": 2023,
"end": 9729
} | class ____ extends SchedulingRequest {
SchedulingRequestProto proto = SchedulingRequestProto.getDefaultInstance();
SchedulingRequestProto.Builder builder = null;
boolean viaProto = false;
private Priority priority = null;
private ExecutionTypeRequest executionType = null;
private Set<String> allocationTags = null;
private ResourceSizing resourceSizing = null;
private PlacementConstraint placementConstraint = null;
public SchedulingRequestPBImpl() {
builder = SchedulingRequestProto.newBuilder();
}
public SchedulingRequestPBImpl(SchedulingRequestProto proto) {
this.proto = proto;
viaProto = true;
}
public SchedulingRequestProto getProto() {
mergeLocalToProto();
proto = viaProto ? proto : builder.build();
viaProto = true;
return proto;
}
private void mergeLocalToBuilder() {
if (this.priority != null) {
builder.setPriority(convertToProtoFormat(this.priority));
}
if (this.executionType != null) {
builder.setExecutionType(convertToProtoFormat(this.executionType));
}
if (this.allocationTags != null) {
builder.clearAllocationTags();
builder.addAllAllocationTags(this.allocationTags);
}
if (this.resourceSizing != null) {
builder.setResourceSizing(convertToProtoFormat(this.resourceSizing));
}
if (this.placementConstraint != null) {
builder.setPlacementConstraint(
convertToProtoFormat(this.placementConstraint));
}
}
private void mergeLocalToProto() {
if (viaProto) {
maybeInitBuilder();
}
mergeLocalToBuilder();
proto = builder.build();
viaProto = true;
}
private void maybeInitBuilder() {
if (viaProto || builder == null) {
builder = SchedulingRequestProto.newBuilder(proto);
}
viaProto = false;
}
@Override
public long getAllocationRequestId() {
SchedulingRequestProtoOrBuilder p = viaProto ? proto : builder;
return (p.getAllocationRequestId());
}
@Override
public void setAllocationRequestId(long allocationRequestId) {
maybeInitBuilder();
builder.setAllocationRequestId(allocationRequestId);
}
@Override
public Priority getPriority() {
SchedulingRequestProtoOrBuilder p = viaProto ? proto : builder;
if (this.priority != null) {
return this.priority;
}
if (!p.hasPriority()) {
return null;
}
this.priority = convertFromProtoFormat(p.getPriority());
return this.priority;
}
@Override
public void setPriority(Priority priority) {
maybeInitBuilder();
if (priority == null) {
builder.clearPriority();
}
this.priority = priority;
}
@Override
public ExecutionTypeRequest getExecutionType() {
SchedulingRequestProtoOrBuilder p = viaProto ? proto : builder;
if (this.executionType != null) {
return this.executionType;
}
if (!p.hasExecutionType()) {
return null;
}
this.executionType = convertFromProtoFormat(p.getExecutionType());
return this.executionType;
}
@Override
public void setExecutionType(ExecutionTypeRequest executionType) {
maybeInitBuilder();
if (executionType == null) {
builder.clearExecutionType();
}
this.executionType = executionType;
}
@Override
public Set<String> getAllocationTags() {
initAllocationTags();
return this.allocationTags;
}
@Override
public void setAllocationTags(Set<String> allocationTags) {
maybeInitBuilder();
builder.clearAllocationTags();
this.allocationTags = allocationTags;
}
@Override
public ResourceSizing getResourceSizing() {
SchedulingRequestProtoOrBuilder p = viaProto ? proto : builder;
if (this.resourceSizing != null) {
return this.resourceSizing;
}
if (!p.hasResourceSizing()) {
return null;
}
this.resourceSizing = convertFromProtoFormat(p.getResourceSizing());
return this.resourceSizing;
}
@Override
public void setResourceSizing(ResourceSizing resourceSizing) {
maybeInitBuilder();
if (resourceSizing == null) {
builder.clearResourceSizing();
}
this.resourceSizing = resourceSizing;
}
@Override
public PlacementConstraint getPlacementConstraint() {
SchedulingRequestProtoOrBuilder p = viaProto ? proto : builder;
if (this.placementConstraint != null) {
return this.placementConstraint;
}
if (!p.hasPlacementConstraint()) {
return null;
}
this.placementConstraint =
convertFromProtoFormat(p.getPlacementConstraint());
return this.placementConstraint;
}
@Override
public void setPlacementConstraint(PlacementConstraint placementConstraint) {
maybeInitBuilder();
if (placementConstraint == null) {
builder.clearPlacementConstraint();
}
this.placementConstraint = placementConstraint;
}
private PriorityPBImpl convertFromProtoFormat(PriorityProto p) {
return new PriorityPBImpl(p);
}
private PriorityProto convertToProtoFormat(Priority p) {
return ((PriorityPBImpl) p).getProto();
}
private ExecutionTypeRequestPBImpl convertFromProtoFormat(
ExecutionTypeRequestProto p) {
return new ExecutionTypeRequestPBImpl(p);
}
private ExecutionTypeRequestProto convertToProtoFormat(
ExecutionTypeRequest p) {
return ((ExecutionTypeRequestPBImpl) p).getProto();
}
private ResourceSizingPBImpl convertFromProtoFormat(ResourceSizingProto p) {
return new ResourceSizingPBImpl(p);
}
private ResourceSizingProto convertToProtoFormat(ResourceSizing p) {
return ((ResourceSizingPBImpl) p).getProto();
}
private PlacementConstraint convertFromProtoFormat(
PlacementConstraintProto c) {
PlacementConstraintFromProtoConverter fromProtoConverter =
new PlacementConstraintFromProtoConverter(c);
return fromProtoConverter.convert();
}
private PlacementConstraintProto convertToProtoFormat(PlacementConstraint c) {
PlacementConstraintToProtoConverter toProtoConverter =
new PlacementConstraintToProtoConverter(c);
return toProtoConverter.convert();
}
private void initAllocationTags() {
if (this.allocationTags != null) {
return;
}
SchedulingRequestProtoOrBuilder p = viaProto ? proto : builder;
this.allocationTags = new HashSet<>();
this.allocationTags.addAll(p.getAllocationTagsList());
}
@Override
public int hashCode() {
return getProto().hashCode();
}
@Override
public boolean equals(Object other) {
if (other == null) {
return false;
}
if (other instanceof SchedulingRequest) {
if (this == other) {
return true;
}
SchedulingRequest that = (SchedulingRequest) other;
if (getAllocationRequestId() != that.getAllocationRequestId()) {
return false;
}
if (!getAllocationTags().equals(that.getAllocationTags())) {
return false;
}
if (!getPriority().equals(that.getPriority())) {
return false;
}
if(!getExecutionType().equals(that.getExecutionType())) {
return false;
}
if(!getResourceSizing().equals(that.getResourceSizing())) {
return false;
}
return getPlacementConstraint().equals(that.getPlacementConstraint());
}
return false;
}
@Override
public String toString() {
return "SchedulingRequestPBImpl{" +
"priority=" + getPriority() +
", allocationReqId=" + getAllocationRequestId() +
", executionType=" + getExecutionType() +
", allocationTags=" + getAllocationTags() +
", resourceSizing=" + getResourceSizing() +
", placementConstraint=" + getPlacementConstraint() +
'}';
}
}
| SchedulingRequestPBImpl |
java | google__dagger | javatests/dagger/functional/subcomponent/multibindings/SubcomponentBuilderMultibindingsTest.java | {
"start": 5786,
"end": 5860
} | interface ____ {
Foo getFoo();
}
@Module
public | FloatingSub |
java | apache__maven | impl/maven-cli/src/test/java/org/apache/maven/cling/invoker/mvn/MavenInvokerTestSupport.java | {
"start": 1522,
"end": 3284
} | class ____ {
static {
System.setProperty(
"library.jline.path",
Path.of("target/dependency/org/jline/nativ").toAbsolutePath().toString());
}
public static final String POM_STRING = """
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/maven-v4_0_0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>org.apache.maven.samples</groupId>
<artifactId>sample</artifactId>
<version>1.0.0</version>
<dependencyManagement>
<dependencies>
<dependency>
<groupId>org.junit</groupId>
<artifactId>junit-bom</artifactId>
<version>5.11.1</version>
<type>pom</type>
<scope>import</scope>
</dependency>
</dependencies>
</dependencyManagement>
<dependencies>
<dependency>
<groupId>org.junit.jupiter</groupId>
<artifactId>junit-jupiter-api</artifactId>
<scope>test</scope>
</dependency>
</dependencies>
</project>
""";
public static final String APP_JAVA_STRING = """
package org.apache.maven.samples.sample;
public | MavenInvokerTestSupport |
java | spring-projects__spring-framework | spring-messaging/src/main/java/org/springframework/messaging/handler/invocation/reactive/ArgumentResolverConfigurer.java | {
"start": 1011,
"end": 1555
} | class ____ {
private final List<HandlerMethodArgumentResolver> customResolvers = new ArrayList<>(8);
/**
* Configure resolvers for custom handler method arguments.
* @param resolvers the resolvers to add
*/
public void addCustomResolver(HandlerMethodArgumentResolver... resolvers) {
Assert.notNull(resolvers, "'resolvers' must not be null");
this.customResolvers.addAll(Arrays.asList(resolvers));
}
public List<HandlerMethodArgumentResolver> getCustomResolvers() {
return this.customResolvers;
}
}
| ArgumentResolverConfigurer |
java | google__guava | android/guava/src/com/google/common/cache/Cache.java | {
"start": 1557,
"end": 1940
} | interface ____ expected to be thread-safe, and can be safely accessed
* by multiple concurrent threads.
*
* @param <K> the type of the cache's keys, which are not permitted to be null
* @param <V> the type of the cache's values, which are not permitted to be null
* @author Charles Fry
* @since 10.0
*/
@DoNotMock("Use CacheBuilder.newBuilder().build()")
@GwtCompatible
public | are |
java | google__dagger | javatests/dagger/hilt/android/ActivityInjectedSavedStateViewModelTest.java | {
"start": 1780,
"end": 2957
} | class ____ {
private static final String DATA_KEY = "TEST_KEY";
@Rule public final HiltAndroidRule rule = new HiltAndroidRule(this);
@Test
public void memberInjectedViewModelWithSavedState() {
Intent intent = new Intent(getApplicationContext(), TestActivity.class);
intent.putExtra(DATA_KEY, "test data");
try (ActivityScenario<TestActivity> scenario = ActivityScenario.launch(intent)) {
scenario.onActivity(
activity -> {
String data = activity.myViewModel.handle.get(DATA_KEY);
assertThat(data).isEqualTo("test data");
});
}
}
// Note that assertion of object not being yet injected is in the SuperActivity, while the
// assertion in the scenario is confirming injection eventually does occur.
@Test
public void notYetMemberInjectedSuperActivity() {
try (ActivityScenario<TestActivityWithSuperActivity> scenario =
ActivityScenario.launch(TestActivityWithSuperActivity.class)) {
scenario.onActivity(activity -> assertThat(activity.someObject).isNotNull());
}
}
@AndroidEntryPoint(FragmentActivity.class)
public static final | ActivityInjectedSavedStateViewModelTest |
java | spring-projects__spring-boot | documentation/spring-boot-docs/src/main/java/org/springframework/boot/docs/howto/webserver/addservletfilterlistener/springbean/disable/MyFilter.java | {
"start": 774,
"end": 812
} | class ____ implements Filter {
}
| MyFilter |
java | google__guava | android/guava/src/com/google/common/hash/Hashing.java | {
"start": 30181,
"end": 30492
} | class ____ {
private long state;
LinearCongruentialGenerator(long seed) {
this.state = seed;
}
double nextDouble() {
state = 2862933555777941757L * state + 1;
return ((double) ((int) (state >>> 33) + 1)) / 0x1.0p31;
}
}
private Hashing() {}
}
| LinearCongruentialGenerator |
java | apache__camel | core/camel-main/src/main/java/org/apache/camel/main/GcpVaultConfigurationProperties.java | {
"start": 1064,
"end": 3322
} | class ____ extends GcpVaultConfiguration implements BootstrapCloseable {
private MainConfigurationProperties parent;
public GcpVaultConfigurationProperties(MainConfigurationProperties parent) {
this.parent = parent;
}
public MainConfigurationProperties end() {
return parent;
}
@Override
public void close() {
parent = null;
}
// getter and setters
// --------------------------------------------------------------
// these are inherited from the parent class
// fluent builders
// --------------------------------------------------------------
/**
* The Service Account Key location
*/
public GcpVaultConfigurationProperties withServiceAccountKey(String serviceAccountKey) {
setServiceAccountKey(serviceAccountKey);
return this;
}
/**
* The GCP Project ID
*/
public GcpVaultConfigurationProperties withProjectId(String projectId) {
setProjectId(projectId);
return this;
}
/**
* The GCP Project ID
*/
public GcpVaultConfigurationProperties withUseDefaultInstance(boolean useDefaultInstance) {
setUseDefaultInstance(useDefaultInstance);
return this;
}
/**
* The Pubsub subscriptionName name
*/
public GcpVaultConfigurationProperties withSubscriptionName(String subscriptionName) {
setSubscriptionName(subscriptionName);
return this;
}
/**
* Whether to automatically reload Camel upon secrets being updated in Google.
*/
public GcpVaultConfigurationProperties withRefreshEnabled(boolean refreshEnabled) {
setRefreshEnabled(refreshEnabled);
return this;
}
/**
* The period (millis) between checking Google for updated secrets.
*/
public GcpVaultConfigurationProperties withRefreshPeriod(long refreshPeriod) {
setRefreshPeriod(refreshPeriod);
return this;
}
/**
* Specify the secret names (or pattern) to check for updates. Multiple secrets can be separated by comma.
*/
public GcpVaultConfigurationProperties withSecrets(String secrets) {
setSecrets(secrets);
return this;
}
}
| GcpVaultConfigurationProperties |
java | qos-ch__slf4j | slf4j-ext/src/main/java/org/slf4j/instrumentation/ToStringHelper.java | {
"start": 1299,
"end": 2215
} | class ____ {
/**
* Prefix to use at the start of the representation. Always used.
*/
private static final String ARRAY_PREFIX = "[";
/**
* Suffix to use at the end of the representation. Always used.
*/
private static final char ARRAY_SUFFIX = ']';
/**
* String separating each element when rendering an array. To be compatible
* with lists comma-space is used.
*/
private static final char[] ELEMENT_SEPARATOR = ", ".toCharArray();
/**
* unrenderableClasses is essentially a Set of Class objects which has for
* some reason failed to render properly when invoked through a toString
* method call. To avoid memory leaks a data structure using weak references
* is needed, but unfortunately the runtime library does not contain a
* WeakHashSet class, so the behavior is emulated with a WeakHashmap with
* the | ToStringHelper |
java | quarkusio__quarkus | independent-projects/resteasy-reactive/client/runtime/src/main/java/org/jboss/resteasy/reactive/client/impl/SseParser.java | {
"start": 240,
"end": 12525
} | class ____ implements Handler<Buffer> {
private static final byte CR = '\r';
private static final byte LF = '\n';
private static final byte COLON = ':';
private static final byte SPACE = ' ';
/**
* Will be non-empty while parsing. When not parsing, may hold partial data from the last chunk received
*/
private byte[] bytes;
/**
* Index in {@link #bytes} where we're parsing, or where we should resume parsing partial data if not parsing.
*/
private int i;
/**
* Holds the current event's comment data as we read them
*/
private final StringBuffer commentBuffer = new StringBuffer();
/**
* Holds the current event's field name as we read a field
*/
private final StringBuffer nameBuffer = new StringBuffer();
/**
* Holds the current event's field value as we read a field
*/
private final StringBuffer valueBuffer = new StringBuffer();
/**
* True if we're at the very beginning of the data stream and could see a BOM
*/
private boolean firstByte = true;
/**
* True if we've started to read at least one byte of an event
*/
private boolean startedEvent = false;
/**
* The event type we're reading. Defaults to "message" and changes with "event" fields
*/
private String eventType;
/**
* The content type we're reading. Defaults to the X-Sse-Element-Type header and changes with the "content-type" fields
*/
private String contentType;
/**
* The content type we're reading. If the X-Sse-Element-Type header is not set, then it defaults to the declared @Produces
* (if any)
*/
private String contentTypeHeader;
/**
* The event data we're reading. Defaults to "" and changes with "data" fields
*/
private final StringBuffer dataBuffer = new StringBuffer();
/**
* The event's last id we're reading. Defaults to null and changes with "id" fields (cannot be reset)
*/
private String lastEventId;
/**
* The event connect time we're reading. Defaults to -1 and changes with "retry" fields (in ms)
*/
private long eventReconnectTime = SseEvent.RECONNECT_NOT_SET;
private final SseEventSourceImpl sseEventSource;
public SseParser(SseEventSourceImpl sseEventSource, String defaultContentType) {
this.sseEventSource = sseEventSource;
this.contentTypeHeader = defaultContentType;
}
public void setSseContentTypeHeader(String sseContentTypeHeader) {
this.contentTypeHeader = sseContentTypeHeader;
}
@Override
public void handle(Buffer event) {
    // copy the bytes out of the Vert.x buffer and release the underlying ByteBuf
    // eagerly, since we keep our own copy across invocations for partial parsing
    byte[] newBytes = event.getBytes();
    event.getByteBuf().release();
    // check if we have partial data remaining
    if (bytes != null) {
        // concat old and new data: the unconsumed tail of the previous buffer
        // (from index i) first, then the freshly received bytes
        byte[] totalBytes = new byte[bytes.length - i + newBytes.length];
        System.arraycopy(bytes, i, totalBytes, 0, bytes.length - i);
        System.arraycopy(newBytes, 0, totalBytes, bytes.length - i, newBytes.length);
        bytes = totalBytes;
    } else {
        bytes = newBytes;
    }
    i = 0;
    // parse as many complete events as the buffered data allows; each iteration
    // attempts one event after resetting the per-event state
    while (hasByte()) {
        boolean lastFirstByte = firstByte;
        startedEvent = false;
        nameBuffer.setLength(0);
        valueBuffer.setLength(0);
        commentBuffer.setLength(0);
        dataBuffer.setLength(0);
        contentType = contentTypeHeader;
        // SSE spec says default is "message" but JAX-RS says null
        eventType = null;
        eventReconnectTime = SseEvent.RECONNECT_NOT_SET;
        // SSE spec says ID is persistent, so lastEventId is deliberately not reset here
        boolean needsMoreData = false;
        // remember where this event started so we can rewind on partial data
        int lastEventStart = i;
        try {
            parseEvent();
            // if we started an event but did not fire it, it means we lacked a final end-of-line and must
            // wait for more data
            if (startedEvent) {
                needsMoreData = true;
            }
        } catch (NeedsMoreDataException x) {
            needsMoreData = true;
        }
        if (needsMoreData) {
            // save the remaining bytes for later
            i = lastEventStart;
            // be ready to rescan the BOM, but only if we didn't already see it in a previous event
            firstByte = lastFirstByte;
            return;
        }
    }
    // we ate all the data
    bytes = null;
}
/**
 * Parses one event (comments and fields up to a terminating end-of-line) from
 * {@link #bytes} starting at {@link #i}, and dispatches it when complete.
 * Throws {@link NeedsMoreDataException} (via the read helpers) when the buffer
 * ends mid-event so {@link #handle} can rewind and wait for more data.
 */
private void parseEvent() {
    // Strip the optional leading byte order mark. SSE streams are UTF-8, whose
    // BOM is the three-byte sequence EF BB BF (the encoding of U+FEFF). The
    // previous check for FE FF was the UTF-16 BE BOM and could never match a
    // UTF-8 stream; worse, a real UTF-8 BOM decoded to U+FEFF which passed
    // isNameChar and corrupted the first field name.
    if (firstByte && i == 0 && bytes.length >= 3) {
        if (bytes[0] == (byte) 0xEF
                && bytes[1] == (byte) 0xBB
                && bytes[2] == (byte) 0xBF) {
            i = 3;
        }
    }
    // If fewer than three bytes are buffered at stream start, readChar() below
    // throws NeedsMoreDataException on the incomplete sequence and we retry
    // with firstByte restored by the caller, so a split BOM is still handled.
    // comment or field
    while (hasByte()) {
        int c = readChar();
        firstByte = false;
        if (c == COLON) {
            startedEvent = true;
            parseComment();
        } else if (isNameChar(c)) {
            startedEvent = true;
            parseField(c);
        } else if (isEofWithSideEffect(c)) {
            // a blank line terminates and dispatches the event
            dispatchEvent();
            return;
        } else {
            throw illegalEventException();
        }
    }
}
/**
 * Builds an {@link InboundSseEventImpl} from the per-event state accumulated by
 * the parser and fires it at the event source.
 */
private void dispatchEvent() {
    WebTargetImpl webTarget = sseEventSource.getWebTarget();
    // tests don't set a web target, and we don't want them to end up starting vertx just to test parsing
    InboundSseEventImpl event = webTarget == null
            ? new InboundSseEventImpl(null, null)
            : new InboundSseEventImpl(webTarget.getConfiguration(), webTarget.getSerialisers());
    // SSE spec says empty string is the default, but JAX-RS says null if not specified
    event.setComment(commentBuffer.length() == 0 ? null : commentBuffer.toString());
    // SSE spec says empty string is the default, but JAX-RS says null if not specified
    event.setId(lastEventId);
    // data defaults to the empty string, which toString() yields for an empty buffer
    event.setData(dataBuffer.toString());
    // SSE spec says "message" is the default, but JAX-RS says null if not specified
    event.setName(eventType);
    event.setReconnectDelay(eventReconnectTime);
    event.setMediaType(contentType == null ? null : MediaType.valueOf(contentType));
    sseEventSource.fireEvent(event);
    // make sure we mark that we are done with this event
    startedEvent = false;
}
/**
 * Returns the byte at the current parse position without consuming it.
 * Callers must check {@link #hasByte()} first.
 */
private byte peekByte() {
    return bytes[i];
}
/**
 * Consumes and returns the next byte, or throws {@link NeedsMoreDataException}
 * when the buffer is exhausted so the caller can resume once more data arrives.
 */
private byte readByte() {
    if (!hasByte()) {
        // signal the caller that the current event is incomplete
        throw new NeedsMoreDataException();
    }
    byte result = bytes[i];
    i++;
    return result;
}
/**
 * True when at least one unconsumed byte remains in the buffer.
 */
private boolean hasByte() {
    return i < bytes.length;
}
/**
 * Reads a comment (everything after the leading colon up to end-of-line) into
 * {@link #commentBuffer}.
 */
private void parseComment() {
    // comment = colon *any-char end-of-line
    for (;;) {
        int c = readChar();
        if (isEofWithSideEffect(c)) {
            // reached end-of-line: the comment is complete
            return;
        }
        if (!isAnyChar(c)) {
            // neither a comment character nor a line terminator
            throw illegalEventException();
        }
        commentBuffer.appendCodePoint(c);
    }
}
/**
 * Reads one field (name, optional colon, optional value) up to end-of-line and
 * hands the result to {@link #processField}. The first character of the name
 * has already been consumed and is passed in as {@code c}.
 */
private void parseField(int c) {
    boolean readingName = true;
    nameBuffer.appendCodePoint(c);
    // field = 1*name-char [ colon [ space ] *any-char ] end-of-line
    while (true) {
        c = readChar();
        if (isEofWithSideEffect(c)) {
            // the colon is optional, so is the data, which we treat as an empty string
            processField(nameBuffer.toString(), valueBuffer.toString());
            nameBuffer.setLength(0);
            valueBuffer.setLength(0);
            // we're done
            return;
        }
        if (readingName && isNameChar(c)) {
            nameBuffer.appendCodePoint(c);
        } else if (readingName && c == COLON) {
            // the colon switches us from reading the name to reading the value
            readingName = false;
            // a single optional space after the colon is not part of the value
            if (hasByte() && peekByte() == SPACE) {
                i++;
            }
        } else if (!readingName && isAnyChar(c)) {
            valueBuffer.appendCodePoint(c);
        } else {
            // only reachable for code points outside the allowed character ranges
            throw illegalEventException();
        }
    }
}
/**
 * Applies a single parsed field to the per-event state, following the SSE
 * field-processing rules; unknown field names are silently ignored.
 */
private void processField(String name, String value) {
    if ("event".equals(name)) {
        eventType = value;
    } else if ("content-type".equals(name)) {
        contentType = value;
    } else if ("data".equals(name)) {
        // successive data fields are joined with a single LF
        if (dataBuffer.length() > 0) {
            dataBuffer.append((char) LF);
        }
        dataBuffer.append(value);
    } else if ("id".equals(name)) {
        // ids containing U+0000 NULL are ignored, as the spec mandates
        if (value.indexOf(0) == -1) {
            lastEventId = value;
        }
    } else if ("retry".equals(name)) {
        try {
            eventReconnectTime = Long.parseUnsignedLong(value, 10);
        } catch (NumberFormatException x) {
            // spec says to ignore non-numeric retry values
        }
    }
    // default is to ignore the field
}
/**
 * Returns true when {@code c} terminates a line (LF, CR, or CRLF). As a side
 * effect, a CR swallows an immediately following LF so CRLF counts as a single
 * terminator.
 */
private boolean isEofWithSideEffect(int c) {
    if (c == LF) {
        // we're done
        return true;
    }
    if (c != CR) {
        return false;
    }
    // CR: eat the LF of a CRLF pair if it is already buffered
    // FIXME: if our buffer cuts here that's a bad spot — a CR at the very end
    // of a buffer cannot see the LF that may arrive in the next buffer
    if (hasByte() && peekByte() == LF) {
        i++;
    }
    // we're done
    return true;
}
/**
 * True for any code point allowed in comment/value data: every scalar value
 * from U+0000 to U+10FFFF except the line terminators LF and CR.
 */
private boolean isAnyChar(int c) {
    return c >= 0x0000 && c <= 0x10_FFFF
            && c != 0x000A // LF
            && c != 0x000D; // CR
}
/**
 * True for any code point allowed in a field name: the any-char set minus the
 * colon, which delimits name from value.
 */
private boolean isNameChar(int c) {
    return c >= 0x0000 && c <= 0x10_FFFF
            && c != 0x000A // LF
            && c != 0x000D // CR
            && c != 0x003A; // COLON
}
/**
 * Decodes the next UTF-8 code point from the buffer, advancing {@link #i} past
 * the consumed bytes. Throws {@link NeedsMoreDataException} (via
 * {@link #readByte()}) when the buffer ends inside a multi-byte sequence, and
 * {@link IllegalStateException} on malformed lead/continuation bytes.
 *
 * <p>NOTE(review): this decoder is lenient — it accepts overlong encodings,
 * surrogate code points, and 4-byte sequences above U+10FFFF; out-of-range
 * values are only rejected later by isAnyChar/isNameChar. Confirm whether
 * strict rejection (or U+FFFD replacement, as browsers do) is required.
 */
private int readChar() {
    byte b0 = readByte();
    // single byte: 0xxxxxxx
    if ((b0 & 0b1000_0000) == 0) {
        return b0;
    }
    // two bytes: 110xxxxx 10xxxxxx
    if ((b0 & 0b1110_0000) == 0b1100_0000) {
        byte b1 = readByte();
        if ((b1 & 0b1100_0000) != 0b1000_0000) {
            throw utf8Exception();
        }
        return ((b0 & 0b0001_1111) << 6)
                | (b1 & 0b0011_1111);
    }
    // three bytes: 1110xxxx 10xxxxxx 10xxxxxx
    if ((b0 & 0b1111_0000) == 0b1110_0000) {
        byte b1 = readByte();
        if ((b1 & 0b1100_0000) != 0b1000_0000) {
            throw utf8Exception();
        }
        byte b2 = readByte();
        if ((b2 & 0b1100_0000) != 0b1000_0000) {
            throw utf8Exception();
        }
        return ((b0 & 0b0000_1111) << 12)
                | ((b1 & 0b0011_1111) << 6)
                | (b2 & 0b0011_1111);
    }
    // four bytes: 11110xxx 10xxxxxx 10xxxxxx 10xxxxxx
    if ((b0 & 0b1111_1000) == 0b1111_0000) {
        byte b1 = readByte();
        if ((b1 & 0b1100_0000) != 0b1000_0000) {
            throw utf8Exception();
        }
        byte b2 = readByte();
        if ((b2 & 0b1100_0000) != 0b1000_0000) {
            throw utf8Exception();
        }
        byte b3 = readByte();
        if ((b3 & 0b1100_0000) != 0b1000_0000) {
            throw utf8Exception();
        }
        return ((b0 & 0b0000_0111) << 18)
                | ((b1 & 0b0011_1111) << 12)
                | ((b2 & 0b0011_1111) << 6)
                | (b3 & 0b0011_1111);
    }
    // invalid lead byte (10xxxxxx or 11111xxx)
    throw utf8Exception();
}
/**
 * Creates the exception thrown when the byte stream is not valid UTF-8.
 */
private IllegalStateException utf8Exception() {
    return new IllegalStateException("Illegal UTF8 input");
}
/**
 * Creates the exception thrown on a protocol violation, including the current
 * byte index and the full decoded payload to ease diagnosis.
 */
private IllegalStateException illegalEventException() {
    String payload = new String(bytes, StandardCharsets.UTF_8);
    return new IllegalStateException("Illegal Server-Sent Event input at byte index " + i + " while parsing: "
            + payload);
}
}
| SseParser |
java | apache__camel | dsl/camel-componentdsl/src/generated/java/org/apache/camel/builder/component/dsl/CyberarkVaultComponentBuilderFactory.java | {
"start": 10179,
"end": 13234
} | class ____
extends AbstractComponentBuilder<CyberArkVaultComponent>
implements CyberarkVaultComponentBuilder {
@Override
protected CyberArkVaultComponent buildConcreteComponent() {
return new CyberArkVaultComponent();
}
private org.apache.camel.component.cyberark.vault.CyberArkVaultConfiguration getOrCreateConfiguration(CyberArkVaultComponent component) {
if (component.getConfiguration() == null) {
component.setConfiguration(new org.apache.camel.component.cyberark.vault.CyberArkVaultConfiguration());
}
return component.getConfiguration();
}
@Override
protected boolean setPropertyOnComponent(
Component component,
String name,
Object value) {
switch (name) {
case "account": getOrCreateConfiguration((CyberArkVaultComponent) component).setAccount((java.lang.String) value); return true;
case "certificatePath": getOrCreateConfiguration((CyberArkVaultComponent) component).setCertificatePath((java.lang.String) value); return true;
case "configuration": ((CyberArkVaultComponent) component).setConfiguration((org.apache.camel.component.cyberark.vault.CyberArkVaultConfiguration) value); return true;
case "conjurClient": getOrCreateConfiguration((CyberArkVaultComponent) component).setConjurClient((org.apache.camel.component.cyberark.vault.client.ConjurClient) value); return true;
case "lazyStartProducer": ((CyberArkVaultComponent) component).setLazyStartProducer((boolean) value); return true;
case "operation": getOrCreateConfiguration((CyberArkVaultComponent) component).setOperation((org.apache.camel.component.cyberark.vault.CyberArkVaultOperations) value); return true;
case "secretId": getOrCreateConfiguration((CyberArkVaultComponent) component).setSecretId((java.lang.String) value); return true;
case "url": getOrCreateConfiguration((CyberArkVaultComponent) component).setUrl((java.lang.String) value); return true;
case "verifySsl": getOrCreateConfiguration((CyberArkVaultComponent) component).setVerifySsl((boolean) value); return true;
case "autowiredEnabled": ((CyberArkVaultComponent) component).setAutowiredEnabled((boolean) value); return true;
case "apiKey": getOrCreateConfiguration((CyberArkVaultComponent) component).setApiKey((java.lang.String) value); return true;
case "authToken": getOrCreateConfiguration((CyberArkVaultComponent) component).setAuthToken((java.lang.String) value); return true;
case "password": getOrCreateConfiguration((CyberArkVaultComponent) component).setPassword((java.lang.String) value); return true;
case "username": getOrCreateConfiguration((CyberArkVaultComponent) component).setUsername((java.lang.String) value); return true;
default: return false;
}
}
}
} | CyberarkVaultComponentBuilderImpl |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestTrash.java | {
"start": 2056,
"end": 2100
} | class ____ commands from Trash.
*/
public | tests |
java | apache__camel | components/camel-box/camel-box-api/src/main/java/org/apache/camel/component/box/api/BoxTasksManager.java | {
"start": 1339,
"end": 9533
} | class ____ {
private static final Logger LOG = LoggerFactory.getLogger(BoxTasksManager.class);
/**
* Box connection to authenticated user account.
*/
private BoxAPIConnection boxConnection;
/**
* Create tasks manager to manage the tasks of Box connection's authenticated user.
*
* @param boxConnection - Box connection to authenticated user account.
*/
public BoxTasksManager(BoxAPIConnection boxConnection) {
this.boxConnection = boxConnection;
}
/**
* Get a list of any tasks on file.
*
* @param fileId - the id of file.
* @return The list of tasks on file.
*/
public List<BoxTask.Info> getFileTasks(String fileId) {
try {
LOG.debug("Getting tasks of file(id={})", fileId);
BoxHelper.notNull(fileId, BoxHelper.FILE_ID);
BoxFile file = new BoxFile(boxConnection, fileId);
return file.getTasks();
} catch (BoxAPIException e) {
throw new RuntimeCamelException(
buildBoxApiErrorMessage(e), e);
}
}
/**
* Add task to file.
*
* @param fileId - the id of file to add task to.
* @param action - the action the task assignee will be prompted to do.
* @param dueAt - - the day at which this task is due.
* @param message - an optional message to include with the task.
* @return The new task.
*/
public BoxTask addFileTask(String fileId, BoxTask.Action action, Date dueAt, String message) {
try {
LOG.debug("Adding task to file(id={}) to '{}'", fileId, message);
BoxHelper.notNull(fileId, BoxHelper.FILE_ID);
BoxHelper.notNull(action, BoxHelper.ACTION);
BoxHelper.notNull(dueAt, BoxHelper.DUE_AT);
BoxFile fileToAddTaskOn = new BoxFile(boxConnection, fileId);
return fileToAddTaskOn.addTask(action, message, dueAt).getResource();
} catch (BoxAPIException e) {
throw new RuntimeCamelException(
buildBoxApiErrorMessage(e), e);
}
}
/**
* Delete task.
*
* @param taskId - the id of task to delete.
*/
public void deleteTask(String taskId) {
try {
LOG.debug("Deleting task(id={})", taskId);
BoxHelper.notNull(taskId, BoxHelper.TASK_ID);
BoxTask task = new BoxTask(boxConnection, taskId);
task.delete();
} catch (BoxAPIException e) {
throw new RuntimeCamelException(
buildBoxApiErrorMessage(e), e);
}
}
/**
* Get task information.
*
* @param taskId - the id of task.
* @return The task information.
*/
public BoxTask.Info getTaskInfo(String taskId) {
try {
LOG.debug("Getting info for task(id={})", taskId);
BoxHelper.notNull(taskId, BoxHelper.TASK_ID);
BoxTask task = new BoxTask(boxConnection, taskId);
return task.getInfo();
} catch (BoxAPIException e) {
throw new RuntimeCamelException(
buildBoxApiErrorMessage(e), e);
}
}
/**
* Update task information.
*
* @param taskId - the id of task.
* @param info - the updated information
* @return The updated task.
*/
public BoxTask updateTaskInfo(String taskId, BoxTask.Info info) {
try {
LOG.debug("Updating info for task(id={})", taskId);
BoxHelper.notNull(taskId, BoxHelper.TASK_ID);
BoxHelper.notNull(info, BoxHelper.INFO);
BoxTask task = new BoxTask(boxConnection, taskId);
task.updateInfo(info);
return task;
} catch (BoxAPIException e) {
throw new RuntimeCamelException(
buildBoxApiErrorMessage(e), e);
}
}
/**
* Get a list of any assignments for task.
*
* @param taskId - the id of task.
* @return The list of assignments for task.
*/
public List<BoxTaskAssignment.Info> getTaskAssignments(String taskId) {
try {
LOG.debug("Getting assignments for task(id={})", taskId);
BoxHelper.notNull(taskId, BoxHelper.TASK_ID);
BoxTask file = new BoxTask(boxConnection, taskId);
return file.getAssignments();
} catch (BoxAPIException e) {
throw new RuntimeCamelException(
buildBoxApiErrorMessage(e), e);
}
}
/**
* Add assignment for task.
*
* @param taskId - the id of task to add assignment for.
* @param assignTo - the user to assign to task.
* @return The assigned task.
*/
@SuppressWarnings("unused") // compiler for some reason thinks 'if (assignTo
// == null)' clause is dead code.
public BoxTask addAssignmentToTask(String taskId, BoxUser assignTo) {
try {
BoxHelper.notNull(taskId, BoxHelper.TASK_ID);
BoxHelper.notNull(assignTo, BoxHelper.ASSIGN_TO);
LOG.debug("Assigning task(id={}) to user(id={})", taskId, assignTo.getID());
BoxTask task = new BoxTask(boxConnection, taskId);
task.addAssignment(assignTo);
return task;
} catch (BoxAPIException e) {
throw new RuntimeCamelException(
buildBoxApiErrorMessage(e), e);
}
}
/**
* Get task assignment information.
*
* @param taskAssignmentId - the id of task assignment.
* @return The task assignment information.
*/
public BoxTaskAssignment.Info getTaskAssignmentInfo(String taskAssignmentId) {
try {
LOG.debug("Getting info for task(id={})", taskAssignmentId);
BoxHelper.notNull(taskAssignmentId, BoxHelper.TASK_ASSIGNMENT_ID);
BoxTaskAssignment taskAssignment = new BoxTaskAssignment(boxConnection, taskAssignmentId);
return taskAssignment.getInfo();
} catch (BoxAPIException e) {
throw new RuntimeCamelException(
buildBoxApiErrorMessage(e), e);
}
}
// TODO Add this method when BoxTaskAssignment API fixed:
// BoxTaskAssignment.update method currently
// takes BoxTask.Info instead of BoxTaskAssignment.Info
// /**
// * Update task assignment information.
// *
// * @param taskAssignmentId
// * - the id of task assignment.
// * @param info
// * - the updated information
// * @return The updated task assignment.
// */
// public BoxTaskAssignment updateTaskAssignmentInfo(String
// taskAssignmentId, BoxTaskAssignment.Info info) {
// try {
// LOG.debug("Updating info for task(id={})", taskAssignmentId);
// if (taskAssignmentId == null) {
// throw new IllegalArgumentException("Parameter 'taskAssignmentId' can not
// be null");
// }
// if (info == null) {
// throw new IllegalArgumentException("Parameter 'info' can not be null");
// }
//
// BoxTaskAssignment taskAssignment = new BoxTaskAssignment(boxConnection,
// taskAssignmentId);
// taskAssignment.updateInfo(info);
//
// return taskAssignment;
// } catch (BoxAPIException e) {
// throw new RuntimeException(
// String.format("Box API returned the error code %d\n\n%s",
// e.getResponseCode(), e.getResponse()), e);
// }
// }
/**
* Delete task assignment.
*
* @param taskAssignmentId - the id of task assignment to delete.
*/
public void deleteTaskAssignment(String taskAssignmentId) {
try {
LOG.debug("Deleting task(id={})", taskAssignmentId);
BoxHelper.notNull(taskAssignmentId, BoxHelper.TASK_ASSIGNMENT_ID);
BoxTaskAssignment taskAssignment = new BoxTaskAssignment(boxConnection, taskAssignmentId);
taskAssignment.delete();
} catch (BoxAPIException e) {
throw new RuntimeCamelException(
buildBoxApiErrorMessage(e), e);
}
}
}
| BoxTasksManager |
java | ReactiveX__RxJava | src/test/java/io/reactivex/rxjava3/internal/operators/observable/ObservablePublishTest.java | {
"start": 1522,
"end": 25506
} | class ____ extends RxJavaTest {
@Test
public void publish() throws InterruptedException {
final AtomicInteger counter = new AtomicInteger();
ConnectableObservable<String> o = Observable.unsafeCreate(new ObservableSource<String>() {
@Override
public void subscribe(final Observer<? super String> observer) {
observer.onSubscribe(Disposable.empty());
new Thread(new Runnable() {
@Override
public void run() {
counter.incrementAndGet();
observer.onNext("one");
observer.onComplete();
}
}).start();
}
}).publish();
final CountDownLatch latch = new CountDownLatch(2);
// subscribe once
o.subscribe(new Consumer<String>() {
@Override
public void accept(String v) {
assertEquals("one", v);
latch.countDown();
}
});
// subscribe again
o.subscribe(new Consumer<String>() {
@Override
public void accept(String v) {
assertEquals("one", v);
latch.countDown();
}
});
Disposable connection = o.connect();
try {
if (!latch.await(1000, TimeUnit.MILLISECONDS)) {
fail("subscriptions did not receive values");
}
assertEquals(1, counter.get());
} finally {
connection.dispose();
}
}
@Test
public void backpressureFastSlow() {
ConnectableObservable<Integer> is = Observable.range(1, Flowable.bufferSize() * 2).publish();
Observable<Integer> fast = is.observeOn(Schedulers.computation())
.doOnComplete(new Action() {
@Override
public void run() {
System.out.println("^^^^^^^^^^^^^ completed FAST");
}
});
Observable<Integer> slow = is.observeOn(Schedulers.computation()).map(new Function<Integer, Integer>() {
int c;
@Override
public Integer apply(Integer i) {
if (c == 0) {
try {
Thread.sleep(500);
} catch (InterruptedException e) {
}
}
c++;
return i;
}
}).doOnComplete(new Action() {
@Override
public void run() {
System.out.println("^^^^^^^^^^^^^ completed SLOW");
}
});
TestObserver<Integer> to = new TestObserver<>();
Observable.merge(fast, slow).subscribe(to);
is.connect();
to.awaitDone(5, TimeUnit.SECONDS);
to.assertNoErrors();
assertEquals(Flowable.bufferSize() * 4, to.values().size());
}
// use case from https://github.com/ReactiveX/RxJava/issues/1732
@Test
public void takeUntilWithPublishedStreamUsingSelector() {
final AtomicInteger emitted = new AtomicInteger();
Observable<Integer> xs = Observable.range(0, Flowable.bufferSize() * 2).doOnNext(new Consumer<Integer>() {
@Override
public void accept(Integer t1) {
emitted.incrementAndGet();
}
});
TestObserver<Integer> to = new TestObserver<>();
xs.publish(new Function<Observable<Integer>, Observable<Integer>>() {
@Override
public Observable<Integer> apply(Observable<Integer> xs) {
return xs.takeUntil(xs.skipWhile(new Predicate<Integer>() {
@Override
public boolean test(Integer i) {
return i <= 3;
}
}));
}
}).subscribe(to);
to.awaitDone(5, TimeUnit.SECONDS);
to.assertNoErrors();
to.assertValues(0, 1, 2, 3);
assertEquals(5, emitted.get());
System.out.println(to.values());
}
// use case from https://github.com/ReactiveX/RxJava/issues/1732
@Test
public void takeUntilWithPublishedStream() {
Observable<Integer> xs = Observable.range(0, Flowable.bufferSize() * 2);
TestObserver<Integer> to = new TestObserver<>();
ConnectableObservable<Integer> xsp = xs.publish();
xsp.takeUntil(xsp.skipWhile(new Predicate<Integer>() {
@Override
public boolean test(Integer i) {
return i <= 3;
}
})).subscribe(to);
xsp.connect();
System.out.println(to.values());
}
@Test
public void backpressureTwoConsumers() {
final AtomicInteger sourceEmission = new AtomicInteger();
final AtomicBoolean sourceUnsubscribed = new AtomicBoolean();
final Observable<Integer> source = Observable.range(1, 100)
.doOnNext(new Consumer<Integer>() {
@Override
public void accept(Integer t1) {
sourceEmission.incrementAndGet();
}
})
.doOnDispose(new Action() {
@Override
public void run() {
sourceUnsubscribed.set(true);
}
}).share();
;
final AtomicBoolean child1Unsubscribed = new AtomicBoolean();
final AtomicBoolean child2Unsubscribed = new AtomicBoolean();
final TestObserver<Integer> to2 = new TestObserver<>();
final TestObserver<Integer> to1 = new TestObserver<Integer>() {
@Override
public void onNext(Integer t) {
if (values().size() == 2) {
source.doOnDispose(new Action() {
@Override
public void run() {
child2Unsubscribed.set(true);
}
}).take(5).subscribe(to2);
}
super.onNext(t);
}
};
source.doOnDispose(new Action() {
@Override
public void run() {
child1Unsubscribed.set(true);
}
}).take(5)
.subscribe(to1);
to1.awaitDone(5, TimeUnit.SECONDS);
to2.awaitDone(5, TimeUnit.SECONDS);
to1.assertNoErrors();
to2.assertNoErrors();
assertTrue(sourceUnsubscribed.get());
assertTrue(child1Unsubscribed.get());
assertTrue(child2Unsubscribed.get());
to1.assertValues(1, 2, 3, 4, 5);
to2.assertValues(4, 5, 6, 7, 8);
assertEquals(8, sourceEmission.get());
}
@Test
public void connectWithNoSubscriber() {
TestScheduler scheduler = new TestScheduler();
ConnectableObservable<Long> co = Observable.interval(10, 10, TimeUnit.MILLISECONDS, scheduler).take(3).publish();
co.connect();
// Emit 0
scheduler.advanceTimeBy(15, TimeUnit.MILLISECONDS);
TestObserverEx<Long> to = new TestObserverEx<>();
co.subscribe(to);
// Emit 1 and 2
scheduler.advanceTimeBy(50, TimeUnit.MILLISECONDS);
to.assertValues(1L, 2L);
to.assertNoErrors();
to.assertTerminated();
}
@Test
public void subscribeAfterDisconnectThenConnect() {
ConnectableObservable<Integer> source = Observable.just(1).publish();
TestObserverEx<Integer> to1 = new TestObserverEx<>();
source.subscribe(to1);
Disposable connection = source.connect();
to1.assertValue(1);
to1.assertNoErrors();
to1.assertTerminated();
source.reset();
TestObserverEx<Integer> to2 = new TestObserverEx<>();
source.subscribe(to2);
Disposable connection2 = source.connect();
to2.assertValue(1);
to2.assertNoErrors();
to2.assertTerminated();
System.out.println(connection);
System.out.println(connection2);
}
@Test
public void noSubscriberRetentionOnCompleted() {
ObservablePublish<Integer> source = (ObservablePublish<Integer>)Observable.just(1).publish();
TestObserverEx<Integer> to1 = new TestObserverEx<>();
source.subscribe(to1);
to1.assertNoValues();
to1.assertNoErrors();
to1.assertNotComplete();
source.connect();
to1.assertValue(1);
to1.assertNoErrors();
to1.assertTerminated();
assertEquals(0, source.current.get().get().length);
}
@Test
public void nonNullConnection() {
ConnectableObservable<Object> source = Observable.never().publish();
assertNotNull(source.connect());
assertNotNull(source.connect());
}
@Test
public void noDisconnectSomeoneElse() {
ConnectableObservable<Object> source = Observable.never().publish();
Disposable connection1 = source.connect();
Disposable connection2 = source.connect();
connection1.dispose();
Disposable connection3 = source.connect();
connection2.dispose();
assertTrue(checkPublishDisposed(connection1));
assertTrue(checkPublishDisposed(connection2));
assertFalse(checkPublishDisposed(connection3));
}
@SuppressWarnings("unchecked")
static boolean checkPublishDisposed(Disposable d) {
return ((ObservablePublish.PublishConnection<Object>)d).isDisposed();
}
@Test
public void connectIsIdempotent() {
final AtomicInteger calls = new AtomicInteger();
Observable<Integer> source = Observable.unsafeCreate(new ObservableSource<Integer>() {
@Override
public void subscribe(Observer<? super Integer> t) {
t.onSubscribe(Disposable.empty());
calls.getAndIncrement();
}
});
ConnectableObservable<Integer> conn = source.publish();
assertEquals(0, calls.get());
conn.connect();
conn.connect();
assertEquals(1, calls.get());
conn.connect().dispose();
conn.connect();
conn.connect();
assertEquals(2, calls.get());
}
@Test
public void observeOn() {
ConnectableObservable<Integer> co = Observable.range(0, 1000).publish();
Observable<Integer> obs = co.observeOn(Schedulers.computation());
for (int i = 0; i < 1000; i++) {
for (int j = 1; j < 6; j++) {
List<TestObserverEx<Integer>> tos = new ArrayList<>();
for (int k = 1; k < j; k++) {
TestObserverEx<Integer> to = new TestObserverEx<>();
tos.add(to);
obs.subscribe(to);
}
Disposable connection = co.connect();
for (TestObserverEx<Integer> to : tos) {
to.awaitDone(2, TimeUnit.SECONDS);
to.assertTerminated();
to.assertNoErrors();
assertEquals(1000, to.values().size());
}
connection.dispose();
}
}
}
@Test
public void preNextConnect() {
for (int i = 0; i < TestHelper.RACE_DEFAULT_LOOPS; i++) {
final ConnectableObservable<Integer> co = Observable.<Integer>empty().publish();
co.connect();
Runnable r1 = new Runnable() {
@Override
public void run() {
co.test();
}
};
TestHelper.race(r1, r1);
}
}
@Test
public void connectRace() {
for (int i = 0; i < TestHelper.RACE_DEFAULT_LOOPS; i++) {
final ConnectableObservable<Integer> co = Observable.<Integer>empty().publish();
Runnable r1 = new Runnable() {
@Override
public void run() {
co.connect();
}
};
TestHelper.race(r1, r1);
}
}
@Test
public void selectorCrash() {
Observable.just(1).publish(new Function<Observable<Integer>, ObservableSource<Object>>() {
@Override
public ObservableSource<Object> apply(Observable<Integer> v) throws Exception {
throw new TestException();
}
})
.test()
.assertFailure(TestException.class);
}
@Test
public void source() {
Observable<Integer> o = Observable.never();
assertSame(o, (((HasUpstreamObservableSource<?>)o.publish()).source()));
}
@Test
public void connectThrows() {
ConnectableObservable<Integer> co = Observable.<Integer>empty().publish();
try {
co.connect(new Consumer<Disposable>() {
@Override
public void accept(Disposable d) throws Exception {
throw new TestException();
}
});
} catch (TestException ex) {
// expected
}
}
@Test
public void addRemoveRace() {
for (int i = 0; i < TestHelper.RACE_DEFAULT_LOOPS; i++) {
final ConnectableObservable<Integer> co = Observable.<Integer>empty().publish();
final TestObserver<Integer> to = co.test();
final TestObserver<Integer> to2 = new TestObserver<>();
Runnable r1 = new Runnable() {
@Override
public void run() {
co.subscribe(to2);
}
};
Runnable r2 = new Runnable() {
@Override
public void run() {
to.dispose();
}
};
TestHelper.race(r1, r2);
}
}
@Test
public void disposeOnArrival() {
ConnectableObservable<Integer> co = Observable.<Integer>empty().publish();
co.test(true).assertEmpty();
}
@Test
public void dispose() {
TestHelper.checkDisposed(Observable.never().publish());
TestHelper.checkDisposed(Observable.never().publish(Functions.<Observable<Object>>identity()));
}
@Test
public void empty() {
ConnectableObservable<Integer> co = Observable.<Integer>empty().publish();
co.connect();
}
@Test
public void take() {
ConnectableObservable<Integer> co = Observable.range(1, 2).publish();
TestObserver<Integer> to = co.take(1).test();
co.connect();
to.assertResult(1);
}
@Test
public void just() {
final PublishSubject<Integer> ps = PublishSubject.create();
ConnectableObservable<Integer> co = ps.publish();
TestObserver<Integer> to = new TestObserver<Integer>() {
@Override
public void onNext(Integer t) {
super.onNext(t);
ps.onComplete();
}
};
co.subscribe(to);
co.connect();
ps.onNext(1);
to.assertResult(1);
}
@Test
public void nextCancelRace() {
for (int i = 0; i < TestHelper.RACE_DEFAULT_LOOPS; i++) {
final PublishSubject<Integer> ps = PublishSubject.create();
final ConnectableObservable<Integer> co = ps.publish();
final TestObserver<Integer> to = co.test();
Runnable r1 = new Runnable() {
@Override
public void run() {
ps.onNext(1);
}
};
Runnable r2 = new Runnable() {
@Override
public void run() {
to.dispose();
}
};
TestHelper.race(r1, r2);
}
}
@Test
public void badSource() {
List<Throwable> errors = TestHelper.trackPluginErrors();
try {
new Observable<Integer>() {
@Override
protected void subscribeActual(Observer<? super Integer> observer) {
observer.onSubscribe(Disposable.empty());
observer.onNext(1);
observer.onComplete();
observer.onNext(2);
observer.onError(new TestException());
observer.onComplete();
}
}
.publish()
.autoConnect()
.test()
.assertResult(1);
TestHelper.assertUndeliverable(errors, 0, TestException.class);
} finally {
RxJavaPlugins.reset();
}
}
@Test
public void noErrorLoss() {
ConnectableObservable<Object> co = Observable.error(new TestException()).publish();
co.connect();
// 3.x: terminal events remain observable until reset
co.test()
.assertFailure(TestException.class);
}
@Test
public void subscribeDisconnectRace() {
for (int i = 0; i < TestHelper.RACE_DEFAULT_LOOPS; i++) {
final PublishSubject<Integer> ps = PublishSubject.create();
final ConnectableObservable<Integer> co = ps.publish();
final Disposable d = co.connect();
final TestObserver<Integer> to = new TestObserver<>();
Runnable r1 = new Runnable() {
@Override
public void run() {
d.dispose();
}
};
Runnable r2 = new Runnable() {
@Override
public void run() {
co.subscribe(to);
}
};
TestHelper.race(r1, r2);
}
}
@Test
public void selectorDisconnectsIndependentSource() {
PublishSubject<Integer> ps = PublishSubject.create();
ps.publish(new Function<Observable<Integer>, ObservableSource<Integer>>() {
@Override
public ObservableSource<Integer> apply(Observable<Integer> v) throws Exception {
return Observable.range(1, 2);
}
})
.test()
.assertResult(1, 2);
assertFalse(ps.hasObservers());
}
@Test
public void selectorLatecommer() {
Observable.range(1, 5)
.publish(new Function<Observable<Integer>, ObservableSource<Integer>>() {
@Override
public ObservableSource<Integer> apply(Observable<Integer> v) throws Exception {
return v.concatWith(v);
}
})
.test()
.assertResult(1, 2, 3, 4, 5);
}
@Test
public void mainError() {
Observable.error(new TestException())
.publish(Functions.<Observable<Object>>identity())
.test()
.assertFailure(TestException.class);
}
@Test
public void selectorInnerError() {
PublishSubject<Integer> ps = PublishSubject.create();
ps.publish(new Function<Observable<Integer>, ObservableSource<Integer>>() {
@Override
public ObservableSource<Integer> apply(Observable<Integer> v) throws Exception {
return Observable.error(new TestException());
}
})
.test()
.assertFailure(TestException.class);
assertFalse(ps.hasObservers());
}
@Test
public void delayedUpstreamOnSubscribe() {
final Observer<?>[] sub = { null };
new Observable<Integer>() {
@Override
protected void subscribeActual(Observer<? super Integer> observer) {
sub[0] = observer;
}
}
.publish()
.connect()
.dispose();
Disposable bs = Disposable.empty();
sub[0].onSubscribe(bs);
assertTrue(bs.isDisposed());
}
@Test
public void doubleOnSubscribe() {
TestHelper.checkDoubleOnSubscribeObservable(new Function<Observable<Object>, ObservableSource<Object>>() {
@Override
public ObservableSource<Object> apply(final Observable<Object> o)
throws Exception {
return Observable.<Integer>never().publish(new Function<Observable<Integer>, ObservableSource<Object>>() {
@Override
public ObservableSource<Object> apply(Observable<Integer> v)
throws Exception {
return o;
}
});
}
}
);
}
@Test
public void disposedUpfront() {
ConnectableObservable<Integer> co = Observable.just(1)
.concatWith(Observable.<Integer>never())
.publish();
TestObserver<Integer> to1 = co.test();
TestObserver<Integer> to2 = co.test(true);
co.connect();
to1.assertValuesOnly(1);
to2.assertEmpty();
((ObservablePublish<Integer>)co).current.get().remove(null);
}
@Test
public void altConnectCrash() {
try {
new ObservablePublish<>(Observable.<Integer>empty())
.connect(new Consumer<Disposable>() {
@Override
public void accept(Disposable t) throws Exception {
throw new TestException();
}
});
fail("Should have thrown");
} catch (TestException expected) {
// expected
}
}
@Test
public void altConnectRace() {
for (int i = 0; i < TestHelper.RACE_LONG_LOOPS; i++) {
final ConnectableObservable<Integer> co =
new ObservablePublish<>(Observable.<Integer>never());
Runnable r = new Runnable() {
@Override
public void run() {
co.connect();
}
};
TestHelper.race(r, r);
}
}
@Test
public void onCompleteAvailableUntilReset() {
ConnectableObservable<Integer> co = Observable.just(1).publish();
TestObserver<Integer> to = co.test();
to.assertEmpty();
co.connect();
to.assertResult(1);
co.test().assertResult();
co.reset();
to = co.test();
to.assertEmpty();
co.connect();
to.assertResult(1);
}
@Test
public void onErrorAvailableUntilReset() {
ConnectableObservable<Integer> co = Observable.just(1)
.concatWith(Observable.<Integer>error(new TestException()))
.publish();
TestObserver<Integer> to = co.test();
to.assertEmpty();
co.connect();
to.assertFailure(TestException.class, 1);
co.test().assertFailure(TestException.class);
co.reset();
to = co.test();
to.assertEmpty();
co.connect();
to.assertFailure(TestException.class, 1);
}
@Test
public void disposeResets() {
PublishSubject<Integer> ps = PublishSubject.create();
ConnectableObservable<Integer> co = ps.publish();
assertFalse(ps.hasObservers());
Disposable d = co.connect();
assertTrue(ps.hasObservers());
d.dispose();
assertFalse(ps.hasObservers());
TestObserver<Integer> to = co.test();
co.connect();
assertTrue(ps.hasObservers());
ps.onNext(1);
to.assertValuesOnly(1);
}
@Test
public void disposeNoNeedForReset() {
PublishSubject<Integer> ps = PublishSubject.create();
ConnectableObservable<Integer> co = ps.publish();
TestObserver<Integer> to = co.test();
Disposable d = co.connect();
ps.onNext(1);
d.dispose();
to = co.test();
to.assertEmpty();
co.connect();
to.assertEmpty();
ps.onNext(2);
to.assertValuesOnly(2);
}
}
| ObservablePublishTest |
java | elastic__elasticsearch | test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java | {
"start": 10205,
"end": 27720
} | class ____ extends ESTestCase {
private NamedWriteableRegistry namedWriteableRegistry;
private final List<Releasable> releasables = new ArrayList<>();
protected ValuesSourceRegistry valuesSourceRegistry;
private AnalysisModule analysisModule;
// A list of field types that should not be tested, or are not currently supported
private static final List<String> TYPE_TEST_BLACKLIST = List.of(
ObjectMapper.CONTENT_TYPE, // Cannot aggregate objects
DenseVectorFieldMapper.CONTENT_TYPE, // Cannot aggregate dense vectors
SparseVectorFieldMapper.CONTENT_TYPE, // Sparse vectors are no longer supported
NestedObjectMapper.CONTENT_TYPE, // TODO support for nested
PassThroughObjectMapper.CONTENT_TYPE, // TODO support for passthrough
CompletionFieldMapper.CONTENT_TYPE, // TODO support completion
FieldAliasMapper.CONTENT_TYPE // TODO support alias
);
ThreadPool threadPool;
ThreadPoolExecutor threadPoolExecutor;
@Before
public final void initPlugins() {
threadPool = new TestThreadPool(AggregatorTestCase.class.getName());
threadPoolExecutor = (ThreadPoolExecutor) threadPool.executor(ThreadPool.Names.SEARCH);
List<SearchPlugin> plugins = new ArrayList<>(getSearchPlugins());
plugins.add(new AggCardinalityUpperBoundPlugin());
SearchModule searchModule = new SearchModule(Settings.EMPTY, plugins);
valuesSourceRegistry = searchModule.getValuesSourceRegistry();
namedWriteableRegistry = new NamedWriteableRegistry(
Stream.concat(
searchModule.getNamedWriteables().stream(),
plugins.stream().flatMap(p -> p instanceof Plugin ? ((Plugin) p).getNamedWriteables().stream() : Stream.empty())
).collect(toList())
);
}
@Before
public void initAnalysisRegistry() throws Exception {
analysisModule = createAnalysisModule();
}
/**
* @return a new analysis module. Tests that require a fully constructed analysis module (used to create an analysis registry)
* should override this method
*/
protected AnalysisModule createAnalysisModule() throws Exception {
return null;
}
/**
* Test cases should override this if they have plugins that need to be loaded, e.g. the plugins their aggregators are in.
*/
protected List<SearchPlugin> getSearchPlugins() {
return List.of();
}
protected <A extends Aggregator> A createAggregator(AggregationBuilder aggregationBuilder, AggregationContext context)
throws IOException {
return createAggregator(new AggregatorFactories.Builder().addAggregator(aggregationBuilder), context);
}
private <A extends Aggregator> A createAggregator(AggregatorFactories.Builder builder, AggregationContext context) throws IOException {
Aggregator[] aggregators = builder.build(context, null).createTopLevelAggregators();
assertThat(aggregators.length, equalTo(1));
@SuppressWarnings("unchecked")
A aggregator = (A) aggregators[0];
return aggregator;
}
/**
* Create a {@linkplain AggregationContext} for testing an {@link Aggregator}.
* While {@linkplain AggregationContext} is {@link Releasable} the caller is
* not responsible for releasing it. Instead, it is released automatically in
* in {@link #cleanupReleasables()}.
*
* Deprecated - this will be made private in a future update
*/
@Deprecated
protected AggregationContext createAggregationContext(IndexReader indexReader, Query query, MappedFieldType... fieldTypes)
throws IOException {
return createAggregationContext(
indexReader,
createIndexSettings(),
query,
new NoneCircuitBreakerService(),
AggregationBuilder.DEFAULT_PREALLOCATION * 5, // We don't know how many bytes to preallocate so we grab a hand full
DEFAULT_MAX_BUCKETS,
false,
false,
fieldTypes
);
}
/**
* Create a {@linkplain AggregationContext} for testing an {@link Aggregator}.
* While {@linkplain AggregationContext} is {@link Releasable} the caller is
* not responsible for releasing it. Instead, it is released automatically in
* in {@link #cleanupReleasables()}.
*/
protected AggregationContext createAggregationContext(
IndexReader indexReader,
IndexSettings indexSettings,
Query query,
CircuitBreakerService breakerService,
long bytesToPreallocate,
int maxBucket,
boolean isInSortOrderExecutionRequired,
boolean supportsParallelCollection,
MappedFieldType... fieldTypes
) throws IOException {
return createAggregationContext(
newIndexSearcher(indexReader, supportsParallelCollection),
indexSettings,
query,
breakerService,
bytesToPreallocate,
maxBucket,
isInSortOrderExecutionRequired,
fieldTypes
);
}
private AggregationContext createAggregationContext(
IndexSearcher searcher,
IndexSettings indexSettings,
Query query,
CircuitBreakerService breakerService,
long bytesToPreallocate,
int maxBucket,
boolean isInSortOrderExecutionRequired,
MappedFieldType... fieldTypes
) {
return createAggregationContext(
searcher,
indexSettings,
query,
breakerService,
bytesToPreallocate,
maxBucket,
isInSortOrderExecutionRequired,
() -> false,
fieldTypes
);
}
/**
* Creates an aggregation context that will randomly report that the query has been cancelled
*/
private AggregationContext createCancellingAggregationContext(
IndexSearcher searcher,
IndexSettings indexSettings,
Query query,
CircuitBreakerService breakerService,
long bytesToPreallocate,
int maxBucket,
boolean isInSortOrderExecutionRequired,
MappedFieldType... fieldTypes
) {
return createAggregationContext(
searcher,
indexSettings,
query,
breakerService,
bytesToPreallocate,
maxBucket,
isInSortOrderExecutionRequired,
() -> ESTestCase.random().nextInt(20) == 0,
fieldTypes
);
}
private AggregationContext createAggregationContext(
IndexSearcher searcher,
IndexSettings indexSettings,
Query query,
CircuitBreakerService breakerService,
long bytesToPreallocate,
int maxBucket,
boolean isInSortOrderExecutionRequired,
Supplier<Boolean> isCancelled,
MappedFieldType... fieldTypes
) {
MappingLookup mappingLookup = MappingLookup.fromMappers(
Mapping.EMPTY,
Arrays.stream(fieldTypes).map(this::buildMockFieldMapper).collect(toList()),
objectMappers(),
// Alias all fields to <name>-alias to test aliases
Arrays.stream(fieldTypes)
.map(ft -> new FieldAliasMapper(ft.name() + "-alias", ft.name() + "-alias", ft.name()))
.collect(toList()),
List.of()
);
BiFunction<MappedFieldType, FieldDataContext, IndexFieldData<?>> fieldDataBuilder = (fieldType, context) -> fieldType
.fielddataBuilder(
new FieldDataContext(
indexSettings.getIndex().getName(),
indexSettings,
context.lookupSupplier(),
context.sourcePathsLookup(),
context.fielddataOperation()
)
).build(new IndexFieldDataCache.None(), breakerService);
BitsetFilterCache bitsetFilterCache = new BitsetFilterCache(indexSettings, BitsetFilterCache.Listener.NOOP);
SearchExecutionContext searchExecutionContext = new SearchExecutionContext(
0,
-1,
indexSettings,
bitsetFilterCache,
fieldDataBuilder,
null,
mappingLookup,
null,
getMockScriptService(),
parserConfig(),
writableRegistry(),
null,
searcher,
System::currentTimeMillis,
null,
null,
() -> true,
valuesSourceRegistry,
emptyMap(),
MapperMetrics.NOOP
) {
@Override
public Iterable<MappedFieldType> dimensionFields() {
return Arrays.stream(fieldTypes).filter(MappedFieldType::isDimension).toList();
}
};
AggregationContext context = new ProductionAggregationContext(
Optional.ofNullable(analysisModule).map(AnalysisModule::getAnalysisRegistry).orElse(null),
searchExecutionContext,
new MockBigArrays(new MockPageCacheRecycler(Settings.EMPTY), breakerService),
ClusterSettings.createBuiltInClusterSettings(),
bytesToPreallocate,
() -> query,
null,
maxBucket,
() -> buildSubSearchContext(
indexSettings,
searchExecutionContext,
bitsetFilterCache,
breakerService.getBreaker(CircuitBreaker.REQUEST)
),
bitsetFilterCache,
randomInt(),
() -> 0L,
isCancelled,
q -> q,
true,
isInSortOrderExecutionRequired
);
return context;
}
/**
* Build a {@link FieldMapper} to create the {@link MappingLookup} used for the aggs.
* {@code protected} so subclasses can have it.
*/
protected FieldMapper buildMockFieldMapper(MappedFieldType ft) {
return new MockFieldMapper(ft);
}
/**
* {@link ObjectMapper}s to add to the lookup. By default we don't need
* any {@link ObjectMapper}s but testing nested objects will require adding some.
*/
protected List<ObjectMapper> objectMappers() {
return List.of();
}
/**
* Build a {@link SubSearchContext}s to power {@code top_hits}.
*/
private SubSearchContext buildSubSearchContext(
IndexSettings indexSettings,
SearchExecutionContext searchExecutionContext,
BitsetFilterCache bitsetFilterCache,
CircuitBreaker breaker
) {
SearchContext ctx = mock(SearchContext.class);
try {
when(ctx.searcher()).thenReturn(
new ContextIndexSearcher(
searchExecutionContext.searcher().getIndexReader(),
searchExecutionContext.searcher().getSimilarity(),
DisabledQueryCache.INSTANCE,
TrivialQueryCachingPolicy.NEVER,
false
)
);
} catch (IOException e) {
throw new RuntimeException(e);
}
when(ctx.fetchPhase()).thenReturn(new FetchPhase(Arrays.asList(new FetchSourcePhase(), new FetchDocValuesPhase())));
when(ctx.circuitBreaker()).thenReturn(breaker);
when(ctx.memAccountingBufferSize()).thenReturn(1024 * 1024L);
/*
* Use a QueryShardContext that doesn't contain nested documents so we
* don't try to fetch them which would require mocking a whole menagerie
* of stuff.
*/
SearchExecutionContext subContext = spy(searchExecutionContext);
MappingLookup disableNestedLookup = MappingLookup.fromMappers(Mapping.EMPTY, Set.of(), Set.of());
doReturn(new NestedDocuments(disableNestedLookup, bitsetFilterCache::getBitSetProducer, indexSettings.getIndexVersionCreated()))
.when(subContext)
.getNestedDocuments();
when(ctx.getSearchExecutionContext()).thenReturn(subContext);
ShardSearchRequest request = new ShardSearchRequest(
OriginalIndices.NONE,
new SearchRequest().allowPartialSearchResults(randomBoolean()),
new ShardId("index", "indexUUID", 0),
0,
1,
AliasFilter.EMPTY,
1f,
0L,
null
);
when(ctx.request()).thenReturn(request);
IndexShard indexShard = mock(IndexShard.class);
when(indexShard.shardId()).thenReturn(new ShardId("test", "test", 0));
when(indexShard.indexSettings()).thenReturn(indexSettings);
when(ctx.indexShard()).thenReturn(indexShard);
when(ctx.newSourceLoader(null)).thenAnswer(inv -> searchExecutionContext.newSourceLoader(null, false));
when(ctx.newIdLoader()).thenReturn(IdLoader.fromLeafStoredFieldLoader());
when(ctx.innerHits()).thenReturn(new InnerHitsContext());
var res = new SubSearchContext(ctx);
releasables.add(res); // TODO: nasty workaround for not getting the standard resource handling behavior of a real search context
return res;
}
protected IndexSettings createIndexSettings() {
return new IndexSettings(
IndexMetadata.builder("_index")
.settings(Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, IndexVersion.current()))
.numberOfShards(1)
.numberOfReplicas(0)
.creationDate(System.currentTimeMillis())
.build(),
Settings.EMPTY
);
}
/**
* Sub-tests that need scripting can override this method to provide a script service and pre-baked scripts
*/
protected ScriptService getMockScriptService() {
return null;
}
/**
* Create a RandomIndexWriter that uses the LogDocMergePolicy.
*
* The latest lucene upgrade adds a new merge policy that reverses the order of the documents and it is not compatible with some
* aggregation types. This writer avoids randomization by hardcoding the merge policy to LogDocMergePolicy.
*/
protected static RandomIndexWriter newRandomIndexWriterWithLogDocMergePolicy(Directory directory) throws IOException {
final IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random())).setMergePolicy(new LogDocMergePolicy());
return new RandomIndexWriter(random(), directory, conf);
}
/**
* Collects all documents that match the provided query {@link Query} and
* returns the reduced {@link InternalAggregation}.
* <p>
* It runs the aggregation as well using a circuit breaker that randomly throws {@link CircuitBreakingException}
* in order to mak sure the implementation does not leak.
*/
protected <A extends InternalAggregation> A searchAndReduce(IndexReader reader, AggTestConfig aggTestConfig) throws IOException {
IndexSearcher searcher = newIndexSearcher(
reader,
aggTestConfig.builder.supportsParallelCollection(field -> getCardinality(reader, field))
);
IndexSettings indexSettings = createIndexSettings();
// First run it to find circuit breaker leaks on the aggregator
runWithCrankyCircuitBreaker(indexSettings, searcher, aggTestConfig);
CircuitBreakerService breakerService = new NoneCircuitBreakerService();
// Next, try with random cancellations, again looking for leaks
runWithCancellingConfig(indexSettings, searcher, breakerService, aggTestConfig);
// Finally, run it to the end
return searchAndReduce(indexSettings, searcher, breakerService, aggTestConfig, this::createAggregationContext);
}
/**
* Run an aggregation test against the {@link CrankyCircuitBreakerService}
* which fails randomly. This is extracted into a separate function so that
* stack traces will indicate if a bad allocation happened in the cranky CB
* run or the happy path run.
*/
private void runWithCrankyCircuitBreaker(IndexSettings indexSettings, IndexSearcher searcher, AggTestConfig aggTestConfig)
throws IOException {
CircuitBreakerService crankyService = new CrankyCircuitBreakerService();
for (int i = 0; i < 5; i++) {
try {
searchAndReduce(indexSettings, searcher, crankyService, aggTestConfig, this::createAggregationContext);
} catch (CircuitBreakingException e) {
// Circuit breaks from the cranky breaker are expected - it randomly fails, after all
assertThat(e.getMessage(), equalTo(CrankyCircuitBreakerService.ERROR_MESSAGE));
}
}
}
private void runWithCancellingConfig(
IndexSettings indexSettings,
IndexSearcher searcher,
CircuitBreakerService breakerService,
AggTestConfig aggTestConfig
) throws IOException {
for (int i = 0; i < 5; i++) {
try {
searchAndReduce(indexSettings, searcher, breakerService, aggTestConfig, this::createCancellingAggregationContext);
} catch (TaskCancelledException e) {
// we don't want to expectThrows this because the randomizer might just never report cancellation,
// but it's also normal that it should throw here.
}
}
}
@FunctionalInterface
public | AggregatorTestCase |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/MetricsRecords.java | {
"start": 1211,
"end": 1245
} | class ____ for tests
*/
public | mainly |
java | quarkusio__quarkus | extensions/resteasy-reactive/rest/deployment/src/test/java/io/quarkus/resteasy/reactive/server/test/resource/basic/ContextAndInstanceTest.java | {
"start": 4355,
"end": 4477
} | interface ____ extends AnotherInterface {
}
@Unremovable
@ApplicationScoped
public static | AnotherSubInterface |
java | hibernate__hibernate-orm | hibernate-jfr/src/main/java/org/hibernate/event/jfr/internal/EntityDeleteEvent.java | {
"start": 541,
"end": 953
} | class ____ extends Event implements DiagnosticEvent {
public static final String NAME = "org.hibernate.orm.EntityDeleteEvent";
@Label("Session Identifier")
public String sessionIdentifier;
@Label("Entity Identifier")
public String id;
@Label("Entity Name")
public String entityName;
@Label("Success")
public boolean success;
@Override
public String toString() {
return NAME;
}
}
| EntityDeleteEvent |
java | apache__flink | flink-table/flink-table-common/src/main/java/org/apache/flink/table/api/Schema.java | {
"start": 45696,
"end": 45806
} | class ____ all kinds of constraints in an unresolved schema. */
@PublicEvolving
public abstract static | for |
java | apache__logging-log4j2 | log4j-perf-test/src/main/java/org/apache/logging/log4j/perf/jmh/ReflectionBenchmark.java | {
"start": 4769,
"end": 5152
} | class ____ {
private String findMethodName(final int depth) {
if (depth == 1) {
return locateMethodName();
}
return findMethodName(depth - 1);
}
private String locateMethodName() {
return StackLocatorUtil.calcLocation(MethodLocator.class.getName()).getMethodName();
}
}
}
| MethodLocator |
java | google__guava | guava/src/com/google/common/base/Predicate.java | {
"start": 1600,
"end": 1898
} | class ____ common predicates and related utilities.
*
* <p>See the Guava User Guide article on <a
* href="https://github.com/google/guava/wiki/FunctionalExplained">the use of {@code Predicate}</a>.
*
* @author Kevin Bourrillion
* @since 2.0
*/
@FunctionalInterface
@GwtCompatible
public | provides |
java | alibaba__fastjson | src/test/java/com/alibaba/json/bvt/issue_1200/Issue1240.java | {
"start": 366,
"end": 958
} | class ____ extends TestCase {
public void test_for_issue() throws Exception {
ParserConfig parserConfig = new ParserConfig();
parserConfig.addAccept("org.springframework.util.LinkedMultiValueMap");
parserConfig.setAutoTypeSupport(true);
LinkedMultiValueMap<String, String> result = new LinkedMultiValueMap();
result.add("test", "11111");
String test = JSON.toJSONString(result, SerializerFeature.WriteClassName);
JSON.parseObject(test, Object.class, parserConfig, JSON.DEFAULT_PARSER_FEATURE, Feature.SupportAutoType);
}
}
| Issue1240 |
java | alibaba__druid | core/src/main/java/com/alibaba/druid/sql/dialect/athena/ast/AthenaObject.java | {
"start": 225,
"end": 494
} | interface ____ extends SQLObject {
void accept0(AthenaASTVisitor visitor);
@Override
default void accept(SQLASTVisitor visitor) {
if (visitor instanceof AthenaASTVisitor) {
accept0((AthenaASTVisitor) visitor);
}
}
}
| AthenaObject |
java | spring-projects__spring-framework | spring-context-support/src/main/java/org/springframework/ui/freemarker/FreeMarkerConfigurationFactoryBean.java | {
"start": 2190,
"end": 2794
} | class ____ extends FreeMarkerConfigurationFactory
implements FactoryBean<Configuration>, InitializingBean, ResourceLoaderAware {
private @Nullable Configuration configuration;
@Override
public void afterPropertiesSet() throws IOException, TemplateException {
this.configuration = createConfiguration();
}
@Override
public @Nullable Configuration getObject() {
return this.configuration;
}
@Override
public Class<? extends Configuration> getObjectType() {
return Configuration.class;
}
@Override
public boolean isSingleton() {
return true;
}
}
| FreeMarkerConfigurationFactoryBean |
java | spring-projects__spring-framework | spring-context/src/main/java/org/springframework/context/support/GenericXmlApplicationContext.java | {
"start": 4603,
"end": 5061
} | class ____ package will be used as a prefix when
* loading each specified resource name
* @param resourceNames relatively-qualified names of resources to load
*/
public void load(Class<?> relativeClass, String... resourceNames) {
Resource[] resources = new Resource[resourceNames.length];
for (int i = 0; i < resourceNames.length; i++) {
resources[i] = new ClassPathResource(resourceNames[i], relativeClass);
}
this.load(resources);
}
}
| whose |
java | spring-projects__spring-data-jpa | spring-data-jpa/src/test/java/org/springframework/data/jpa/repository/query/AbstractJpaQueryTests.java | {
"start": 8218,
"end": 8795
} | class ____ extends AbstractJpaQuery {
DummyJpaQuery(JpaQueryMethod method, EntityManager em) {
super(method, em);
}
@Override
protected JpaQueryExecution getExecution(JpaParametersParameterAccessor accessor) {
return execution;
}
@Override
protected Query doCreateQuery(JpaParametersParameterAccessor accessor) {
return query;
}
@Override
public boolean hasDeclaredCountQuery() {
return true;
}
@Override
protected TypedQuery<Long> doCreateCountQuery(JpaParametersParameterAccessor accessor) {
return countQuery;
}
}
}
| DummyJpaQuery |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/action/admin/cluster/remote/RemoteInfoResponse.java | {
"start": 884,
"end": 1647
} | class ____ extends ActionResponse implements ToXContentObject {
private final List<RemoteConnectionInfo> infos;
public RemoteInfoResponse(Collection<RemoteConnectionInfo> infos) {
this.infos = List.copyOf(infos);
}
public List<RemoteConnectionInfo> getInfos() {
return infos;
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeCollection(infos);
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
for (RemoteConnectionInfo info : infos) {
info.toXContent(builder, params);
}
builder.endObject();
return builder;
}
}
| RemoteInfoResponse |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/logaggregation/LogAggregationTestUtils.java | {
"start": 1428,
"end": 3226
} | class ____ {
public static final String REMOTE_LOG_ROOT = "target/app-logs/";
public static void enableFileControllers(Configuration conf,
List<Class<? extends LogAggregationFileController>> fileControllers,
List<String> fileControllerNames) {
enableFcs(conf, REMOTE_LOG_ROOT, fileControllers, fileControllerNames);
}
public static void enableFileControllers(Configuration conf,
String remoteLogRoot,
List<Class<? extends LogAggregationFileController>> fileControllers,
List<String> fileControllerNames) {
enableFcs(conf, remoteLogRoot, fileControllers, fileControllerNames);
}
private static void enableFcs(Configuration conf,
String remoteLogRoot,
List<Class<? extends LogAggregationFileController>> fileControllers,
List<String> fileControllerNames) {
conf.set(YarnConfiguration.LOG_AGGREGATION_FILE_FORMATS,
StringUtils.join(fileControllerNames, ","));
for (int i = 0; i < fileControllers.size(); i++) {
Class<? extends LogAggregationFileController> fileController = fileControllers.get(i);
String controllerName = fileControllerNames.get(i);
conf.setClass(String.format(LOG_AGGREGATION_FILE_CONTROLLER_FMT, controllerName),
fileController, LogAggregationFileController.class);
conf.set(String.format(LOG_AGGREGATION_REMOTE_APP_LOG_DIR_FMT, controllerName),
remoteLogRoot + controllerName + "/");
conf.set(String.format(LOG_AGGREGATION_REMOTE_APP_LOG_DIR_SUFFIX_FMT, controllerName),
controllerName);
}
}
}
| LogAggregationTestUtils |
java | spring-projects__spring-framework | spring-core/src/test/java/org/springframework/core/annotation/MergedAnnotationsCollectionTests.java | {
"start": 8693,
"end": 8765
} | interface ____ {
}
@Meta11
@Retention(RetentionPolicy.RUNTIME)
@ | Direct |
java | spring-projects__spring-boot | configuration-metadata/spring-boot-configuration-processor/src/test/java/org/springframework/boot/configurationprocessor/TestProject.java | {
"start": 4677,
"end": 5012
} | class ____ revert
*/
public void revert(Class<?> type) {
Assert.isTrue(this.sources.stream().anyMatch((sourceFile) -> sourceFile.getClassName().equals(type.getName())),
"Source file for type '" + type + "' does not exist");
this.sources = this.sources.and(SourceFile.forTestClass(type));
}
/**
* Add source code of given | to |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/test/java/org/apache/hadoop/yarn/server/metrics/TestAMRMClientRelayerMetrics.java | {
"start": 2962,
"end": 3095
} | class ____ {
/**
* Mock AMS for easier testing and mocking of request/responses.
*/
public static | TestAMRMClientRelayerMetrics |
java | quarkusio__quarkus | extensions/reactive-mssql-client/deployment/src/test/java/io/quarkus/reactive/mssql/client/DevServicesMsSQLDatasourceTestCase.java | {
"start": 435,
"end": 1420
} | class ____ {
@RegisterExtension
static QuarkusUnitTest test = new QuarkusUnitTest()
.withApplicationRoot((jar) -> jar
.addAsResource("container-license-acceptance.txt"))
// Expect no warnings from reactive
.setLogRecordPredicate(record -> record.getLevel().intValue() >= Level.WARNING.intValue()
&& record.getMessage().toLowerCase(Locale.ENGLISH).contains("reactive"))
.assertLogRecords(records -> assertThat(records)
// This is just to get meaningful error messages, as LogRecord doesn't have a toString()
.extracting(LogRecord::getMessage)
.isEmpty());
@Inject
Pool pool;
@Test
public void testDatasource() throws Exception {
pool.withConnection(conn -> conn.query("SELECT 1").execute().replaceWithVoid())
.await().atMost(Duration.ofMinutes(2));
}
}
| DevServicesMsSQLDatasourceTestCase |
java | apache__camel | components/camel-seda/src/main/java/org/apache/camel/component/seda/PriorityBlockingQueueFactory.java | {
"start": 1052,
"end": 1918
} | class ____<E> implements BlockingQueueFactory<E> {
/**
* Comparator used to sort exchanges
*/
private Comparator<E> comparator;
public Comparator<E> getComparator() {
return comparator;
}
public void setComparator(Comparator<E> comparator) {
this.comparator = comparator;
}
@Override
public PriorityBlockingQueue<E> create() {
return comparator == null
? new PriorityBlockingQueue<>()
// PriorityQueue as a default capacity of 11
: new PriorityBlockingQueue<>(11, comparator);
}
@Override
public PriorityBlockingQueue<E> create(int capacity) {
return comparator == null
? new PriorityBlockingQueue<>(capacity)
: new PriorityBlockingQueue<>(capacity, comparator);
}
}
| PriorityBlockingQueueFactory |
java | google__guice | core/test/com/google/inject/errors/MissingConstructorErrorTest.java | {
"start": 3171,
"end": 3320
} | class ____ {
@Inject
DependsOnMissingNoArgConstructor(MissingNoArgConstructor noArgConstructor) {}
}
static | DependsOnMissingNoArgConstructor |
java | apache__kafka | metadata/src/main/java/org/apache/kafka/metadata/LeaderAndIsr.java | {
"start": 1056,
"end": 5490
} | class ____ {
public static final int INITIAL_LEADER_EPOCH = 0;
public static final int INITIAL_PARTITION_EPOCH = 0;
public static final int NO_LEADER = -1;
private final int leader;
private final int leaderEpoch;
private final LeaderRecoveryState leaderRecoveryState;
private final List<BrokerState> isrWithBrokerEpoch;
// The current epoch for the partition for KRaft controllers. The current ZK version for the
// legacy controllers. The epoch is a monotonically increasing value which is incremented
// after every partition change.
private final int partitionEpoch;
public LeaderAndIsr(int leader, List<Integer> isr) {
this(leader, INITIAL_LEADER_EPOCH, isr, LeaderRecoveryState.RECOVERED, INITIAL_PARTITION_EPOCH);
}
public LeaderAndIsr(
int leader,
int leaderEpoch,
List<Integer> isr,
LeaderRecoveryState leaderRecoveryState,
int partitionEpoch
) {
this(
leader,
leaderEpoch,
leaderRecoveryState,
isr.stream().map(brokerId -> new BrokerState().setBrokerId(brokerId)).toList(),
partitionEpoch
);
}
public LeaderAndIsr(
int leader,
int leaderEpoch,
LeaderRecoveryState leaderRecoveryState,
List<BrokerState> isrWithBrokerEpoch,
int partitionEpoch
) {
this.leader = leader;
this.leaderEpoch = leaderEpoch;
this.leaderRecoveryState = leaderRecoveryState;
this.isrWithBrokerEpoch = isrWithBrokerEpoch;
this.partitionEpoch = partitionEpoch;
}
public int leader() {
return leader;
}
public int leaderEpoch() {
return leaderEpoch;
}
public List<BrokerState> isrWithBrokerEpoch() {
return isrWithBrokerEpoch;
}
public LeaderRecoveryState leaderRecoveryState() {
return leaderRecoveryState;
}
public int partitionEpoch() {
return partitionEpoch;
}
public LeaderAndIsr withPartitionEpoch(int partitionEpoch) {
return new LeaderAndIsr(leader, leaderEpoch, leaderRecoveryState, isrWithBrokerEpoch, partitionEpoch);
}
public LeaderAndIsr newLeader(int leader) {
return newLeaderAndIsrWithBrokerEpoch(leader, isrWithBrokerEpoch);
}
public LeaderAndIsr newLeaderAndIsr(int leader, List<Integer> isr) {
return new LeaderAndIsr(leader, leaderEpoch + 1, isr, leaderRecoveryState, partitionEpoch);
}
private LeaderAndIsr newLeaderAndIsrWithBrokerEpoch(int leader, List<BrokerState> isrWithBrokerEpoch) {
return new LeaderAndIsr(leader, leaderEpoch + 1, leaderRecoveryState, isrWithBrokerEpoch, partitionEpoch);
}
public LeaderAndIsr newRecoveringLeaderAndIsr(int leader, List<Integer> isr) {
return new LeaderAndIsr(leader, leaderEpoch + 1, isr, LeaderRecoveryState.RECOVERING, partitionEpoch);
}
public LeaderAndIsr newEpoch() {
return newLeaderAndIsrWithBrokerEpoch(leader, isrWithBrokerEpoch);
}
public Optional<Integer> leaderOpt() {
return leader == LeaderAndIsr.NO_LEADER ? Optional.empty() : Optional.of(leader);
}
public Set<Integer> isr() {
return isrWithBrokerEpoch.stream()
.map(BrokerState::brokerId)
.collect(Collectors.toUnmodifiableSet());
}
@Override
public String toString() {
return "LeaderAndIsr(" +
"leader=" + leader +
", leaderEpoch=" + leaderEpoch +
", isrWithBrokerEpoch=" + isrWithBrokerEpoch +
", leaderRecoveryState=" + leaderRecoveryState +
", partitionEpoch=" + partitionEpoch +
')';
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
LeaderAndIsr that = (LeaderAndIsr) o;
return leader == that.leader && leaderEpoch == that.leaderEpoch && partitionEpoch == that.partitionEpoch &&
leaderRecoveryState == that.leaderRecoveryState && Objects.equals(isrWithBrokerEpoch, that.isrWithBrokerEpoch);
}
@Override
public int hashCode() {
return Objects.hash(leader, leaderEpoch, leaderRecoveryState, isrWithBrokerEpoch, partitionEpoch);
}
}
| LeaderAndIsr |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/TestRMWebServicesCapacitySchedulerMixedModePercentageAndWeightVector.java | {
"start": 3908,
"end": 5817
} | class ____ extends AbstractBinder {
@Override
protected void configure() {
Map<String, String> configMap = new HashMap<>();
configMap.put("yarn.scheduler.capacity.legacy-queue-mode.enabled", "false");
configMap.put("yarn.scheduler.capacity.root.queues", "default, test_1, test_2");
configMap.put("yarn.scheduler.capacity.root.test_1.queues", "test_1_1, test_1_2, test_1_3");
configMap.put("yarn.scheduler.capacity.root.default.capacity", "[memory=1w, vcores=1w]");
configMap.put("yarn.scheduler.capacity.root.test_1.capacity", "[memory=50%, vcores=50%]");
configMap.put("yarn.scheduler.capacity.root.test_2.capacity", "[memory=3w, vcores=3w]");
configMap.put("yarn.scheduler.capacity.root.test_1.test_1_1.capacity",
"[memory=12.5%, vcores=12.5%]");
configMap.put("yarn.scheduler.capacity.root.test_1.test_1_2.capacity",
"[memory=12.5%, vcores=12.5%]");
configMap.put("yarn.scheduler.capacity.root.test_1.test_1_3.capacity",
"[memory=1w, vcores=1w]");
conf = createConfiguration(configMap);
rm = createRM(createConfiguration(configMap));
final HttpServletRequest request = mock(HttpServletRequest.class);
when(request.getScheme()).thenReturn("http");
final HttpServletResponse response = mock(HttpServletResponse.class);
rmWebServices = new RMWebServices(rm, conf);
bind(rm).to(ResourceManager.class).named("rm");
bind(conf).to(Configuration.class).named("conf");
bind(rmWebServices).to(RMWebServices.class);
bind(request).to(HttpServletRequest.class);
rmWebServices.setResponse(response);
bind(response).to(HttpServletResponse.class);
}
}
@Test
public void testSchedulerPercentageAndWeightUsingCapacityVector() throws Exception {
runTest(EXPECTED_FILE_TMPL, "testSchedulerPercentageAndWeight", rm, target());
}
}
| JerseyBinder |
java | apache__camel | core/camel-core/src/test/java/org/apache/camel/impl/StartupListenerTest.java | {
"start": 1451,
"end": 3706
} | class ____ implements StartupListener {
private int invoked;
private boolean alreadyStarted;
@Override
public void onCamelContextStarted(CamelContext context, boolean alreadyStarted) {
invoked++;
this.alreadyStarted = alreadyStarted;
if (alreadyStarted) {
// the routes should already been started as we add the listener
// afterwards
assertTrue(context.getRouteController().getRouteStatus("foo").isStarted());
} else {
// the routes should not have been started as they start
// afterwards
assertTrue(context.getRouteController().getRouteStatus("foo").isStopped());
}
}
public int getInvoked() {
return invoked;
}
public boolean isAlreadyStarted() {
return alreadyStarted;
}
}
@Test
public void testStartupListenerComponent() throws Exception {
// and now the routes are started
assertTrue(context.getRouteController().getRouteStatus("foo").isStarted());
getMockEndpoint("mock:result").expectedMessageCount(1);
template.sendBody("direct:foo", "Hello World");
assertMockEndpointsSatisfied();
assertEquals(1, my.getInvoked());
assertFalse(my.isAlreadyStarted());
}
@Test
public void testStartupListenerComponentAlreadyStarted() throws Exception {
// and now the routes are started
assertTrue(context.getRouteController().getRouteStatus("foo").isStarted());
MyStartupListener other = new MyStartupListener();
context.addStartupListener(other);
getMockEndpoint("mock:result").expectedMessageCount(1);
template.sendBody("direct:foo", "Hello World");
assertMockEndpointsSatisfied();
assertEquals(1, other.getInvoked());
assertTrue(other.isAlreadyStarted());
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
from("direct:foo").routeId("foo").to("mock:result");
}
};
}
}
| MyStartupListener |
java | google__guice | core/src/com/google/inject/matcher/Matchers.java | {
"start": 1445,
"end": 1933
} | class ____ extends AbstractMatcher<Object> implements Serializable {
@Override
public boolean matches(Object o) {
return true;
}
@Override
public String toString() {
return "any()";
}
public Object readResolve() {
return any();
}
private static final long serialVersionUID = 0;
}
/** Inverts the given matcher. */
public static <T> Matcher<T> not(final Matcher<? super T> p) {
return new Not<T>(p);
}
private static | Any |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/health/node/HealthInfoCache.java | {
"start": 1257,
"end": 4694
} | class ____ implements ClusterStateListener {
private static final Logger logger = LogManager.getLogger(HealthInfoCache.class);
private volatile ConcurrentHashMap<String, DiskHealthInfo> diskInfoByNode = new ConcurrentHashMap<>();
@Nullable
private volatile DataStreamLifecycleHealthInfo dslHealthInfo = null;
private volatile ConcurrentHashMap<String, RepositoriesHealthInfo> repositoriesInfoByNode = new ConcurrentHashMap<>();
private volatile FileSettingsService.FileSettingsHealthInfo fileSettingsHealthInfo = INDETERMINATE;
private HealthInfoCache() {}
public static HealthInfoCache create(ClusterService clusterService) {
HealthInfoCache healthInfoCache = new HealthInfoCache();
clusterService.addListener(healthInfoCache);
return healthInfoCache;
}
public void updateNodeHealth(
String nodeId,
@Nullable DiskHealthInfo diskHealthInfo,
@Nullable DataStreamLifecycleHealthInfo latestDslHealthInfo,
@Nullable RepositoriesHealthInfo repositoriesHealthInfo,
@Nullable FileSettingsService.FileSettingsHealthInfo fileSettingsHealthInfo
) {
if (diskHealthInfo != null) {
diskInfoByNode.put(nodeId, diskHealthInfo);
}
if (latestDslHealthInfo != null) {
dslHealthInfo = latestDslHealthInfo;
}
if (repositoriesHealthInfo != null) {
repositoriesInfoByNode.put(nodeId, repositoriesHealthInfo);
}
if (fileSettingsHealthInfo != null) {
this.fileSettingsHealthInfo = fileSettingsHealthInfo;
}
}
@Override
public void clusterChanged(ClusterChangedEvent event) {
DiscoveryNode currentHealthNode = HealthNode.findHealthNode(event.state());
DiscoveryNode localNode = event.state().nodes().getLocalNode();
if (currentHealthNode != null && localNode.getId().equals(currentHealthNode.getId())) {
if (event.nodesRemoved()) {
for (DiscoveryNode removedNode : event.nodesDelta().removedNodes()) {
diskInfoByNode.remove(removedNode.getId());
repositoriesInfoByNode.remove(removedNode.getId());
}
}
// Resetting the cache is not synchronized for efficiency and simplicity.
// Processing a delayed update after the cache has been emptied because
// the node is not the health node anymore has small impact since it will
// be reset in the next round again.
} else if (diskInfoByNode.isEmpty() == false || dslHealthInfo != null || repositoriesInfoByNode.isEmpty() == false) {
logger.debug("Node [{}][{}] is no longer the health node, emptying the cache.", localNode.getName(), localNode.getId());
diskInfoByNode = new ConcurrentHashMap<>();
dslHealthInfo = null;
repositoriesInfoByNode = new ConcurrentHashMap<>();
fileSettingsHealthInfo = INDETERMINATE;
}
}
/**
* This returns all the health info stored in this cache
* @return A HealthInfo object wrapping all health data in the cache
*/
public HealthInfo getHealthInfo() {
// A shallow copy is enough because the inner data is immutable.
return new HealthInfo(Map.copyOf(diskInfoByNode), dslHealthInfo, Map.copyOf(repositoriesInfoByNode), fileSettingsHealthInfo);
}
}
| HealthInfoCache |
java | apache__camel | components/camel-rest-openapi/src/test/java/org/apache/camel/component/rest/openapi/OpenApiUtilsTest.java | {
"start": 6043,
"end": 6905
} | class ____ is provided in the schema name
String schemaName = "Tag";
String bindingPackagePath = OpenApiUtils.class.getPackage().getName();
Schema<Object> tagSchema = createTagSchema();
Operation operation = new Operation();
ApiResponses responses = new ApiResponses();
responses.addApiResponse("200", createResponse(tagSchema));
operation.setResponses(responses);
Components components = new Components();
components.addSchemas(schemaName, tagSchema);
OpenApiUtils utils = new OpenApiUtils(new DefaultCamelContext(), bindingPackagePath, components);
assertEquals(Tag.class.getName(), utils.manageResponseBody(operation));
}
@Test
public void shouldReturnCorrectResponseClassNameForSchemaTitle() {
String schemaName = "TagSchema";
//When the | name |
java | ReactiveX__RxJava | src/test/java/io/reactivex/rxjava3/internal/operators/flowable/FlowableFromSupplierTest.java | {
"start": 1350,
"end": 8651
} | class ____ extends RxJavaTest {
@SuppressWarnings("unchecked")
@Test
public void shouldNotInvokeFuncUntilSubscription() throws Throwable {
Supplier<Object> func = mock(Supplier.class);
when(func.get()).thenReturn(new Object());
Flowable<Object> fromSupplierFlowable = Flowable.fromSupplier(func);
verifyNoInteractions(func);
fromSupplierFlowable.subscribe();
verify(func).get();
}
@SuppressWarnings("unchecked")
@Test
public void shouldCallOnNextAndOnCompleted() throws Throwable {
Supplier<String> func = mock(Supplier.class);
when(func.get()).thenReturn("test_value");
Flowable<String> fromSupplierFlowable = Flowable.fromSupplier(func);
Subscriber<String> subscriber = TestHelper.mockSubscriber();
fromSupplierFlowable.subscribe(subscriber);
verify(subscriber).onNext("test_value");
verify(subscriber).onComplete();
verify(subscriber, never()).onError(any(Throwable.class));
}
@SuppressWarnings("unchecked")
@Test
public void shouldCallOnError() throws Throwable {
Supplier<Object> func = mock(Supplier.class);
Throwable throwable = new IllegalStateException("Test exception");
when(func.get()).thenThrow(throwable);
Flowable<Object> fromSupplierFlowable = Flowable.fromSupplier(func);
Subscriber<Object> subscriber = TestHelper.mockSubscriber();
fromSupplierFlowable.subscribe(subscriber);
verify(subscriber, never()).onNext(any());
verify(subscriber, never()).onComplete();
verify(subscriber).onError(throwable);
}
@SuppressWarnings("unchecked")
@Test
public void shouldNotDeliverResultIfSubscriberUnsubscribedBeforeEmission() throws Throwable {
Supplier<String> func = mock(Supplier.class);
final CountDownLatch funcLatch = new CountDownLatch(1);
final CountDownLatch observerLatch = new CountDownLatch(1);
when(func.get()).thenAnswer(new Answer<String>() {
@Override
public String answer(InvocationOnMock invocation) throws Throwable {
observerLatch.countDown();
try {
funcLatch.await();
} catch (InterruptedException e) {
// It's okay, unsubscription causes Thread interruption
// Restoring interruption status of the Thread
Thread.currentThread().interrupt();
}
return "should_not_be_delivered";
}
});
Flowable<String> fromSupplierFlowable = Flowable.fromSupplier(func);
Subscriber<String> subscriber = TestHelper.mockSubscriber();
TestSubscriber<String> outer = new TestSubscriber<>(subscriber);
fromSupplierFlowable
.subscribeOn(Schedulers.computation())
.subscribe(outer);
// Wait until func will be invoked
observerLatch.await();
// Unsubscribing before emission
outer.cancel();
// Emitting result
funcLatch.countDown();
// func must be invoked
verify(func).get();
// Observer must not be notified at all
verify(subscriber).onSubscribe(any(Subscription.class));
verifyNoMoreInteractions(subscriber);
}
@Test
public void shouldAllowToThrowCheckedException() {
final Exception checkedException = new Exception("test exception");
Flowable<Object> fromSupplierFlowable = Flowable.fromSupplier(new Supplier<Object>() {
@Override
public Object get() throws Exception {
throw checkedException;
}
});
Subscriber<Object> subscriber = TestHelper.mockSubscriber();
fromSupplierFlowable.subscribe(subscriber);
verify(subscriber).onSubscribe(any(Subscription.class));
verify(subscriber).onError(checkedException);
verifyNoMoreInteractions(subscriber);
}
@Test
public void fusedFlatMapExecution() {
final int[] calls = { 0 };
Flowable.just(1).flatMap(new Function<Integer, Publisher<? extends Object>>() {
@Override
public Publisher<? extends Object> apply(Integer v)
throws Exception {
return Flowable.fromSupplier(new Supplier<Object>() {
@Override
public Object get() throws Exception {
return ++calls[0];
}
});
}
})
.test()
.assertResult(1);
assertEquals(1, calls[0]);
}
@Test
public void fusedFlatMapExecutionHidden() {
final int[] calls = { 0 };
Flowable.just(1).hide().flatMap(new Function<Integer, Publisher<? extends Object>>() {
@Override
public Publisher<? extends Object> apply(Integer v)
throws Exception {
return Flowable.fromSupplier(new Supplier<Object>() {
@Override
public Object get() throws Exception {
return ++calls[0];
}
});
}
})
.test()
.assertResult(1);
assertEquals(1, calls[0]);
}
@Test
public void fusedFlatMapNull() {
Flowable.just(1).flatMap(new Function<Integer, Publisher<? extends Object>>() {
@Override
public Publisher<? extends Object> apply(Integer v)
throws Exception {
return Flowable.fromSupplier(new Supplier<Object>() {
@Override
public Object get() throws Exception {
return null;
}
});
}
})
.test()
.assertFailure(NullPointerException.class);
}
@Test
public void fusedFlatMapNullHidden() {
Flowable.just(1).hide().flatMap(new Function<Integer, Publisher<? extends Object>>() {
@Override
public Publisher<? extends Object> apply(Integer v)
throws Exception {
return Flowable.fromSupplier(new Supplier<Object>() {
@Override
public Object get() throws Exception {
return null;
}
});
}
})
.test()
.assertFailure(NullPointerException.class);
}
@Test
public void undeliverableUponCancellation() throws Exception {
List<Throwable> errors = TestHelper.trackPluginErrors();
try {
final TestSubscriber<Integer> ts = new TestSubscriber<>();
Flowable.fromSupplier(new Supplier<Integer>() {
@Override
public Integer get() throws Exception {
ts.cancel();
throw new TestException();
}
})
.subscribe(ts);
ts.assertEmpty();
TestHelper.assertUndeliverable(errors, 0, TestException.class);
} finally {
RxJavaPlugins.reset();
}
}
}
| FlowableFromSupplierTest |
java | apache__camel | core/camel-api/src/main/java/org/apache/camel/spi/IdempotentRepository.java | {
"start": 2804,
"end": 3100
} | class ____ about eager vs non-eager mode.
*
* @param key the key of the message for duplicate test
* @return <tt>true</tt> if the key was confirmed
*/
boolean confirm(String key);
/**
* Clear the repository.
* <p/>
* <b>Important:</b> Read the | javadoc |
java | spring-projects__spring-security | config/src/main/java/org/springframework/security/config/http/OAuth2ResourceServerBeanDefinitionParser.java | {
"start": 15975,
"end": 16433
} | class ____
implements AuthenticationManagerResolver<HttpServletRequest> {
private final AuthenticationManager authenticationManager;
StaticAuthenticationManagerResolver(AuthenticationManager authenticationManager) {
this.authenticationManager = authenticationManager;
}
@Override
public AuthenticationManager resolve(HttpServletRequest context) {
return this.authenticationManager;
}
}
static final | StaticAuthenticationManagerResolver |
java | spring-projects__spring-framework | spring-test/src/test/java/org/springframework/test/context/event/DirtiesContextEventPublishingTests.java | {
"start": 6327,
"end": 6692
} | class ____ {
@Test
@DirtiesContext
void test1() {
}
@Test
void test2() {
}
}
@SpringJUnitConfig(Config.class)
// add unique property to get a unique ApplicationContext
@TestPropertySource(properties = "DirtiesContextEventPublishingTests.key = method-level-before-method")
static | MethodLevelAfterMethodDirtiesContextWithSubsequentTestMethodTestCase |
java | elastic__elasticsearch | x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/FloatFallibleState.java | {
"start": 598,
"end": 1723
} | class ____ implements AggregatorState {
private float value;
private boolean seen;
private boolean failed;
FloatFallibleState(float init) {
this.value = init;
}
float floatValue() {
return value;
}
void floatValue(float value) {
this.value = value;
}
boolean seen() {
return seen;
}
void seen(boolean seen) {
this.seen = seen;
}
boolean failed() {
return failed;
}
void failed(boolean failed) {
this.failed = failed;
}
/** Extracts an intermediate view of the contents of this state. */
@Override
public void toIntermediate(Block[] blocks, int offset, DriverContext driverContext) {
assert blocks.length >= offset + 3;
blocks[offset + 0] = driverContext.blockFactory().newConstantFloatBlockWith(value, 1);
blocks[offset + 1] = driverContext.blockFactory().newConstantBooleanBlockWith(seen, 1);
blocks[offset + 2] = driverContext.blockFactory().newConstantBooleanBlockWith(failed, 1);
}
@Override
public void close() {}
}
| FloatFallibleState |
java | elastic__elasticsearch | libs/entitlement/src/main/java/org/elasticsearch/entitlement/package-info.java | {
"start": 11086,
"end": 12561
} | class ____ a layer (via the {@code PluginsResolver} class): we use them to build a
* Module -> Plugin name (String) map. For modularized plugins we use the list of modules defined in the module layer; for the
* non-modularized ones, we use the unnamed module which is unique to each plugin classloader.
* </p>
* <p>
* This map is then passed down and stored by {@link org.elasticsearch.entitlement.runtime.policy.PolicyManager}. Alongside this map,
* {@link org.elasticsearch.entitlement.runtime.policy.PolicyManager} builds a set of references to modules
* that belong to what we call the "system layer", i.e. the layer containing what we consider system modules, and the set of modules
* that we consider belonging to the "server layer".
* {@link org.elasticsearch.entitlement.runtime.policy.PolicyManager} uses this info to identify the layer, and therefore the policy and
* entitlements, for the caller class.
* </p>
* <p>
* See {@link org.elasticsearch.entitlement.runtime.policy.PolicyManager} for details.
* </p>
*
* <h2>Checks</h2>
* <p>
* The injected prologue calls a {@code check$} method on {@link org.elasticsearch.entitlement.bridge.EntitlementChecker}; its
* implementation (normally on {@link org.elasticsearch.entitlement.runtime.policy.ElasticsearchEntitlementChecker}, unless it is a
* version-specific method) calls the appropriate methods on {@link org.elasticsearch.entitlement.runtime.policy.PolicyManager},
* forwarding the caller | to |
java | alibaba__druid | core/src/test/java/com/alibaba/druid/bvt/sql/mysql/createTable/MySqlCreateTableTest39.java | {
"start": 1016,
"end": 2393
} | class ____ extends MysqlTest {
public void test_0() throws Exception {
String sql = "CREATE TABLE lookup" +
" (id INT, INDEX USING BTREE (id))" +
" STATS_PERSISTENT 1;";
MySqlStatementParser parser = new MySqlStatementParser(sql);
List<SQLStatement> statementList = parser.parseStatementList();
SQLStatement stmt = statementList.get(0);
// print(statementList);
assertEquals(1, statementList.size());
MySqlSchemaStatVisitor visitor = new MySqlSchemaStatVisitor();
stmt.accept(visitor);
// System.out.println("Tables : " + visitor.getTables());
// System.out.println("fields : " + visitor.getColumns());
// System.out.println("coditions : " + visitor.getConditions());
// System.out.println("orderBy : " + visitor.getOrderByColumns());
assertEquals(1, visitor.getTables().size());
assertEquals(1, visitor.getColumns().size());
assertEquals(0, visitor.getConditions().size());
assertTrue(visitor.getTables().containsKey(new TableStat.Name("lookup")));
String output = SQLUtils.toMySqlString(stmt);
assertEquals("CREATE TABLE lookup (" +
"\n\tid INT," +
"\n\tINDEX USING BTREE(id)" +
"\n) STATS_PERSISTENT = 1;", output);
}
}
| MySqlCreateTableTest39 |
java | apache__camel | components/camel-file/src/main/java/org/apache/camel/component/file/GenericFileHelper.java | {
"start": 977,
"end": 2336
} | class ____ {
private GenericFileHelper() {
}
public static String asExclusiveReadLockKey(GenericFile file, String key) {
// use the copy from absolute path as that was the original path of the
// file when the lock was acquired
// for example if the file consumer uses preMove then the file is moved
// and therefore has another name
// that would no longer match
String path
= file.getCopyFromAbsoluteFilePath() != null ? file.getCopyFromAbsoluteFilePath() : file.getAbsoluteFilePath();
return asExclusiveReadLockKey(path, key);
}
public static String asExclusiveReadLockKey(String path, String key) {
return path + "-" + key;
}
public static <T> Exchange createDummy(GenericFileEndpoint<T> endpoint, Exchange dynamic, Supplier<GenericFile<T>> file) {
Exchange dummy = endpoint.createExchange(file.get());
if (dynamic != null) {
// enrich with data from dynamic source
if (dynamic.getMessage().hasHeaders()) {
MessageHelper.copyHeaders(dynamic.getMessage(), dummy.getMessage(), true);
if (dynamic.hasVariables()) {
dummy.getVariables().putAll(dynamic.getVariables());
}
}
}
return dummy;
}
}
| GenericFileHelper |
java | spring-projects__spring-framework | spring-beans/src/test/java/org/springframework/beans/factory/aot/BeanDefinitionPropertyValueCodeGeneratorDelegatesTests.java | {
"start": 6227,
"end": 6398
} | class ____ {
@Test
void generateWhenString() {
compile("test\n", (instance, compiled) ->
assertThat(instance).isEqualTo("test\n"));
}
}
@Nested
| StringTests |
java | apache__flink | flink-runtime/src/test/java/org/apache/flink/runtime/scheduler/metrics/StateTimeMetricTest.java | {
"start": 1382,
"end": 5092
} | class ____ {
@Test
void testEnableStateMetrics() {
testMetricSelection(MetricOptions.JobStatusMetrics.STATE);
}
@Test
void testEnableCurrentTimeMetrics() {
testMetricSelection(MetricOptions.JobStatusMetrics.CURRENT_TIME);
}
@Test
void testEnableTotalTimeMetrics() {
testMetricSelection(MetricOptions.JobStatusMetrics.TOTAL_TIME);
}
@Test
void testEnableMultipleMetrics() {
testMetricSelection(
MetricOptions.JobStatusMetrics.CURRENT_TIME,
MetricOptions.JobStatusMetrics.TOTAL_TIME);
}
private static void testMetricSelection(MetricOptions.JobStatusMetrics... selectedMetrics) {
final EnumSet<MetricOptions.JobStatusMetrics> selectedMetricsSet =
EnumSet.noneOf(MetricOptions.JobStatusMetrics.class);
Arrays.stream(selectedMetrics).forEach(selectedMetricsSet::add);
final InterceptingOperatorMetricGroup metricGroup = new InterceptingOperatorMetricGroup();
StateTimeMetric.register(
enable(selectedMetrics), metricGroup, new TestStateTimeMetric(), "test");
final Map<JobStatus, StatusMetricSet> registeredMetrics = extractMetrics(metricGroup);
for (StatusMetricSet metrics : registeredMetrics.values()) {
assertThat(metrics.getState().isPresent())
.isEqualTo(selectedMetricsSet.contains(MetricOptions.JobStatusMetrics.STATE));
assertThat(metrics.getCurrentTime().isPresent())
.isEqualTo(
selectedMetricsSet.contains(
MetricOptions.JobStatusMetrics.CURRENT_TIME));
assertThat(metrics.getTotalTime().isPresent())
.isEqualTo(
selectedMetricsSet.contains(MetricOptions.JobStatusMetrics.TOTAL_TIME));
}
}
static MetricOptions.JobStatusMetricsSettings enable(
MetricOptions.JobStatusMetrics... enabledMetrics) {
final Configuration configuration = new Configuration();
configuration.set(MetricOptions.JOB_STATUS_METRICS, Arrays.asList(enabledMetrics));
return MetricOptions.JobStatusMetricsSettings.fromConfiguration(configuration);
}
static Map<JobStatus, StatusMetricSet> extractMetrics(InterceptingOperatorMetricGroup metrics) {
final Map<JobStatus, StatusMetricSet> extractedMetrics = new EnumMap<>(JobStatus.class);
for (JobStatus jobStatus : JobStatus.values()) {
final String baseMetricName = JobStatusMetrics.getBaseMetricName(jobStatus);
final StatusMetricSet statusMetricSet =
new StatusMetricSet(
(Gauge<Long>)
metrics.get(StateTimeMetric.getStateMetricName(baseMetricName)),
(Gauge<Long>)
metrics.get(
StateTimeMetric.getCurrentTimeMetricName(
baseMetricName)),
(Gauge<Long>)
metrics.get(
StateTimeMetric.getTotalTimeMetricName(
baseMetricName)));
if (statusMetricSet.getState().isPresent()
|| statusMetricSet.getCurrentTime().isPresent()
|| statusMetricSet.getTotalTime().isPresent()) {
extractedMetrics.put(jobStatus, statusMetricSet);
}
}
return extractedMetrics;
}
private static | StateTimeMetricTest |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/boot/jaxb/internal/stax/AbstractEventReader.java | {
"start": 780,
"end": 6109
} | class ____ extends EventReaderDelegate {
private static final String VERSION_ATTRIBUTE_NAME = "version";
private final String rootElementName;
private final XsdDescriptor xsdDescriptor;
private final XMLEventFactory xmlEventFactory;
public AbstractEventReader(
String rootElementName,
XsdDescriptor xsdDescriptor,
XMLEventReader reader,
XMLEventFactory xmlEventFactory) {
super( reader );
this.rootElementName = rootElementName;
this.xsdDescriptor = xsdDescriptor;
this.xmlEventFactory = xmlEventFactory;
}
@Override
public XMLEvent peek() throws XMLStreamException {
return wrap( super.peek() );
}
@Override
public XMLEvent nextEvent() throws XMLStreamException {
return wrap( super.nextEvent() );
}
private XMLEvent wrap(XMLEvent event) {
if ( event != null ) {
if ( event.isStartElement() ) {
return wrap( event.asStartElement() );
}
else if ( event.isEndElement() ) {
return wrap( event.asEndElement() );
}
}
return event;
}
private StartElement wrap(StartElement startElement) {
final List<Attribute> newElementAttributeList = mapAttributes( startElement );
final List<Namespace> newNamespaceList = mapNamespaces( startElement );
// Transfer the location info from the incoming event to the event factory
// so that the event we ask it to generate for us has the same location info
xmlEventFactory.setLocation( startElement.getLocation() );
return xmlEventFactory.createStartElement(
new QName( xsdDescriptor.getNamespaceUri(), startElement.getName().getLocalPart() ),
newElementAttributeList.iterator(),
newNamespaceList.iterator()
);
}
private Iterator<Attribute> existingXmlAttributesIterator(StartElement startElement) {
return startElement.getAttributes();
}
private List<Attribute> mapAttributes(StartElement startElement) {
final List<Attribute> mappedAttributes = new ArrayList<>();
final Iterator<Attribute> existingAttributesIterator = existingXmlAttributesIterator( startElement );
while ( existingAttributesIterator.hasNext() ) {
final Attribute originalAttribute = existingAttributesIterator.next();
final Attribute attributeToUse = mapAttribute( startElement, originalAttribute );
mappedAttributes.add( attributeToUse );
}
return mappedAttributes;
}
private Attribute mapAttribute(StartElement startElement, Attribute originalAttribute) {
// Here we look to see if this attribute is the JPA version attribute, and if so do the following:
// 1) validate its version attribute is valid per our "latest XSD"
// 2) update its version attribute to the latest version if not already
//
// NOTE : atm this is a very simple check using just the attribute's local name
// rather than checking its qualified name. It is possibly (though unlikely)
// that this could match on "other" version attributes in the same element
if ( rootElementName.equals( startElement.getName().getLocalPart() ) ) {
if ( VERSION_ATTRIBUTE_NAME.equals( originalAttribute.getName().getLocalPart() ) ) {
final String specifiedVersion = originalAttribute.getValue();
if ( !XsdHelper.isValidJpaVersion( specifiedVersion ) ) {
throw new BadVersionException( specifiedVersion );
}
return xmlEventFactory.createAttribute( VERSION_ATTRIBUTE_NAME, xsdDescriptor.getVersion() );
}
}
return originalAttribute;
}
private List<Namespace> mapNamespaces(StartElement startElement) {
return mapNamespaces( existingXmlNamespacesIterator( startElement ) );
}
private List<Namespace> mapNamespaces(Iterator<Namespace> originalNamespaceIterator ) {
final List<Namespace> mappedNamespaces = new ArrayList<>();
while ( originalNamespaceIterator.hasNext() ) {
final Namespace originalNamespace = originalNamespaceIterator.next();
final Namespace mappedNamespace = mapNamespace( originalNamespace );
mappedNamespaces.add( mappedNamespace );
}
if ( mappedNamespaces.isEmpty() ) {
mappedNamespaces.add( xmlEventFactory.createNamespace( xsdDescriptor.getNamespaceUri() ) );
}
return mappedNamespaces;
}
private Iterator<Namespace> existingXmlNamespacesIterator(StartElement startElement) {
return startElement.getNamespaces();
}
private Namespace mapNamespace(Namespace originalNamespace) {
if ( shouldBeMappedToLatestJpaDescriptor( originalNamespace.getNamespaceURI() ) ) {
// this is a namespace "to map" so map it
return xmlEventFactory.createNamespace( originalNamespace.getPrefix(), xsdDescriptor.getNamespaceUri() );
}
return originalNamespace;
}
protected abstract boolean shouldBeMappedToLatestJpaDescriptor(String uri);
private XMLEvent wrap(EndElement endElement) {
final List<Namespace> targetNamespaces = mapNamespaces( existingXmlNamespacesIterator( endElement ) );
// Transfer the location info from the incoming event to the event factory
// so that the event we ask it to generate for us has the same location info
xmlEventFactory.setLocation( endElement.getLocation() );
return xmlEventFactory.createEndElement(
new QName( xsdDescriptor.getNamespaceUri(), endElement.getName().getLocalPart() ),
targetNamespaces.iterator()
);
}
private Iterator<Namespace> existingXmlNamespacesIterator(EndElement endElement) {
return endElement.getNamespaces();
}
public static | AbstractEventReader |
java | google__dagger | hilt-compiler/main/java/dagger/hilt/processor/internal/root/AggregatedRootGenerator.java | {
"start": 978,
"end": 2916
} | class ____ {
private final XTypeElement rootElement;
private final XTypeElement originatingRootElement;
private final XTypeElement rootAnnotation;
private final ClassName rootComponentName;
AggregatedRootGenerator(
XTypeElement rootElement,
XTypeElement originatingRootElement,
XTypeElement rootAnnotation,
ClassName rootComponentName) {
this.rootElement = rootElement;
this.originatingRootElement = originatingRootElement;
this.rootAnnotation = rootAnnotation;
this.rootComponentName = rootComponentName;
}
void generate() {
AnnotationSpec.Builder aggregatedRootAnnotation =
AnnotationSpec.builder(ClassNames.AGGREGATED_ROOT)
.addMember("root", "$S", rootElement.getQualifiedName())
.addMember("rootPackage", "$S", rootElement.getClassName().packageName())
.addMember("originatingRoot", "$S", originatingRootElement.getQualifiedName())
.addMember(
"originatingRootPackage", "$S", originatingRootElement.getClassName().packageName())
.addMember("rootAnnotation", "$T.class", rootAnnotation.getClassName())
.addMember("rootComponentPackage", "$S", rootComponentName.packageName());
rootElement
.getClassName()
.simpleNames()
.forEach(name -> aggregatedRootAnnotation.addMember("rootSimpleNames", "$S", name));
originatingRootElement
.getClassName()
.simpleNames()
.forEach(
name -> aggregatedRootAnnotation.addMember("originatingRootSimpleNames", "$S", name));
rootComponentName
.simpleNames()
.forEach(
name -> aggregatedRootAnnotation.addMember("rootComponentSimpleNames", "$S", name));
Processors.generateAggregatingClass(
ClassNames.AGGREGATED_ROOT_PACKAGE,
aggregatedRootAnnotation.build(),
rootElement,
getClass());
}
}
| AggregatedRootGenerator |
java | quarkusio__quarkus | integration-tests/rest-client-reactive-http2/src/main/java/io/quarkus/it/rest/client/http2/multipart/MultipartResource.java | {
"start": 935,
"end": 13347
} | class ____ {
private static final Logger log = Logger.getLogger(MultipartResource.class);
public static final String HELLO_WORLD = "HELLO WORLD";
public static final String GREETING_TXT = "greeting.txt";
public static final int NUMBER = 12342;
@RestClient
MultipartClient client;
@GET
@Path("/client/octet-stream")
@Produces(MediaType.TEXT_PLAIN)
@Blocking
public String sendOctetStreamFile() throws IOException {
java.nio.file.Path tempFile = Files.createTempFile("dummy", ".txt");
Files.write(tempFile, "test".getBytes(UTF_8));
return client.octetStreamFile(tempFile.toFile());
}
@GET
@Path("/client/byte-array-as-binary-file")
@Consumes(MediaType.TEXT_PLAIN)
@Produces(MediaType.TEXT_PLAIN)
@Blocking
public String sendByteArray(@QueryParam("nullFile") @DefaultValue("false") boolean nullFile) {
MultipartClient.WithByteArrayAsBinaryFile data = new MultipartClient.WithByteArrayAsBinaryFile();
if (!nullFile) {
data.file = HELLO_WORLD.getBytes(UTF_8);
}
data.fileName = GREETING_TXT;
return client.sendByteArrayAsBinaryFile(data);
}
@GET
@Path("/client/params/byte-array-as-binary-file")
@Consumes(MediaType.TEXT_PLAIN)
@Produces(MediaType.TEXT_PLAIN)
@Blocking
public String sendByteArrayParams(@QueryParam("nullFile") @DefaultValue("false") boolean nullFile) {
byte[] file = null;
if (!nullFile) {
file = HELLO_WORLD.getBytes(UTF_8);
}
String fileName = GREETING_TXT;
return client.sendByteArrayAsBinaryFile(file, fileName);
}
@GET
@Path("/client/multi-byte-as-binary-file")
@Consumes(MediaType.TEXT_PLAIN)
@Produces(MediaType.TEXT_PLAIN)
@Blocking
public String sendMultiByte(@QueryParam("nullFile") @DefaultValue("false") boolean nullFile) {
MultipartClient.WithMultiByteAsBinaryFile data = new MultipartClient.WithMultiByteAsBinaryFile();
if (!nullFile) {
List<Byte> bytes = new ArrayList<>();
for (byte b : HELLO_WORLD.getBytes(UTF_8)) {
bytes.add(b);
}
data.file = Multi.createFrom().iterable(bytes);
}
data.fileName = GREETING_TXT;
return client.sendMultiByteAsBinaryFile(data);
}
@GET
@Path("/client/params/multi-byte-as-binary-file")
@Consumes(MediaType.TEXT_PLAIN)
@Produces(MediaType.TEXT_PLAIN)
@Blocking
public String sendMultiByteParams(@QueryParam("nullFile") @DefaultValue("false") boolean nullFile) {
Multi<Byte> file = null;
if (!nullFile) {
List<Byte> bytes = new ArrayList<>();
for (byte b : HELLO_WORLD.getBytes(UTF_8)) {
bytes.add(b);
}
file = Multi.createFrom().iterable(bytes);
}
String fileName = GREETING_TXT;
return client.sendMultiByteAsBinaryFile(file, fileName);
}
/**
 * Uploads HELLO_WORLD as a Vert.x {@code Buffer} binary-file part via the typed
 * client POJO; the file part is left unset when {@code nullFile} is true.
 */
@GET
@Path("/client/buffer-as-binary-file")
@Consumes(MediaType.TEXT_PLAIN)
@Produces(MediaType.TEXT_PLAIN)
@Blocking
public String sendBuffer(@QueryParam("nullFile") @DefaultValue("false") boolean nullFile) {
    MultipartClient.WithBufferAsBinaryFile payload = new MultipartClient.WithBufferAsBinaryFile();
    payload.fileName = GREETING_TXT;
    if (!nullFile) {
        payload.file = Buffer.buffer(HELLO_WORLD);
    }
    return client.sendBufferAsBinaryFile(payload);
}
/**
 * Variant of the {@code Buffer} binary-file upload that passes the parts as
 * individual client parameters instead of a POJO.
 */
@GET
@Path("/client/params/buffer-as-binary-file")
@Consumes(MediaType.TEXT_PLAIN)
@Produces(MediaType.TEXT_PLAIN)
@Blocking
public String sendBufferParams(@QueryParam("nullFile") @DefaultValue("false") boolean nullFile) {
    Buffer upload = nullFile ? null : Buffer.buffer(HELLO_WORLD);
    return client.sendBufferAsBinaryFile(upload, GREETING_TXT);
}
/**
 * Uploads a freshly created temp file as a {@code File} binary-file part via
 * the typed client POJO; the file part is left unset when {@code nullFile} is true.
 */
@GET
@Path("/client/file-as-binary-file")
@Consumes(MediaType.TEXT_PLAIN)
@Produces(MediaType.TEXT_PLAIN)
@Blocking
public String sendFileAsBinary(@QueryParam("nullFile") @DefaultValue("false") boolean nullFile) throws IOException {
    MultipartClient.WithFileAsBinaryFile payload = new MultipartClient.WithFileAsBinaryFile();
    payload.fileName = GREETING_TXT;
    if (!nullFile) {
        payload.file = createTempHelloWorldFile();
    }
    return client.sendFileAsBinaryFile(payload);
}
/**
 * Variant of the {@code File} binary-file upload that passes the parts as
 * individual client parameters instead of a POJO.
 */
@GET
@Path("/client/params/file-as-binary-file")
@Consumes(MediaType.TEXT_PLAIN)
@Produces(MediaType.TEXT_PLAIN)
@Blocking
public String sendFileAsBinaryParams(@QueryParam("nullFile") @DefaultValue("false") boolean nullFile) throws IOException {
    File upload = nullFile ? null : createTempHelloWorldFile();
    return client.sendFileAsBinaryFile(upload, GREETING_TXT);
}
/**
 * Uploads a freshly created temp file as a {@code java.nio.file.Path}
 * binary-file part via the typed client POJO; the file part is left unset when
 * {@code nullFile} is true.
 */
@GET
@Path("/client/path-as-binary-file")
@Consumes(MediaType.TEXT_PLAIN)
@Produces(MediaType.TEXT_PLAIN)
@Blocking
public String sendPathAsBinary(@QueryParam("nullFile") @DefaultValue("false") boolean nullFile) throws IOException {
    MultipartClient.WithPathAsBinaryFile payload = new MultipartClient.WithPathAsBinaryFile();
    payload.fileName = GREETING_TXT;
    if (!nullFile) {
        payload.file = createTempHelloWorldFile().toPath();
    }
    return client.sendPathAsBinaryFile(payload);
}
/**
 * Variant of the {@code Path} binary-file upload that passes the parts as
 * individual client parameters instead of a POJO.
 */
@GET
@Path("/client/params/path-as-binary-file")
@Consumes(MediaType.TEXT_PLAIN)
@Produces(MediaType.TEXT_PLAIN)
@Blocking
public String sendPathAsBinaryParams(@QueryParam("nullFile") @DefaultValue("false") boolean nullFile) throws IOException {
    java.nio.file.Path upload = nullFile ? null : createTempHelloWorldFile().toPath();
    return client.sendPathAsBinaryFile(upload, GREETING_TXT);
}
/**
 * Uploads HELLO_WORLD as a {@code byte[]} text-file part plus a number part,
 * via the typed client POJO.
 */
@GET
@Path("/client/byte-array-as-text-file")
@Consumes(MediaType.TEXT_PLAIN)
@Produces(MediaType.TEXT_PLAIN)
@Blocking
public String sendByteArrayAsTextFile() {
    MultipartClient.WithByteArrayAsTextFile payload = new MultipartClient.WithByteArrayAsTextFile();
    payload.number = NUMBER;
    payload.file = HELLO_WORLD.getBytes(UTF_8);
    return client.sendByteArrayAsTextFile(payload);
}
/**
 * Variant of the byte-array text-file upload that passes the parts as
 * individual client parameters instead of a POJO.
 */
@GET
@Path("/client/params/byte-array-as-text-file")
@Consumes(MediaType.TEXT_PLAIN)
@Produces(MediaType.TEXT_PLAIN)
@Blocking
public String sendByteArrayAsTextFileParams() {
    return client.sendByteArrayAsTextFile(HELLO_WORLD.getBytes(UTF_8), NUMBER);
}
/**
 * Uploads HELLO_WORLD as a Vert.x {@code Buffer} text-file part plus a number
 * part, via the typed client POJO.
 */
@GET
@Path("/client/buffer-as-text-file")
@Consumes(MediaType.TEXT_PLAIN)
@Produces(MediaType.TEXT_PLAIN)
@Blocking
public String sendBufferAsTextFile() {
    MultipartClient.WithBufferAsTextFile payload = new MultipartClient.WithBufferAsTextFile();
    payload.number = NUMBER;
    payload.file = Buffer.buffer(HELLO_WORLD);
    return client.sendBufferAsTextFile(payload);
}
/**
 * Variant of the {@code Buffer} text-file upload that passes the parts as
 * individual client parameters instead of a POJO.
 */
@GET
@Path("/client/params/buffer-as-text-file")
@Consumes(MediaType.TEXT_PLAIN)
@Produces(MediaType.TEXT_PLAIN)
@Blocking
public String sendBufferAsTextFileParams() {
    return client.sendBufferAsTextFile(Buffer.buffer(HELLO_WORLD), NUMBER);
}
/**
 * Uploads a freshly created temp file as a {@code File} text-file part plus a
 * number part, via the typed client POJO.
 */
@GET
@Path("/client/file-as-text-file")
@Consumes(MediaType.TEXT_PLAIN)
@Produces(MediaType.TEXT_PLAIN)
@Blocking
public String sendFileAsText() throws IOException {
    MultipartClient.WithFileAsTextFile payload = new MultipartClient.WithFileAsTextFile();
    payload.file = createTempHelloWorldFile();
    payload.number = NUMBER;
    return client.sendFileAsTextFile(payload);
}
/**
 * Variant of the {@code File} text-file upload that passes the parts as
 * individual client parameters instead of a POJO.
 */
@GET
@Path("/client/params/file-as-text-file")
@Consumes(MediaType.TEXT_PLAIN)
@Produces(MediaType.TEXT_PLAIN)
@Blocking
public String sendFileAsTextParams() throws IOException {
    return client.sendFileAsTextFile(createTempHelloWorldFile(), NUMBER);
}
/**
 * Uploads a freshly created temp file as a {@code java.nio.file.Path}
 * text-file part plus a number part, via the typed client POJO.
 */
@GET
@Path("/client/path-as-text-file")
@Consumes(MediaType.TEXT_PLAIN)
@Produces(MediaType.TEXT_PLAIN)
@Blocking
public String sendPathAsText() throws IOException {
    MultipartClient.WithPathAsTextFile payload = new MultipartClient.WithPathAsTextFile();
    payload.file = createTempHelloWorldFile().toPath();
    payload.number = NUMBER;
    return client.sendPathAsTextFile(payload);
}
/**
 * Variant of the {@code Path} text-file upload that passes the parts as
 * individual client parameters instead of a POJO.
 */
@GET
@Path("/client/params/path-as-text-file")
@Consumes(MediaType.TEXT_PLAIN)
@Produces(MediaType.TEXT_PLAIN)
@Blocking
public String sendPathAsTextParams() throws IOException {
    return client.sendPathAsTextFile(createTempHelloWorldFile().toPath(), NUMBER);
}
/**
 * Echo endpoint: returns the text content of the uploaded octet-stream body.
 */
@POST
@Path("/echo/octet-stream")
@Consumes(MediaType.APPLICATION_OCTET_STREAM)
public String consumeOctetStream(File file) throws IOException {
    java.nio.file.Path uploaded = file.toPath();
    return Files.readString(uploaded);
}
/**
 * Echo endpoint for a binary-file multipart body: reports whether the file
 * content and file name match the expected test fixtures.
 */
@POST
@Path("/echo/binary")
@Consumes(MediaType.MULTIPART_FORM_DATA)
public String consumeMultipart(MultipartBodyWithBinaryFile body) {
    // "null" marker when the file part is absent, otherwise a boolean check.
    Object fileOk = body.file == null ? "null" : containsHelloWorld(body.file);
    boolean nameOk = GREETING_TXT.equals(body.fileName);
    return String.format("fileOk:%s,nameOk:%s", fileOk, nameOk);
}
/**
 * Echo endpoint for a text-file multipart body: reports whether the file
 * content and the first number part match the expected test fixtures.
 */
@POST
@Path("/echo/text")
@Consumes(MediaType.MULTIPART_FORM_DATA)
public String consumeText(MultipartBodyWithTextFile2 body) {
    boolean fileOk = containsHelloWorld(body.file);
    boolean numberOk = NUMBER == Integer.parseInt(body.number[0]);
    return String.format("fileOk:%s,numberOk:%s", fileOk, numberOk);
}
/**
 * Echo endpoint for a binary-file multipart body that also carries a POJO
 * part: reports file, name, POJO and uuid checks in a single formatted string.
 */
@POST
@Path("/echo/with-pojo")
@Consumes(MediaType.MULTIPART_FORM_DATA)
public String consumeBinaryWithPojo(MultipartBodyWithBinaryFileAndPojo fileWithPojo) {
    boolean fileOk = containsHelloWorld(fileWithPojo.file);
    boolean nameOk = GREETING_TXT.equals(fileWithPojo.fileName);
    // "null" marker when the POJO part is absent, otherwise a boolean check.
    Object pojoOk = fileWithPojo.pojo == null ? "null"
            : "some-name".equals(fileWithPojo.pojo.getName()) && "some-value".equals(fileWithPojo.pojo.getValue());
    boolean uuidNull = fileWithPojo.uuid == null;
    return String.format("fileOk:%s,nameOk:%s,pojoOk:%s,uuidNull:%s", fileOk, nameOk, pojoOk, uuidNull);
}
/**
 * Produces a multipart response containing a temp file with HELLO_WORLD and
 * the test number as a string part.
 */
@GET
@Path("/produces/multipart")
@Produces(MediaType.MULTIPART_FORM_DATA)
public MultipartBodyWithTextFile produceMultipart() throws IOException {
    MultipartBodyWithTextFile body = new MultipartBodyWithTextFile();
    body.file = createTempHelloWorldFile();
    body.number = String.valueOf(NUMBER);
    return body;
}
/**
 * Produces a plain-text RestResponse wrapping an InputStream over a temp file
 * containing HELLO_WORLD.
 */
@GET
@Path("/produces/input-stream-rest-response")
public RestResponse<? extends InputStream> produceInputStreamRestResponse() throws IOException {
    // NOTE(review): the stream is deliberately not closed here; assumed to be
    // consumed and closed by the framework when writing the response — confirm.
    FileInputStream stream = new FileInputStream(createTempHelloWorldFile());
    return RestResponse.ResponseBuilder
            .ok(stream)
            .type(MediaType.TEXT_PLAIN_TYPE)
            .build();
}
/**
 * Creates a temporary file containing {@code HELLO_WORLD} encoded as UTF-8 and
 * registers it for deletion on JVM exit.
 *
 * @return the newly created temp file
 * @throws IOException if the file cannot be created or written
 */
private File createTempHelloWorldFile() throws IOException {
    File tempFile = File.createTempFile("quarkus-test", ".bin");
    tempFile.deleteOnExit();
    // Use an explicit charset: the previous bare getBytes() relied on the
    // platform default, while the rest of this resource consistently uses UTF-8.
    Files.write(tempFile.toPath(), HELLO_WORLD.getBytes(UTF_8));
    return tempFile;
}
/**
 * Reads the given uploaded file fully and checks that its content is exactly
 * {@code HELLO_WORLD}.
 *
 * @param file the uploaded file to inspect
 * @return {@code true} if the content matches; {@code false} on mismatch or
 *         when the file cannot be read
 */
private boolean containsHelloWorld(File file) {
    try {
        // Decode explicitly as UTF-8 to match how the test fixtures are written.
        String actual = new String(Files.readAllBytes(file.toPath()), UTF_8);
        return HELLO_WORLD.equals(actual);
    } catch (IOException e) {
        // Fixed the garbled message ("Failed to contents of") and pass the
        // exception so the stack trace is not silently dropped.
        log.error("Failed to read contents of uploaded file " + file.getAbsolutePath(), e);
        return false;
    }
}
public static | MultipartResource |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/query/results/TypedQueryCreationTests.java | {
"start": 955,
"end": 7634
} | class ____ {
@BeforeEach
void prepareTestData(SessionFactoryScope sessions) {
sessions.inTransaction( (session) -> {
session.persist( new SimpleEntity( 1, "first", new SimpleComposite( "value1", "value2" ) ) );
} );
}
@AfterEach
public void dropTestData(SessionFactoryScope scope) {
scope.getSessionFactory().getSchemaManager().truncate();
}
@Test
void testCreateQuery(SessionFactoryScope sessions) {
sessions.inTransaction( (session) -> {
final SimpleEntity rtn = session.createQuery( Queries.ENTITY, SimpleEntity.class ).getSingleResultOrNull();
assertThat( rtn ).isNotNull();
assertThat( rtn.id ).isEqualTo( 1 );
} );
sessions.inTransaction( (session) -> {
final SimpleEntity rtn = session.createQuery( Queries.ENTITY_NO_SELECT, SimpleEntity.class ).getSingleResultOrNull();
assertThat( rtn ).isNotNull();
assertThat( rtn.id ).isEqualTo( 1 );
} );
sessions.inTransaction( (session) -> {
final SimpleComposite rtn = session.createQuery( Queries.COMPOSITE, SimpleComposite.class ).getSingleResultOrNull();
assertThat( rtn ).isNotNull();
assertThat( rtn.value1 ).isEqualTo( "value1" );
assertThat( rtn.value2 ).isEqualTo( "value2" );
} );
sessions.inTransaction( (session) -> {
final Dto rtn = session.createQuery( Queries.ID_NAME_DTO, Dto.class ).getSingleResultOrNull();
assertThat( rtn ).isNotNull();
assertThat( rtn.getKey() ).isEqualTo( 1 );
assertThat( rtn.getText() ).isEqualTo( "first" );
} );
sessions.inTransaction( (session) -> {
final Dto rtn = session.createQuery( Queries.ID_COMP_VAL_DTO, Dto.class ).getSingleResultOrNull();
assertThat( rtn ).isNotNull();
assertThat( rtn.getKey() ).isEqualTo( 1 );
assertThat( rtn.getText() ).isEqualTo( "value1" );
} );
sessions.inTransaction( (session) -> {
final String rtn = session.createQuery( Queries.NAME, String.class ).getSingleResultOrNull();
assertThat( rtn ).isNotNull();
assertThat( rtn ).isEqualTo( "first" );
} );
sessions.inTransaction( (session) -> {
final String rtn = session.createQuery( Queries.COMP_VAL, String.class ).getSingleResultOrNull();
assertThat( rtn ).isNotNull();
assertThat( rtn ).isEqualTo( "value1" );
} );
}
@Test
void testCreateSelectionQuery(SessionFactoryScope sessions) {
sessions.inTransaction( (session) -> {
final SimpleEntity rtn = session.createSelectionQuery( Queries.ENTITY, SimpleEntity.class ).getSingleResultOrNull();
assertThat( rtn ).isNotNull();
assertThat( rtn.id ).isEqualTo( 1 );
} );
sessions.inTransaction( (session) -> {
final SimpleEntity rtn = session.createSelectionQuery( Queries.ENTITY_NO_SELECT, SimpleEntity.class ).getSingleResultOrNull();
assertThat( rtn ).isNotNull();
assertThat( rtn.id ).isEqualTo( 1 );
} );
sessions.inTransaction( (session) -> {
final SimpleComposite rtn = session.createSelectionQuery( Queries.COMPOSITE, SimpleComposite.class ).getSingleResultOrNull();
assertThat( rtn ).isNotNull();
assertThat( rtn.value1 ).isEqualTo( "value1" );
assertThat( rtn.value2 ).isEqualTo( "value2" );
} );
sessions.inTransaction( (session) -> {
final Dto rtn = session.createSelectionQuery( Queries.ID_NAME_DTO, Dto.class ).getSingleResultOrNull();
assertThat( rtn ).isNotNull();
assertThat( rtn.getKey() ).isEqualTo( 1 );
assertThat( rtn.getText() ).isEqualTo( "first" );
} );
sessions.inTransaction( (session) -> {
final Dto rtn = session.createSelectionQuery( Queries.ID_COMP_VAL_DTO, Dto.class ).getSingleResultOrNull();
assertThat( rtn ).isNotNull();
assertThat( rtn.getKey() ).isEqualTo( 1 );
assertThat( rtn.getText() ).isEqualTo( "value1" );
} );
sessions.inTransaction( (session) -> {
final String rtn = session.createSelectionQuery( Queries.NAME, String.class ).getSingleResultOrNull();
assertThat( rtn ).isNotNull();
assertThat( rtn ).isEqualTo( "first" );
} );
sessions.inTransaction( (session) -> {
final String rtn = session.createSelectionQuery( Queries.COMP_VAL, String.class ).getSingleResultOrNull();
assertThat( rtn ).isNotNull();
assertThat( rtn ).isEqualTo( "value1" );
} );
}
@Test
void testCreateNamedQuery(SessionFactoryScope sessions) {
sessions.inTransaction( (session) -> {
final SimpleEntity rtn = session.createNamedQuery( Queries.NAMED_ENTITY, SimpleEntity.class ).getSingleResultOrNull();
assertThat( rtn ).isNotNull();
assertThat( rtn.id ).isEqualTo( 1 );
} );
sessions.inTransaction( (session) -> {
final SimpleEntity rtn = session.createNamedQuery( Queries.NAMED_ENTITY_NO_SELECT, SimpleEntity.class ).getSingleResultOrNull();
assertThat( rtn ).isNotNull();
assertThat( rtn.id ).isEqualTo( 1 );
} );
sessions.inTransaction( (session) -> {
final SimpleComposite rtn = session.createNamedQuery( Queries.NAMED_COMPOSITE, SimpleComposite.class ).getSingleResultOrNull();
assertThat( rtn ).isNotNull();
assertThat( rtn.value1 ).isEqualTo( "value1" );
assertThat( rtn.value2 ).isEqualTo( "value2" );
} );
sessions.inTransaction( (session) -> {
final Dto rtn = session.createNamedQuery( Queries.NAMED_ID_NAME_DTO, Dto.class ).getSingleResultOrNull();
assertThat( rtn ).isNotNull();
assertThat( rtn.getKey() ).isEqualTo( 1 );
assertThat( rtn.getText() ).isEqualTo( "first" );
} );
sessions.inTransaction( (session) -> {
final Dto rtn = session.createNamedQuery( Queries.NAMED_ID_COMP_VAL_DTO, Dto.class ).getSingleResultOrNull();
assertThat( rtn ).isNotNull();
assertThat( rtn.getKey() ).isEqualTo( 1 );
assertThat( rtn.getText() ).isEqualTo( "value1" );
} );
sessions.inTransaction( (session) -> {
final String rtn = session.createNamedQuery( Queries.NAMED_NAME, String.class ).getSingleResultOrNull();
assertThat( rtn ).isNotNull();
assertThat( rtn ).isEqualTo( "first" );
} );
sessions.inTransaction( (session) -> {
final String rtn = session.createNamedQuery( Queries.NAMED_COMP_VAL, String.class ).getSingleResultOrNull();
assertThat( rtn ).isNotNull();
assertThat( rtn ).isEqualTo( "value1" );
} );
}
@Test
void testCriteria(SessionFactoryScope sessions) {
sessions.inTransaction( (session) -> {
final HibernateCriteriaBuilder criteriaBuilder = session.getCriteriaBuilder();
final JpaCriteriaQuery<SimpleEntity> criteria = criteriaBuilder.createQuery( SimpleEntity.class );
final JpaRoot<SimpleEntity> root = criteria.from( SimpleEntity.class );
criteria.select( root );
final SimpleEntity rtn = session.createQuery( criteria ).getSingleResultOrNull();
assertThat( rtn ).isNotNull();
assertThat( rtn.id ).isEqualTo( 1 );
} );
}
}
| TypedQueryCreationTests |
java | spring-projects__spring-boot | core/spring-boot-autoconfigure/src/main/java/org/springframework/boot/autoconfigure/info/ProjectInfoProperties.java | {
"start": 1356,
"end": 1967
} | class ____ {
/**
* Location of the generated build-info.properties file.
*/
private Resource location = new ClassPathResource("META-INF/build-info.properties");
/**
* File encoding.
*/
private Charset encoding = StandardCharsets.UTF_8;
public Resource getLocation() {
return this.location;
}
public void setLocation(Resource location) {
this.location = location;
}
public Charset getEncoding() {
return this.encoding;
}
public void setEncoding(Charset encoding) {
this.encoding = encoding;
}
}
/**
* Git specific info properties.
*/
public static | Build |
java | apache__camel | test-infra/camel-test-infra-azure-common/src/main/java/org/apache/camel/test/infra/azure/common/AzureCredentialsHolder.java | {
"start": 862,
"end": 951
} | interface ____ {
String accountName();
String accountKey();
}
| AzureCredentialsHolder |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.