language stringclasses 1 value | repo stringclasses 60 values | path stringlengths 22 294 | class_span dict | source stringlengths 13 1.16M | target stringlengths 1 113 |
|---|---|---|---|---|---|
java | spring-projects__spring-framework | spring-context-support/src/main/java/org/springframework/ui/freemarker/FreeMarkerConfigurationFactoryBean.java | {
"start": 1846,
"end": 2190
} | class ____ configuration
* details.
*
* <p>Note: Spring's FreeMarker support requires FreeMarker 2.3.33 or higher.
*
* @author Darren Davison
* @since 03.03.2004
* @see #setConfigLocation
* @see #setFreemarkerSettings
* @see #setTemplateLoaderPath
* @see org.springframework.web.servlet.view.freemarker.FreeMarkerConfigurer
*/
public | for |
java | apache__kafka | clients/src/main/java/org/apache/kafka/common/security/oauthbearer/internals/secured/RefreshingHttpsJwksVerificationKeyResolver.java | {
"start": 3005,
"end": 5747
} | class ____ implements CloseableVerificationKeyResolver {
private static final Logger log = LoggerFactory.getLogger(RefreshingHttpsJwksVerificationKeyResolver.class);
private final RefreshingHttpsJwks refreshingHttpsJwks;
private final VerificationJwkSelector verificationJwkSelector;
private boolean isInitialized;
public RefreshingHttpsJwksVerificationKeyResolver(RefreshingHttpsJwks refreshingHttpsJwks) {
this.refreshingHttpsJwks = refreshingHttpsJwks;
this.verificationJwkSelector = new VerificationJwkSelector();
}
@Override
public void configure(Map<String, ?> configs, String saslMechanism, List<AppConfigurationEntry> jaasConfigEntries) {
try {
log.debug("configure started");
refreshingHttpsJwks.init();
} catch (IOException e) {
throw new KafkaException(e);
} finally {
isInitialized = true;
}
}
@Override
public void close() {
try {
log.debug("close started");
refreshingHttpsJwks.close();
} finally {
log.debug("close completed");
}
}
@Override
public Key resolveKey(JsonWebSignature jws, List<JsonWebStructure> nestingContext) throws UnresolvableKeyException {
if (!isInitialized)
throw new IllegalStateException("Please call configure() first");
try {
List<JsonWebKey> jwks = refreshingHttpsJwks.getJsonWebKeys();
JsonWebKey jwk = verificationJwkSelector.select(jws, jwks);
if (jwk != null)
return jwk.getKey();
String keyId = jws.getKeyIdHeaderValue();
if (refreshingHttpsJwks.maybeExpediteRefresh(keyId))
log.debug("Refreshing JWKs from {} as no suitable verification key for JWS w/ header {} was found in {}", refreshingHttpsJwks.getLocation(), jws.getHeaders().getFullHeaderAsJsonString(), jwks);
String sb = "Unable to find a suitable verification key for JWS w/ header " + jws.getHeaders().getFullHeaderAsJsonString() +
" from JWKs " + jwks + " obtained from " +
refreshingHttpsJwks.getLocation();
throw new UnresolvableKeyException(sb);
} catch (JoseException | IOException e) {
String sb = "Unable to find a suitable verification key for JWS w/ header " + jws.getHeaders().getFullHeaderAsJsonString() +
" due to an unexpected exception (" + e + ") while obtaining or using keys from JWKS endpoint at " +
refreshingHttpsJwks.getLocation();
throw new UnresolvableKeyException(sb, e);
}
}
}
| RefreshingHttpsJwksVerificationKeyResolver |
java | elastic__elasticsearch | test/framework/src/main/java/org/elasticsearch/cli/MockTerminal.java | {
"start": 907,
"end": 1054
} | class ____ extends Terminal {
/**
* A ByteArrayInputStream that has its bytes set after construction.
*/
private static | MockTerminal |
java | apache__kafka | streams/src/test/java/org/apache/kafka/streams/kstream/internals/KStreamSlidingWindowAggregateTest.java | {
"start": 95493,
"end": 97081
} | class ____ extends InMemoryWindowStore {
InOrderMemoryWindowStore(final String name,
final long retentionPeriod,
final long windowSize,
final boolean retainDuplicates,
final String metricScope) {
super(name, retentionPeriod, windowSize, retainDuplicates, metricScope);
}
@Override
public WindowStoreIterator<byte[]> backwardFetch(final Bytes key, final long timeFrom, final long timeTo) {
throw new UnsupportedOperationException("Backward fetch not supported here");
}
@Override
public KeyValueIterator<Windowed<Bytes>, byte[]> backwardFetch(final Bytes keyFrom,
final Bytes keyTo,
final long timeFrom,
final long timeTo) {
throw new UnsupportedOperationException("Backward fetch not supported here");
}
@Override
public KeyValueIterator<Windowed<Bytes>, byte[]> backwardFetchAll(final long timeFrom, final long timeTo) {
throw new UnsupportedOperationException("Backward fetch not supported here");
}
@Override
public KeyValueIterator<Windowed<Bytes>, byte[]> backwardAll() {
throw new UnsupportedOperationException("Backward fetch not supported here");
}
}
private static | InOrderMemoryWindowStore |
java | dropwizard__dropwizard | dropwizard-hibernate/src/test/java/io/dropwizard/hibernate/ScanningHibernateBundleTest.java | {
"start": 129,
"end": 852
} | class ____ {
@Test
void testFindEntityClassesFromDirectory() {
String packageWithEntities = "io.dropwizard.hibernate.fake.entities.pckg";
assertThat(ScanningHibernateBundle.findEntityClassesFromDirectory(new String[]{packageWithEntities}))
.hasSize(4);
}
@Test
void testFindEntityClassesFromMultipleDirectories() {
String packageWithEntities = "io.dropwizard.hibernate.fake.entities.pckg";
String packageWithEntities2 = "io.dropwizard.hibernate.fake2.entities.pckg";
assertThat(ScanningHibernateBundle.findEntityClassesFromDirectory(new String[]{packageWithEntities, packageWithEntities2}))
.hasSize(8);
}
}
| ScanningHibernateBundleTest |
java | quarkusio__quarkus | integration-tests/gradle/src/test/java/io/quarkus/gradle/FastJarFormatWorksTest.java | {
"start": 532,
"end": 2005
} | class ____ extends QuarkusGradleWrapperTestBase {
@Test
public void testFastJarFormatWorks() throws Exception {
final File projectDir = getProjectDir("test-that-fast-jar-format-works");
runGradleWrapper(projectDir, "clean", "build");
final Path quarkusApp = projectDir.toPath().resolve("build").resolve("quarkus-app");
assertThat(quarkusApp).exists();
Path jar = quarkusApp.resolve("quarkus-run.jar");
assertThat(jar).exists();
File output = new File(projectDir, "build/output.log");
output.createNewFile();
DevModeClient devModeClient = new DevModeClient();
Process process = launch(jar, output);
try {
//Wait until server up
dumpFileContentOnFailure(() -> {
await()
.pollDelay(1, TimeUnit.SECONDS)
.atMost(1, TimeUnit.MINUTES)
.until(() -> devModeClient.isCode("/hello", 200));
return null;
}, output, ConditionTimeoutException.class);
String logs = FileUtils.readFileToString(output, "UTF-8");
assertThat(logs).contains("INFO").contains("cdi, resteasy");
// test that the application name and version are properly set
assertThat(devModeClient.getHttpResponse("/hello")).isEqualTo("hello");
} finally {
process.destroy();
}
}
}
| FastJarFormatWorksTest |
java | redisson__redisson | redisson/src/main/java/org/redisson/client/protocol/decoder/ClusterNodesDecoder.java | {
"start": 1081,
"end": 3790
} | class ____ implements Decoder<List<ClusterNodeInfo>> {
private final String scheme;
public ClusterNodesDecoder(String scheme) {
super();
this.scheme = scheme;
}
@Override
public List<ClusterNodeInfo> decode(ByteBuf buf, State state) throws IOException {
String response = buf.toString(CharsetUtil.UTF_8);
List<ClusterNodeInfo> nodes = new ArrayList<>();
for (String nodeInfo : response.split("\n")) {
ClusterNodeInfo node = new ClusterNodeInfo(nodeInfo);
String[] params = nodeInfo.split(" ");
String nodeId = params[0];
node.setNodeId(nodeId);
String flags = params[2];
for (String flag : flags.split(",")) {
for (Flag nodeInfoFlag : ClusterNodeInfo.Flag.values()) {
if (nodeInfoFlag.getValue().equalsIgnoreCase(flag)) {
node.addFlag(nodeInfoFlag);
break;
}
}
}
if (!node.containsFlag(Flag.NOADDR)) {
String[] parts = params[1].split(",");
String uri = createUri(parts[0]);
if (uri == null) {
continue;
}
node.setAddress(uri);
if (parts.length == 2) {
node.setHostName(parts[1]);
}
}
String slaveOf = params[3];
if (!"-".equals(slaveOf)) {
node.setSlaveOf(slaveOf);
}
if (params.length > 8) {
for (int i = 0; i < params.length - 8; i++) {
String slots = params[i + 8];
if (slots.contains("-<-") || slots.contains("->-")) {
continue;
}
String[] parts = slots.split("-");
if (parts.length == 1) {
node.addSlotRange(new ClusterSlotRange(Integer.valueOf(parts[0]), Integer.valueOf(parts[0])));
} else if (parts.length == 2) {
node.addSlotRange(new ClusterSlotRange(Integer.valueOf(parts[0]), Integer.valueOf(parts[1])));
}
}
}
nodes.add(node);
}
return nodes;
}
private String createUri(String part) {
String addr = part.split("@")[0];
String name = addr.substring(0, addr.lastIndexOf(":"));
if (name.isEmpty()) {
// skip nodes with empty address
return null;
}
return scheme + "://" + addr;
}
}
| ClusterNodesDecoder |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/action/synonyms/DeleteSynonymsAction.java | {
"start": 965,
"end": 1274
} | class ____ extends ActionType<AcknowledgedResponse> {
public static final DeleteSynonymsAction INSTANCE = new DeleteSynonymsAction();
public static final String NAME = "cluster:admin/synonyms/delete";
public DeleteSynonymsAction() {
super(NAME);
}
public static | DeleteSynonymsAction |
java | elastic__elasticsearch | x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/EsqlTestUtils.java | {
"start": 11426,
"end": 16257
} | class ____ {
public static final Literal ONE = new Literal(Source.EMPTY, 1, DataType.INTEGER);
public static final Literal TWO = new Literal(Source.EMPTY, 2, DataType.INTEGER);
public static final Literal THREE = new Literal(Source.EMPTY, 3, DataType.INTEGER);
public static final Literal FOUR = new Literal(Source.EMPTY, 4, DataType.INTEGER);
public static final Literal FIVE = new Literal(Source.EMPTY, 5, DataType.INTEGER);
public static final Literal SIX = new Literal(Source.EMPTY, 6, DataType.INTEGER);
public static Equals equalsOf(Expression left, Expression right) {
return new Equals(EMPTY, left, right, null);
}
public static LessThan lessThanOf(Expression left, Expression right) {
return new LessThan(EMPTY, left, right, null);
}
public static GreaterThan greaterThanOf(Expression left, Expression right) {
return new GreaterThan(EMPTY, left, right, ESTestCase.randomZone());
}
public static NotEquals notEqualsOf(Expression left, Expression right) {
return new NotEquals(EMPTY, left, right, ESTestCase.randomZone());
}
public static LessThanOrEqual lessThanOrEqualOf(Expression left, Expression right) {
return new LessThanOrEqual(EMPTY, left, right, ESTestCase.randomZone());
}
public static GreaterThanOrEqual greaterThanOrEqualOf(Expression left, Expression right) {
return new GreaterThanOrEqual(EMPTY, left, right, ESTestCase.randomZone());
}
public static FieldAttribute findFieldAttribute(LogicalPlan plan, String name) {
return findFieldAttribute(plan, name, (unused) -> true);
}
public static FieldAttribute findFieldAttribute(LogicalPlan plan, String name, Predicate<EsRelation> inThisRelation) {
Holder<FieldAttribute> result = new Holder<>();
plan.forEachDown(EsRelation.class, relation -> {
if (inThisRelation.test(relation) == false) {
return;
}
for (Attribute attr : relation.output()) {
if (attr.name().equals(name)) {
assertNull("Multiple matching field attributes found", result.get());
result.set((FieldAttribute) attr);
return;
}
}
});
return result.get();
}
public static FieldAttribute getFieldAttribute() {
return getFieldAttribute("a");
}
public static FieldAttribute getFieldAttribute(String name) {
return getFieldAttribute(name, INTEGER);
}
public static FieldAttribute getFieldAttribute(String name, DataType dataType) {
return new FieldAttribute(EMPTY, name, new EsField(name + "f", dataType, emptyMap(), true, EsField.TimeSeriesFieldType.NONE));
}
public static FieldAttribute fieldAttribute() {
return fieldAttribute(randomAlphaOfLength(10), randomFrom(DataType.types()));
}
public static FieldAttribute fieldAttribute(String name, DataType type) {
return new FieldAttribute(EMPTY, name, new EsField(name, type, emptyMap(), randomBoolean(), EsField.TimeSeriesFieldType.NONE));
}
public static Literal of(Object value) {
return of(Source.EMPTY, value);
}
/**
* Utility method for creating 'in-line' Literals (out of values instead of expressions).
*/
public static Literal of(Source source, Object value) {
if (value instanceof Literal) {
return (Literal) value;
}
var dataType = DataType.fromJava(value);
if (value instanceof String) {
value = BytesRefs.toBytesRef(value);
}
return new Literal(source, value, dataType);
}
public static ReferenceAttribute referenceAttribute(String name, DataType type) {
return new ReferenceAttribute(EMPTY, name, type);
}
public static Alias alias(String name, Expression child) {
return new Alias(EMPTY, name, child);
}
public static Mul mul(Expression left, Expression right) {
return new Mul(EMPTY, left, right);
}
public static Range rangeOf(Expression value, Expression lower, boolean includeLower, Expression upper, boolean includeUpper) {
return new Range(EMPTY, value, lower, includeLower, upper, includeUpper, randomZone());
}
public static EsRelation relation() {
return relation(IndexMode.STANDARD);
}
public static EsRelation relation(IndexMode mode) {
return new EsRelation(EMPTY, randomIdentifier(), mode, Map.of(), Map.of(), Map.of(), List.of());
}
/**
* This version of SearchStats always returns true for all fields for all boolean methods.
* For custom behaviour either use {@link TestConfigurableSearchStats} or override the specific methods.
*/
public static | EsqlTestUtils |
java | spring-projects__spring-framework | spring-context/src/test/java/org/springframework/context/annotation/AnnotationBeanNameGeneratorTests.java | {
"start": 7440,
"end": 7698
} | interface ____ {
// This is intentionally convention-based. Please do not add @AliasFor.
// See gh-31093.
String value() default "";
}
@ConventionBasedComponent1("myComponent")
@ConventionBasedComponent2("myComponent")
static | ConventionBasedComponent2 |
java | elastic__elasticsearch | libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/PathLookup.java | {
"start": 706,
"end": 819
} | interface ____ {
Class<?> DEFAULT_FILESYSTEM_CLASS = PathUtils.getDefaultFileSystem().getClass();
| PathLookup |
java | apache__maven | impl/maven-cli/src/main/java/org/apache/maven/cling/invoker/mvnenc/goals/Init.java | {
"start": 2202,
"end": 12254
} | class ____ extends InteractiveGoalSupport {
private static final String NONE = "__none__";
@Inject
public Init(MessageBuilderFactory messageBuilderFactory, SecDispatcher secDispatcher) {
super(messageBuilderFactory, secDispatcher);
}
@SuppressWarnings("MethodLength")
@Override
public int doExecute(EncryptContext context) throws Exception {
boolean force = context.options().force().orElse(false);
boolean yes = context.options().yes().orElse(false);
if (configExists() && !force) {
context.logger.error(messageBuilderFactory
.builder()
.error("Error: configuration exist. Use --force if you want to reset existing configuration.")
.build());
return BAD_OPERATION;
}
context.addInHeader(context.style.italic().bold().foreground(Colors.rgbColor("yellow")), "goal: init");
context.addInHeader("");
ConsolePrompt.UiConfig promptConfig;
if (OSUtils.IS_WINDOWS) {
promptConfig = new ConsolePrompt.UiConfig(">", "( )", "(x)", "( )");
} else {
promptConfig = new ConsolePrompt.UiConfig("❯", "◯ ", "◉ ", "◯ ");
}
promptConfig.setCancellableFirstPrompt(true);
SettingsSecurity config = secDispatcher.readConfiguration(true);
// reset config
config.setDefaultDispatcher(null);
config.getConfigurations().clear();
Map<String, PromptResultItemIF> dispatcherResult;
Map<String, PromptResultItemIF> dispatcherConfigResult;
Map<String, PromptResultItemIF> confirmChoice;
ConsolePrompt prompt = new ConsolePrompt(context.reader, context.terminal, promptConfig);
dispatcherResult = prompt.prompt(
context.header, dispatcherPrompt(prompt.getPromptBuilder()).build());
if (dispatcherResult.isEmpty()) {
throw new InterruptedException();
}
if (NONE.equals(dispatcherResult.get("defaultDispatcher").getResult())) {
context.terminal
.writer()
.println(messageBuilderFactory
.builder()
.warning(
"Maven4 SecDispatcher disabled; Maven3 fallback may still work, use `mvnenc diag` to check")
.build());
} else {
config.setDefaultDispatcher(
dispatcherResult.get("defaultDispatcher").getResult());
DispatcherMeta meta = secDispatcher.availableDispatchers().stream()
.filter(d -> Objects.equals(config.getDefaultDispatcher(), d.name()))
.findFirst()
.orElseThrow();
if (!meta.fields().isEmpty()) {
dispatcherConfigResult = prompt.prompt(
context.header,
configureDispatcher(context, meta, prompt.getPromptBuilder())
.build());
if (dispatcherConfigResult.isEmpty()) {
throw new InterruptedException();
}
List<Map.Entry<String, PromptResultItemIF>> editables = dispatcherConfigResult.entrySet().stream()
.filter(e -> e.getValue().getResult().contains("$"))
.toList();
if (!editables.isEmpty()) {
context.addInHeader("");
context.addInHeader("Please customize the editable value:");
Map<String, PromptResultItemIF> editMap;
for (Map.Entry<String, PromptResultItemIF> editable : editables) {
String template = editable.getValue().getResult();
editMap = prompt.prompt(
context.header,
prompt.getPromptBuilder()
.createInputPrompt()
.name("edit")
.message(template)
.addPrompt()
.build());
if (editMap.isEmpty()) {
throw new InterruptedException();
}
dispatcherConfigResult.put(editable.getKey(), editMap.get("edit"));
}
}
Config dispatcherConfig = new Config();
dispatcherConfig.setName(meta.name());
for (DispatcherMeta.Field field : meta.fields()) {
ConfigProperty property = new ConfigProperty();
property.setName(field.getKey());
property.setValue(dispatcherConfigResult.get(field.getKey()).getResult());
dispatcherConfig.addProperty(property);
}
if (!dispatcherConfig.getProperties().isEmpty()) {
config.addConfiguration(dispatcherConfig);
}
}
}
if (yes) {
secDispatcher.writeConfiguration(config);
} else {
context.addInHeader("");
context.addInHeader("Values set:");
context.addInHeader("defaultDispatcher=" + config.getDefaultDispatcher());
for (Config c : config.getConfigurations()) {
context.addInHeader(" dispatcherName=" + c.getName());
for (ConfigProperty cp : c.getProperties()) {
context.addInHeader(" " + cp.getName() + "=" + cp.getValue());
}
}
confirmChoice = prompt.prompt(
context.header, confirmPrompt(prompt.getPromptBuilder()).build());
ConfirmResult confirm = (ConfirmResult) confirmChoice.get("confirm");
if (confirm.getConfirmed() == ConfirmChoice.ConfirmationValue.YES) {
context.terminal
.writer()
.println(messageBuilderFactory
.builder()
.info("Writing out the configuration...")
.build());
secDispatcher.writeConfiguration(config);
} else {
context.terminal
.writer()
.println(messageBuilderFactory
.builder()
.warning("Values not accepted; not saving configuration.")
.build());
return CANCELED;
}
}
return OK;
}
protected PromptBuilder confirmPrompt(PromptBuilder promptBuilder) {
promptBuilder
.createConfirmPromp()
.name("confirm")
.message("Are values above correct?")
.defaultValue(ConfirmChoice.ConfirmationValue.YES)
.addPrompt();
return promptBuilder;
}
protected PromptBuilder dispatcherPrompt(PromptBuilder promptBuilder) {
ListPromptBuilder listPromptBuilder = promptBuilder
.createListPrompt()
.name("defaultDispatcher")
.message("Which dispatcher you want to use as default?");
listPromptBuilder
.newItem()
.name(NONE)
.text("None (disable MavenSecDispatcher)")
.add();
for (DispatcherMeta meta : secDispatcher.availableDispatchers()) {
if (!meta.isHidden()) {
listPromptBuilder
.newItem()
.name(meta.name())
.text(meta.displayName())
.add();
}
}
listPromptBuilder.addPrompt();
return promptBuilder;
}
private PromptBuilder configureDispatcher(
EncryptContext context, DispatcherMeta dispatcherMeta, PromptBuilder promptBuilder) throws Exception {
context.addInHeader(
context.style.italic().bold().foreground(Colors.rgbColor("yellow")),
"Configure " + dispatcherMeta.displayName());
context.addInHeader("");
for (DispatcherMeta.Field field : dispatcherMeta.fields()) {
String fieldKey = field.getKey();
String fieldDescription = "Configure " + fieldKey + ": " + field.getDescription();
if (field.getOptions().isPresent()) {
// list options
ListPromptBuilder listPromptBuilder =
promptBuilder.createListPrompt().name(fieldKey).message(fieldDescription);
for (DispatcherMeta.Field option : field.getOptions().get()) {
listPromptBuilder
.newItem()
.name(
option.getDefaultValue().isPresent()
? option.getDefaultValue().get()
: option.getKey())
.text(option.getDescription())
.add();
}
listPromptBuilder.addPrompt();
} else if (field.getDefaultValue().isPresent()) {
// input w/ def value
promptBuilder
.createInputPrompt()
.name(fieldKey)
.message(fieldDescription)
.defaultValue(field.getDefaultValue().get())
.addPrompt();
} else {
// ? plain input?
promptBuilder
.createInputPrompt()
.name(fieldKey)
.message(fieldDescription)
.addPrompt();
}
}
return promptBuilder;
}
}
| Init |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/service/launcher/testservices/StoppingInStartLaunchableService.java | {
"start": 1145,
"end": 1738
} | class ____
extends AbstractLaunchableService {
public static final String NAME =
"org.apache.hadoop.service.launcher.testservices.StoppingInStartLaunchableService";
public StoppingInStartLaunchableService(String name) {
super(name);
}
@Override
protected void serviceStart() throws Exception {
super.serviceStart();
stop();
}
@Override
public int execute() throws Exception {
throw new ServiceLaunchException(
LauncherExitCodes.EXIT_SERVICE_LIFECYCLE_EXCEPTION,
"Should not have been executed");
}
}
| StoppingInStartLaunchableService |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-csi/src/test/java/org/apache/hadoop/yarn/csi/adaptor/TestCsiAdaptorService.java | {
"start": 3747,
"end": 18370
} | interface ____ extends CsiAdaptorPlugin {
default void init(String driverName, Configuration conf)
throws YarnException {
return;
}
default String getDriverName() {
return null;
}
default GetPluginInfoResponse getPluginInfo(GetPluginInfoRequest request)
throws YarnException, IOException {
return null;
}
default ValidateVolumeCapabilitiesResponse validateVolumeCapacity(
ValidateVolumeCapabilitiesRequest request) throws YarnException,
IOException {
return null;
}
default NodePublishVolumeResponse nodePublishVolume(
NodePublishVolumeRequest request) throws YarnException, IOException {
return null;
}
default NodeUnpublishVolumeResponse nodeUnpublishVolume(
NodeUnpublishVolumeRequest request) throws YarnException, IOException{
return null;
}
}
@Test
void testValidateVolume() throws IOException, YarnException {
ServerSocket ss = new ServerSocket(0);
ss.close();
InetSocketAddress address = new InetSocketAddress(ss.getLocalPort());
Configuration conf = new Configuration();
conf.setSocketAddr(
YarnConfiguration.NM_CSI_ADAPTOR_PREFIX + "test-driver.address",
address);
conf.set(
YarnConfiguration.NM_CSI_DRIVER_PREFIX + "test-driver.endpoint",
"unix:///tmp/test-driver.sock");
// inject a fake CSI adaptor
// this client validates if the ValidateVolumeCapabilitiesRequest
// is integrity, and then reply a fake response
CsiAdaptorPlugin plugin = new FakeCsiAdaptor() {
@Override
public String getDriverName() {
return "test-driver";
}
@Override
public GetPluginInfoResponse getPluginInfo(GetPluginInfoRequest request) {
return GetPluginInfoResponse.newInstance("test-plugin", "0.1");
}
@Override
public ValidateVolumeCapabilitiesResponse validateVolumeCapacity(
ValidateVolumeCapabilitiesRequest request) throws YarnException,
IOException {
// validate we get all info from the request
assertEquals("volume-id-0000123", request.getVolumeId());
assertEquals(1, request.getVolumeCapabilities().size());
assertEquals(Csi.VolumeCapability.AccessMode
.newBuilder().setModeValue(5).build().getMode().name(),
request.getVolumeCapabilities().get(0).getAccessMode().name());
assertEquals(2, request.getVolumeCapabilities().get(0)
.getMountFlags().size());
assertTrue(request.getVolumeCapabilities().get(0)
.getMountFlags().contains("mountFlag1"));
assertTrue(request.getVolumeCapabilities().get(0)
.getMountFlags().contains("mountFlag2"));
assertEquals(2, request.getVolumeAttributes().size());
assertEquals("v1", request.getVolumeAttributes().get("k1"));
assertEquals("v2", request.getVolumeAttributes().get("k2"));
// return a fake result
return ValidateVolumeCapabilitiesResponse
.newInstance(false, "this is a test");
}
};
CsiAdaptorProtocolService service =
new CsiAdaptorProtocolService(plugin);
service.init(conf);
service.start();
try (CsiAdaptorProtocolPBClientImpl client =
new CsiAdaptorProtocolPBClientImpl(1L, address, new Configuration())) {
ValidateVolumeCapabilitiesRequest request =
ValidateVolumeCapabilitiesRequestPBImpl
.newInstance("volume-id-0000123",
ImmutableList.of(
new ValidateVolumeCapabilitiesRequest
.VolumeCapability(
MULTI_NODE_MULTI_WRITER, FILE_SYSTEM,
ImmutableList.of("mountFlag1", "mountFlag2"))),
ImmutableMap.of("k1", "v1", "k2", "v2"));
ValidateVolumeCapabilitiesResponse response = client
.validateVolumeCapacity(request);
assertEquals(false, response.isSupported());
assertEquals("this is a test", response.getResponseMessage());
} finally {
service.stop();
}
}
@Test
void testValidateVolumeWithNMProxy() throws Exception {
ServerSocket ss = new ServerSocket(0);
ss.close();
InetSocketAddress address = new InetSocketAddress(ss.getLocalPort());
Configuration conf = new Configuration();
conf.setSocketAddr(
YarnConfiguration.NM_CSI_ADAPTOR_PREFIX + "test-driver.address",
address);
conf.set(
YarnConfiguration.NM_CSI_DRIVER_PREFIX + "test-driver.endpoint",
"unix:///tmp/test-driver.sock");
// inject a fake CSI adaptor
// this client validates if the ValidateVolumeCapabilitiesRequest
// is integrity, and then reply a fake response
FakeCsiAdaptor plugin = new FakeCsiAdaptor() {
@Override
public String getDriverName() {
return "test-driver";
}
@Override
public GetPluginInfoResponse getPluginInfo(
GetPluginInfoRequest request) throws YarnException, IOException {
return GetPluginInfoResponse.newInstance("test-plugin", "0.1");
}
@Override
public ValidateVolumeCapabilitiesResponse validateVolumeCapacity(
ValidateVolumeCapabilitiesRequest request)
throws YarnException, IOException {
// validate we get all info from the request
assertEquals("volume-id-0000123", request.getVolumeId());
assertEquals(1, request.getVolumeCapabilities().size());
assertEquals(
Csi.VolumeCapability.AccessMode.newBuilder().setModeValue(5)
.build().getMode().name(),
request.getVolumeCapabilities().get(0).getAccessMode().name());
assertEquals(2,
request.getVolumeCapabilities().get(0).getMountFlags().size());
assertTrue(request.getVolumeCapabilities().get(0).getMountFlags()
.contains("mountFlag1"));
assertTrue(request.getVolumeCapabilities().get(0).getMountFlags()
.contains("mountFlag2"));
assertEquals(2, request.getVolumeAttributes().size());
assertEquals("v1", request.getVolumeAttributes().get("k1"));
assertEquals("v2", request.getVolumeAttributes().get("k2"));
// return a fake result
return ValidateVolumeCapabilitiesResponse
.newInstance(false, "this is a test");
}
};
CsiAdaptorProtocolService service =
new CsiAdaptorProtocolService(plugin);
service.init(conf);
service.start();
YarnRPC rpc = YarnRPC.create(conf);
UserGroupInformation currentUser = UserGroupInformation.getCurrentUser();
CsiAdaptorProtocol adaptorClient = NMProxy
.createNMProxy(conf, CsiAdaptorProtocol.class, currentUser, rpc,
NetUtils.createSocketAddrForHost("localhost", ss.getLocalPort()));
ValidateVolumeCapabilitiesRequest request =
ValidateVolumeCapabilitiesRequestPBImpl
.newInstance("volume-id-0000123",
ImmutableList.of(new ValidateVolumeCapabilitiesRequest
.VolumeCapability(
MULTI_NODE_MULTI_WRITER, FILE_SYSTEM,
ImmutableList.of("mountFlag1", "mountFlag2"))),
ImmutableMap.of("k1", "v1", "k2", "v2"));
ValidateVolumeCapabilitiesResponse response = adaptorClient
.validateVolumeCapacity(request);
assertEquals(false, response.isSupported());
assertEquals("this is a test", response.getResponseMessage());
service.stop();
}
@Test
void testMissingConfiguration() {
assertThrows(ServiceStateException.class, () -> {
Configuration conf = new Configuration();
CsiAdaptorProtocolService service =
new CsiAdaptorProtocolService(new FakeCsiAdaptor() {
});
service.init(conf);
});
}
@Test
void testInvalidServicePort() {
assertThrows(ServiceStateException.class, () -> {
Configuration conf = new Configuration();
conf.set(YarnConfiguration.NM_CSI_ADAPTOR_PREFIX
+ "test-driver-0001.address",
"0.0.0.0:-100"); // this is an invalid address
CsiAdaptorProtocolService service =
new CsiAdaptorProtocolService(new FakeCsiAdaptor() {
});
service.init(conf);
});
}
@Test
void testInvalidHost() {
assertThrows(ServiceStateException.class, () -> {
Configuration conf = new Configuration();
conf.set(YarnConfiguration.NM_CSI_ADAPTOR_PREFIX
+ "test-driver-0001.address",
"192.0.1:8999"); // this is an invalid ip address
CsiAdaptorProtocolService service =
new CsiAdaptorProtocolService(new FakeCsiAdaptor() {
});
service.init(conf);
});
}
@Test
void testCustomizedAdaptor() throws IOException, YarnException {
ServerSocket ss = new ServerSocket(0);
ss.close();
InetSocketAddress address = new InetSocketAddress(ss.getLocalPort());
Configuration conf = new Configuration();
conf.set(YarnConfiguration.NM_CSI_DRIVER_NAMES, "customized-driver");
conf.setSocketAddr(
YarnConfiguration.NM_CSI_ADAPTOR_PREFIX + "customized-driver.address",
address);
conf.set(
YarnConfiguration.NM_CSI_ADAPTOR_PREFIX + "customized-driver.class",
"org.apache.hadoop.yarn.csi.adaptor.MockCsiAdaptor");
conf.set(
YarnConfiguration.NM_CSI_DRIVER_PREFIX + "customized-driver.endpoint",
"unix:///tmp/customized-driver.sock");
CsiAdaptorServices services =
new CsiAdaptorServices();
services.init(conf);
services.start();
YarnRPC rpc = YarnRPC.create(conf);
UserGroupInformation currentUser = UserGroupInformation.getCurrentUser();
CsiAdaptorProtocol adaptorClient = NMProxy
.createNMProxy(conf, CsiAdaptorProtocol.class, currentUser, rpc,
NetUtils.createSocketAddrForHost("localhost", ss.getLocalPort()));
// Test getPluginInfo
GetPluginInfoResponse pluginInfo =
adaptorClient.getPluginInfo(GetPluginInfoRequest.newInstance());
assertThat(pluginInfo.getDriverName()).isEqualTo("customized-driver");
assertThat(pluginInfo.getVersion()).isEqualTo("1.0");
// Test validateVolumeCapacity
ValidateVolumeCapabilitiesRequest request =
ValidateVolumeCapabilitiesRequestPBImpl
.newInstance("volume-id-0000123",
ImmutableList.of(new ValidateVolumeCapabilitiesRequest
.VolumeCapability(
MULTI_NODE_MULTI_WRITER, FILE_SYSTEM,
ImmutableList.of("mountFlag1", "mountFlag2"))),
ImmutableMap.of("k1", "v1", "k2", "v2"));
ValidateVolumeCapabilitiesResponse response = adaptorClient
.validateVolumeCapacity(request);
assertEquals(true, response.isSupported());
assertEquals("verified via MockCsiAdaptor",
response.getResponseMessage());
services.stop();
}
@Test
void testMultipleCsiAdaptors() throws IOException, YarnException {
ServerSocket driver1Addr = new ServerSocket(0);
ServerSocket driver2Addr = new ServerSocket(0);
InetSocketAddress address1 =
new InetSocketAddress(driver1Addr.getLocalPort());
InetSocketAddress address2 =
new InetSocketAddress(driver2Addr.getLocalPort());
Configuration conf = new Configuration();
// Two csi-drivers configured
conf.set(YarnConfiguration.NM_CSI_DRIVER_NAMES,
"customized-driver-1,customized-driver-2");
// customized-driver-1
conf.setSocketAddr(YarnConfiguration.NM_CSI_ADAPTOR_PREFIX
+ "customized-driver-1.address", address1);
conf.set(YarnConfiguration.NM_CSI_ADAPTOR_PREFIX
+ "customized-driver-1.class",
"org.apache.hadoop.yarn.csi.adaptor.MockCsiAdaptor");
conf.set(YarnConfiguration.NM_CSI_DRIVER_PREFIX
+ "customized-driver-1.endpoint",
"unix:///tmp/customized-driver-1.sock");
// customized-driver-2
conf.setSocketAddr(YarnConfiguration.NM_CSI_ADAPTOR_PREFIX
+ "customized-driver-2.address", address2);
conf.set(YarnConfiguration.NM_CSI_ADAPTOR_PREFIX
+ "customized-driver-2.class",
"org.apache.hadoop.yarn.csi.adaptor.MockCsiAdaptor");
conf.set(YarnConfiguration.NM_CSI_DRIVER_PREFIX
+ "customized-driver-2.endpoint",
"unix:///tmp/customized-driver-2.sock");
driver1Addr.close();
driver2Addr.close();
CsiAdaptorServices services =
new CsiAdaptorServices();
services.init(conf);
services.start();
YarnRPC rpc = YarnRPC.create(conf);
UserGroupInformation currentUser = UserGroupInformation.getCurrentUser();
CsiAdaptorProtocol client1 = NMProxy
.createNMProxy(conf, CsiAdaptorProtocol.class, currentUser, rpc,
NetUtils.createSocketAddrForHost("localhost",
driver1Addr.getLocalPort()));
// ***************************************************
// Verify talking with customized-driver-1
// ***************************************************
// Test getPluginInfo
GetPluginInfoResponse pluginInfo =
client1.getPluginInfo(GetPluginInfoRequest.newInstance());
assertThat(pluginInfo.getDriverName()).isEqualTo("customized-driver-1");
assertThat(pluginInfo.getVersion()).isEqualTo("1.0");
// Test validateVolumeCapacity
ValidateVolumeCapabilitiesRequest request =
ValidateVolumeCapabilitiesRequestPBImpl
.newInstance("driver-1-volume-00001",
ImmutableList.of(new ValidateVolumeCapabilitiesRequest
.VolumeCapability(
MULTI_NODE_MULTI_WRITER, FILE_SYSTEM,
ImmutableList.of())), ImmutableMap.of());
ValidateVolumeCapabilitiesResponse response = client1
.validateVolumeCapacity(request);
assertEquals(true, response.isSupported());
assertEquals("verified via MockCsiAdaptor",
response.getResponseMessage());
// ***************************************************
// Verify talking with customized-driver-2
// ***************************************************
CsiAdaptorProtocol client2 = NMProxy
.createNMProxy(conf, CsiAdaptorProtocol.class, currentUser, rpc,
NetUtils.createSocketAddrForHost("localhost",
driver2Addr.getLocalPort()));
GetPluginInfoResponse pluginInfo2 =
client2.getPluginInfo(GetPluginInfoRequest.newInstance());
assertThat(pluginInfo2.getDriverName()).isEqualTo("customized-driver-2");
assertThat(pluginInfo2.getVersion()).isEqualTo("1.0");
services.stop();
}
}
| FakeCsiAdaptor |
java | bumptech__glide | library/src/main/java/com/bumptech/glide/util/MarkEnforcingInputStream.java | {
"start": 293,
"end": 2407
} | class ____ extends FilterInputStream {
private static final int UNSET = Integer.MIN_VALUE;
private static final int END_OF_STREAM = -1;
private int availableBytes = UNSET;
public MarkEnforcingInputStream(@NonNull InputStream in) {
super(in);
}
@Override
public synchronized void mark(int readLimit) {
super.mark(readLimit);
availableBytes = readLimit;
}
@Override
public int read() throws IOException {
if (getBytesToRead(1) == END_OF_STREAM) {
return END_OF_STREAM;
}
int result = super.read();
updateAvailableBytesAfterRead(1 /* bytesRead */);
return result;
}
@Override
public int read(@NonNull byte[] buffer, int byteOffset, int byteCount) throws IOException {
int toRead = (int) getBytesToRead(byteCount);
if (toRead == END_OF_STREAM) {
return END_OF_STREAM;
}
int read = super.read(buffer, byteOffset, toRead);
updateAvailableBytesAfterRead(read);
return read;
}
@Override
public synchronized void reset() throws IOException {
super.reset();
availableBytes = UNSET;
}
@Override
public long skip(long byteCount) throws IOException {
long toSkip = getBytesToRead(byteCount);
if (toSkip == END_OF_STREAM) {
return 0;
}
long read = super.skip(toSkip);
updateAvailableBytesAfterRead(read);
return read;
}
@Override
public int available() throws IOException {
return availableBytes == UNSET
? super.available()
: Math.min(availableBytes, super.available());
}
private long getBytesToRead(long targetByteCount) {
if (availableBytes == 0) {
return END_OF_STREAM;
} else if (availableBytes != UNSET && targetByteCount > availableBytes) {
return availableBytes;
} else {
return targetByteCount;
}
}
private void updateAvailableBytesAfterRead(long bytesRead) {
if (availableBytes != UNSET && bytesRead != END_OF_STREAM) {
// See https://errorprone.info/bugpattern/NarrowingCompoundAssignment.
availableBytes = (int) (availableBytes - bytesRead);
}
}
}
| MarkEnforcingInputStream |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/runtime/io/network/partition/PartitionTracker.java | {
"start": 1195,
"end": 1822
} | interface ____<K, M> {
/** Stops the tracking of all partitions for the given key. */
Collection<PartitionTrackerEntry<K, M>> stopTrackingPartitionsFor(K key);
/** Stops the tracking of the given partitions. */
Collection<PartitionTrackerEntry<K, M>> stopTrackingPartitions(
Collection<ResultPartitionID> resultPartitionIds);
/** Returns whether any partition is being tracked for the given key. */
boolean isTrackingPartitionsFor(K key);
/** Returns whether the given partition is being tracked. */
boolean isPartitionTracked(ResultPartitionID resultPartitionID);
}
| PartitionTracker |
java | spring-projects__spring-framework | spring-test/src/test/java/org/springframework/test/context/junit4/rules/FailingBeforeAndAfterMethodsSpringRuleTests.java | {
"start": 2697,
"end": 3124
} | class ____ {
@ClassRule
public static final SpringClassRule springClassRule = new SpringClassRule();
@Rule
public final SpringMethodRule springMethodRule = new SpringMethodRule();
@Test
public void testNothing() {
}
}
@Ignore("TestCase classes are run manually by the enclosing test class")
@TestExecutionListeners(AlwaysFailingBeforeTestClassTestExecutionListener.class)
public static | BaseSpringRuleTestCase |
java | spring-projects__spring-data-jpa | spring-data-jpa/src/main/java/org/springframework/data/jpa/repository/query/PreprocessedQuery.java | {
"start": 15794,
"end": 15913
} | enum ____ the different types of bindings.
*
* @author Thomas Darimont
* @author Oliver Gierke
*/
private | for |
java | spring-projects__spring-framework | spring-webflux/src/test/java/org/springframework/web/reactive/function/BodyExtractorsTests.java | {
"start": 3318,
"end": 16036
} | class ____ {
private BodyExtractor.Context context;
private Map<String, Object> hints;
private Optional<ServerHttpResponse> serverResponse = Optional.empty();
@BeforeEach
void createContext() {
final List<HttpMessageReader<?>> messageReaders = new ArrayList<>();
messageReaders.add(new DecoderHttpMessageReader<>(new ByteBufferDecoder()));
messageReaders.add(new DecoderHttpMessageReader<>(StringDecoder.allMimeTypes()));
messageReaders.add(new DecoderHttpMessageReader<>(new Jaxb2XmlDecoder()));
messageReaders.add(new DecoderHttpMessageReader<>(new JacksonJsonDecoder()));
messageReaders.add(new FormHttpMessageReader());
DefaultPartHttpMessageReader partReader = new DefaultPartHttpMessageReader();
messageReaders.add(partReader);
messageReaders.add(new MultipartHttpMessageReader(partReader));
messageReaders.add(new FormHttpMessageReader());
this.context = new BodyExtractor.Context() {
@Override
public List<HttpMessageReader<?>> messageReaders() {
return messageReaders;
}
@Override
public Optional<ServerHttpResponse> serverResponse() {
return serverResponse;
}
@Override
public Map<String, Object> hints() {
return hints;
}
};
this.hints = new HashMap<>();
}
@Test
void toMono() {
BodyExtractor<Mono<String>, ReactiveHttpInputMessage> extractor = BodyExtractors.toMono(String.class);
DefaultDataBufferFactory factory = DefaultDataBufferFactory.sharedInstance;
DefaultDataBuffer dataBuffer =
factory.wrap(ByteBuffer.wrap("foo".getBytes(StandardCharsets.UTF_8)));
Flux<DataBuffer> body = Flux.just(dataBuffer);
MockServerHttpRequest request = MockServerHttpRequest.post("/").body(body);
Mono<String> result = extractor.extract(request, this.context);
StepVerifier.create(result)
.expectNext("foo")
.expectComplete()
.verify();
}
@Test
void toMonoParameterizedTypeReference() {
BodyExtractor<Mono<Map<String, String>>, ReactiveHttpInputMessage> extractor =
BodyExtractors.toMono(new ParameterizedTypeReference<>() {});
byte[] bytes = "{\"username\":\"foo\",\"password\":\"bar\"}".getBytes(StandardCharsets.UTF_8);
DefaultDataBuffer dataBuffer = DefaultDataBufferFactory.sharedInstance.wrap(ByteBuffer.wrap(bytes));
Flux<DataBuffer> body = Flux.just(dataBuffer);
MockServerHttpRequest request = MockServerHttpRequest.post("/").contentType(MediaType.APPLICATION_JSON).body(body);
Mono<Map<String, String>> result = extractor.extract(request, this.context);
Map<String, String > expected = new LinkedHashMap<>();
expected.put("username", "foo");
expected.put("password", "bar");
StepVerifier.create(result)
.expectNext(expected)
.expectComplete()
.verify();
}
@Test
void toMonoWithHints() {
BodyExtractor<Mono<User>, ReactiveHttpInputMessage> extractor = BodyExtractors.toMono(User.class);
this.hints.put(JSON_VIEW_HINT, SafeToDeserialize.class);
byte[] bytes = "{\"username\":\"foo\",\"password\":\"bar\"}".getBytes(StandardCharsets.UTF_8);
DefaultDataBuffer dataBuffer = DefaultDataBufferFactory.sharedInstance.wrap(ByteBuffer.wrap(bytes));
Flux<DataBuffer> body = Flux.just(dataBuffer);
MockServerHttpRequest request = MockServerHttpRequest.post("/")
.contentType(MediaType.APPLICATION_JSON)
.body(body);
Mono<User> result = extractor.extract(request, this.context);
StepVerifier.create(result)
.consumeNextWith(user -> {
assertThat(user.getUsername()).isEqualTo("foo");
assertThat(user.getPassword()).isNull();
})
.expectComplete()
.verify();
}
@Test // SPR-15758
void toMonoWithEmptyBodyAndNoContentType() {
BodyExtractor<Mono<Map<String, String>>, ReactiveHttpInputMessage> extractor =
BodyExtractors.toMono(new ParameterizedTypeReference<>() {});
MockServerHttpRequest request = MockServerHttpRequest.post("/").body(Flux.empty());
Mono<Map<String, String>> result = extractor.extract(request, this.context);
StepVerifier.create(result).expectComplete().verify();
}
@Test
void toMonoVoidAsClientShouldConsumeAndCancel() {
byte[] bytes = "foo".getBytes(StandardCharsets.UTF_8);
DefaultDataBuffer dataBuffer = DefaultDataBufferFactory.sharedInstance.wrap(ByteBuffer.wrap(bytes));
TestPublisher<DataBuffer> body = TestPublisher.create();
BodyExtractor<Mono<Void>, ReactiveHttpInputMessage> extractor = BodyExtractors.toMono(Void.class);
MockClientHttpResponse response = new MockClientHttpResponse(HttpStatus.OK);
response.setBody(body.flux());
StepVerifier.create(extractor.extract(response, this.context))
.then(() -> {
body.assertWasSubscribed();
body.emit(dataBuffer);
})
.verifyComplete();
body.assertCancelled();
}
@Test
void toMonoVoidAsClientWithEmptyBody() {
TestPublisher<DataBuffer> body = TestPublisher.create();
BodyExtractor<Mono<Void>, ReactiveHttpInputMessage> extractor = BodyExtractors.toMono(Void.class);
MockClientHttpResponse response = new MockClientHttpResponse(HttpStatus.OK);
response.setBody(body.flux());
StepVerifier.create(extractor.extract(response, this.context))
.then(() -> {
body.assertWasSubscribed();
body.complete();
})
.verifyComplete();
}
@Test
void toFlux() {
BodyExtractor<Flux<String>, ReactiveHttpInputMessage> extractor = BodyExtractors.toFlux(String.class);
byte[] bytes = "foo".getBytes(StandardCharsets.UTF_8);
DefaultDataBuffer dataBuffer = DefaultDataBufferFactory.sharedInstance.wrap(ByteBuffer.wrap(bytes));
Flux<DataBuffer> body = Flux.just(dataBuffer);
MockServerHttpRequest request = MockServerHttpRequest.post("/").body(body);
Flux<String> result = extractor.extract(request, this.context);
StepVerifier.create(result)
.expectNext("foo")
.expectComplete()
.verify();
}
@Test
void toFluxWithHints() {
BodyExtractor<Flux<User>, ReactiveHttpInputMessage> extractor = BodyExtractors.toFlux(User.class);
this.hints.put(JSON_VIEW_HINT, SafeToDeserialize.class);
String text = "[{\"username\":\"foo\",\"password\":\"bar\"},{\"username\":\"bar\",\"password\":\"baz\"}]";
byte[] bytes = text.getBytes(StandardCharsets.UTF_8);
DefaultDataBuffer dataBuffer = DefaultDataBufferFactory.sharedInstance.wrap(ByteBuffer.wrap(bytes));
Flux<DataBuffer> body = Flux.just(dataBuffer);
MockServerHttpRequest request = MockServerHttpRequest.post("/")
.contentType(MediaType.APPLICATION_JSON)
.body(body);
Flux<User> result = extractor.extract(request, this.context);
StepVerifier.create(result)
.consumeNextWith(user -> {
assertThat(user.getUsername()).isEqualTo("foo");
assertThat(user.getPassword()).isNull();
})
.consumeNextWith(user -> {
assertThat(user.getUsername()).isEqualTo("bar");
assertThat(user.getPassword()).isNull();
})
.expectComplete()
.verify();
}
@Test
void toFluxUnacceptable() {
BodyExtractor<Flux<String>, ReactiveHttpInputMessage> extractor = BodyExtractors.toFlux(String.class);
byte[] bytes = "foo".getBytes(StandardCharsets.UTF_8);
DefaultDataBuffer dataBuffer = DefaultDataBufferFactory.sharedInstance.wrap(ByteBuffer.wrap(bytes));
Flux<DataBuffer> body = Flux.just(dataBuffer);
MockServerHttpRequest request = MockServerHttpRequest.post("/")
.contentType(MediaType.APPLICATION_JSON)
.body(body);
BodyExtractor.Context emptyContext = new BodyExtractor.Context() {
@Override
public List<HttpMessageReader<?>> messageReaders() {
return Collections.emptyList();
}
@Override
public Optional<ServerHttpResponse> serverResponse() {
return Optional.empty();
}
@Override
public Map<String, Object> hints() {
return Collections.emptyMap();
}
};
Flux<String> result = extractor.extract(request, emptyContext);
StepVerifier.create(result)
.expectError(UnsupportedMediaTypeException.class)
.verify();
}
@Test
void toFormData() {
byte[] bytes = "name+1=value+1&name+2=value+2%2B1&name+2=value+2%2B2&name+3".getBytes(StandardCharsets.UTF_8);
DefaultDataBuffer dataBuffer = DefaultDataBufferFactory.sharedInstance.wrap(ByteBuffer.wrap(bytes));
Flux<DataBuffer> body = Flux.just(dataBuffer);
MockServerHttpRequest request = MockServerHttpRequest.post("/")
.contentType(MediaType.APPLICATION_FORM_URLENCODED)
.body(body);
Mono<MultiValueMap<String, String>> result = BodyExtractors.toFormData().extract(request, this.context);
StepVerifier.create(result)
.consumeNextWith(form -> {
assertThat(form).as("Invalid result").hasSize(3);
assertThat(form.getFirst("name 1")).as("Invalid result").isEqualTo("value 1");
List<String> values = form.get("name 2");
assertThat(values).as("Invalid result").containsExactly("value 2+1", "value 2+2");
assertThat(form.getFirst("name 3")).as("Invalid result").isNull();
})
.expectComplete()
.verify();
}
@Test
void toParts() {
BodyExtractor<Flux<Part>, ServerHttpRequest> extractor = BodyExtractors.toParts();
String bodyContents = """
-----------------------------9051914041544843365972754266
Content-Disposition: form-data; name="text"
text default
-----------------------------9051914041544843365972754266
Content-Disposition: form-data; name="file1"; filename="a.txt"
Content-Type: text/plain
Content of a.txt.
-----------------------------9051914041544843365972754266
Content-Disposition: form-data; name="file2"; filename="a.html"
Content-Type: text/html
<!DOCTYPE html><title>Content of a.html.</title>
-----------------------------9051914041544843365972754266--
""".replace("\n", "\r\n");
byte[] bytes = bodyContents.getBytes(StandardCharsets.UTF_8);
DefaultDataBuffer dataBuffer = DefaultDataBufferFactory.sharedInstance.wrap(ByteBuffer.wrap(bytes));
Flux<DataBuffer> body = Flux.just(dataBuffer);
MockServerHttpRequest request = MockServerHttpRequest.post("/")
.header("Content-Type", "multipart/form-data; boundary=---------------------------9051914041544843365972754266")
.body(body);
Flux<Part> result = extractor.extract(request, this.context);
StepVerifier.create(result)
.consumeNextWith(part -> {
assertThat(part.name()).isEqualTo("text");
assertThat(part).asInstanceOf(type(FormFieldPart.class)).satisfies(
formFieldPart -> assertThat(formFieldPart.value()).isEqualTo("text default"));
})
.consumeNextWith(part -> {
assertThat(part.name()).isEqualTo("file1");
assertThat(part).asInstanceOf(type(FilePart.class)).satisfies(
filePart -> assertThat(filePart.filename()).isEqualTo("a.txt"),
filePart -> assertThat(filePart.headers().getContentType()).isEqualTo(MediaType.TEXT_PLAIN));
})
.consumeNextWith(part -> {
assertThat(part.name()).isEqualTo("file2");
assertThat(part).asInstanceOf(type(FilePart.class)).satisfies(
filePart -> assertThat(filePart.filename()).isEqualTo("a.html"),
filePart -> assertThat(filePart.headers().getContentType()).isEqualTo(MediaType.TEXT_HTML));
})
.expectComplete()
.verify();
}
@Test
void toDataBuffers() {
BodyExtractor<Flux<DataBuffer>, ReactiveHttpInputMessage> extractor = BodyExtractors.toDataBuffers();
byte[] bytes = "foo".getBytes(StandardCharsets.UTF_8);
DefaultDataBuffer dataBuffer = DefaultDataBufferFactory.sharedInstance.wrap(ByteBuffer.wrap(bytes));
Flux<DataBuffer> body = Flux.just(dataBuffer);
MockServerHttpRequest request = MockServerHttpRequest.post("/").body(body);
Flux<DataBuffer> result = extractor.extract(request, this.context);
StepVerifier.create(result)
.expectNext(dataBuffer)
.expectComplete()
.verify();
}
@Test // SPR-17054
void unsupportedMediaTypeShouldConsumeAndCancel() {
NettyDataBufferFactory factory = new NettyDataBufferFactory(new PooledByteBufAllocator(true));
NettyDataBuffer buffer = factory.wrap(ByteBuffer.wrap("spring".getBytes(StandardCharsets.UTF_8)));
TestPublisher<DataBuffer> body = TestPublisher.create();
MockClientHttpResponse response = new MockClientHttpResponse(HttpStatus.OK);
response.getHeaders().setContentType(MediaType.APPLICATION_PDF);
response.setBody(body.flux());
BodyExtractor<Mono<User>, ReactiveHttpInputMessage> extractor = BodyExtractors.toMono(User.class);
StepVerifier.create(extractor.extract(response, this.context))
.then(() -> {
body.assertWasSubscribed();
body.emit(buffer);
})
.expectErrorSatisfies(throwable -> {
assertThat(throwable).isInstanceOf(UnsupportedMediaTypeException.class);
assertThatExceptionOfType(IllegalReferenceCountException.class).isThrownBy(buffer::release);
body.assertCancelled();
}).verify();
}
| BodyExtractorsTests |
java | apache__flink | flink-runtime/src/test/java/org/apache/flink/runtime/io/network/partition/hybrid/TestingFileDataIndexSpilledRegionManager.java | {
"start": 3153,
"end": 4124
} | class ____<T extends FileDataIndexRegionHelper.Region>
implements FileDataIndexSpilledRegionManager.Factory<T> {
public TestingFileDataIndexSpilledRegionManager<T> lastSpilledRegionManager;
public TestingFileDataIndexSpilledRegionManager<T> getLastSpilledRegionManager() {
return lastSpilledRegionManager;
}
@Override
public FileDataIndexSpilledRegionManager<T> create(
int numSubpartitions,
Path indexFilePath,
BiConsumer<Integer, T> cacheRegionConsumer) {
TestingFileDataIndexSpilledRegionManager<T> testingFileDataIndexSpilledRegionManager =
new TestingFileDataIndexSpilledRegionManager<>(
numSubpartitions, cacheRegionConsumer);
lastSpilledRegionManager = testingFileDataIndexSpilledRegionManager;
return testingFileDataIndexSpilledRegionManager;
}
}
}
| Factory |
java | apache__rocketmq | remoting/src/main/java/org/apache/rocketmq/remoting/protocol/body/CMResult.java | {
"start": 863,
"end": 987
} | enum ____ {
CR_SUCCESS,
CR_LATER,
CR_ROLLBACK,
CR_COMMIT,
CR_THROW_EXCEPTION,
CR_RETURN_NULL,
}
| CMResult |
java | alibaba__fastjson | src/test/java/com/alibaba/json/bvt/issue_3500/Issue3571.java | {
"start": 274,
"end": 909
} | class ____ extends TestCase {
public void test_for_issue() throws Exception {
Bean1 bean = new Bean1();
bean.id1 = 101;
bean.id2 = 102;
bean.id3 = 103;
assertEquals("{\"id1\":101,\"id2\":102,\"id3\":103}", JSON.toJSON(bean).toString());
}
public void test_for_issue_2() throws Exception {
Bean2 bean = new Bean2();
bean.id1 = 101;
bean.id2 = 102;
bean.id3 = 103;
assertEquals("{\"id1\":101,\"id2\":102,\"id3\":103}", JSON.toJSON(bean).toString());
}
@JSONType(serialzeFeatures = SerializerFeature.SortField)
public static | Issue3571 |
java | elastic__elasticsearch | modules/lang-painless/src/main/java/org/elasticsearch/painless/phase/DefaultSemanticAnalysisPhase.java | {
"start": 114620,
"end": 158857
} | class ____ references,
* constructor function references, and local function references.
* Checks: type validation
*/
@Override
public void visitFunctionRef(EFunctionRef userFunctionRefNode, SemanticScope semanticScope) {
ScriptScope scriptScope = semanticScope.getScriptScope();
Location location = userFunctionRefNode.getLocation();
String symbol = userFunctionRefNode.getSymbol();
String methodName = userFunctionRefNode.getMethodName();
boolean read = semanticScope.getCondition(userFunctionRefNode, Read.class);
Class<?> type = scriptScope.getPainlessLookup().canonicalTypeNameToType(symbol);
TargetType targetType = semanticScope.getDecoration(userFunctionRefNode, TargetType.class);
Class<?> valueType;
boolean isInstanceReference = "this".equals(symbol);
if (isInstanceReference || type != null) {
if (semanticScope.getCondition(userFunctionRefNode, Write.class)) {
throw userFunctionRefNode.createError(
new IllegalArgumentException(
"invalid assignment: cannot assign a value to function reference [" + symbol + ":" + methodName + "]"
)
);
}
if (read == false) {
throw userFunctionRefNode.createError(
new IllegalArgumentException("not a statement: function reference [" + symbol + ":" + methodName + "] not used")
);
}
if (isInstanceReference) {
semanticScope.setCondition(userFunctionRefNode, InstanceCapturingFunctionRef.class);
}
if (targetType == null) {
valueType = String.class;
semanticScope.putDecoration(userFunctionRefNode, EncodingDecoration.of(true, isInstanceReference, symbol, methodName, 0));
} else {
FunctionRef ref = FunctionRef.create(
scriptScope.getPainlessLookup(),
scriptScope.getFunctionTable(),
location,
targetType.targetType(),
symbol,
methodName,
0,
scriptScope.getCompilerSettings().asMap(),
isInstanceReference
);
valueType = targetType.targetType();
semanticScope.putDecoration(userFunctionRefNode, new ReferenceDecoration(ref));
}
} else {
if (semanticScope.getCondition(userFunctionRefNode, Write.class)) {
throw userFunctionRefNode.createError(
new IllegalArgumentException(
"invalid assignment: cannot assign a value to capturing function reference [" + symbol + ":" + methodName + "]"
)
);
}
if (read == false) {
throw userFunctionRefNode.createError(
new IllegalArgumentException(
"not a statement: capturing function reference [" + symbol + ":" + methodName + "] not used"
)
);
}
SemanticScope.Variable captured = semanticScope.getVariable(location, symbol);
semanticScope.putDecoration(userFunctionRefNode, new CapturesDecoration(List.of(captured)));
if (captured.type().isPrimitive()) {
semanticScope.setCondition(userFunctionRefNode, CaptureBox.class);
}
if (targetType == null) {
EncodingDecoration encodingDecoration;
if (captured.type() == def.class) {
// dynamic implementation
encodingDecoration = EncodingDecoration.of(false, false, symbol, methodName, 1);
} else {
// typed implementation
encodingDecoration = EncodingDecoration.of(true, false, captured.getCanonicalTypeName(), methodName, 1);
}
valueType = String.class;
semanticScope.putDecoration(userFunctionRefNode, encodingDecoration);
} else {
valueType = targetType.targetType();
// static case
if (captured.type() != def.class) {
FunctionRef ref = FunctionRef.create(
scriptScope.getPainlessLookup(),
scriptScope.getFunctionTable(),
location,
targetType.targetType(),
captured.getCanonicalTypeName(),
methodName,
1,
scriptScope.getCompilerSettings().asMap(),
false
);
semanticScope.putDecoration(userFunctionRefNode, new ReferenceDecoration(ref));
}
}
}
semanticScope.putDecoration(userFunctionRefNode, new ValueType(valueType));
}
/**
* Visits a new array function ref expression which covers only a new array function reference
* and generates an internal method to define the new array.
* Checks: type validation
*/
@Override
public void visitNewArrayFunctionRef(ENewArrayFunctionRef userNewArrayFunctionRefNode, SemanticScope semanticScope) {
String canonicalTypeName = userNewArrayFunctionRefNode.getCanonicalTypeName();
if (semanticScope.getCondition(userNewArrayFunctionRefNode, Write.class)) {
throw userNewArrayFunctionRefNode.createError(
new IllegalArgumentException(
"cannot assign a value to new array function reference with target type [ + " + canonicalTypeName + "]"
)
);
}
if (semanticScope.getCondition(userNewArrayFunctionRefNode, Read.class) == false) {
throw userNewArrayFunctionRefNode.createError(
new IllegalArgumentException(
"not a statement: new array function reference with target type [" + canonicalTypeName + "] not used"
)
);
}
ScriptScope scriptScope = semanticScope.getScriptScope();
TargetType targetType = semanticScope.getDecoration(userNewArrayFunctionRefNode, TargetType.class);
Class<?> valueType;
Class<?> clazz = scriptScope.getPainlessLookup().canonicalTypeNameToType(canonicalTypeName);
semanticScope.putDecoration(userNewArrayFunctionRefNode, new ReturnType(clazz));
if (clazz == null) {
throw userNewArrayFunctionRefNode.createError(new IllegalArgumentException("Not a type [" + canonicalTypeName + "]."));
}
String name = scriptScope.getNextSyntheticName("newarray");
scriptScope.getFunctionTable().addFunction(name, clazz, Collections.singletonList(int.class), true, true);
semanticScope.putDecoration(userNewArrayFunctionRefNode, new MethodNameDecoration(name));
if (targetType == null) {
valueType = String.class;
scriptScope.putDecoration(userNewArrayFunctionRefNode, EncodingDecoration.of(true, false, "this", name, 0));
} else {
FunctionRef ref = FunctionRef.create(
scriptScope.getPainlessLookup(),
scriptScope.getFunctionTable(),
userNewArrayFunctionRefNode.getLocation(),
targetType.targetType(),
"this",
name,
0,
scriptScope.getCompilerSettings().asMap(),
false
);
valueType = targetType.targetType();
semanticScope.putDecoration(userNewArrayFunctionRefNode, new ReferenceDecoration(ref));
}
semanticScope.putDecoration(userNewArrayFunctionRefNode, new ValueType(valueType));
}
/**
* Visits a symbol expression which covers static types, partial canonical types,
* and variables.
* Checks: type checking, type resolution, variable resolution
*/
@Override
public void visitSymbol(ESymbol userSymbolNode, SemanticScope semanticScope) {
boolean read = semanticScope.getCondition(userSymbolNode, Read.class);
boolean write = semanticScope.getCondition(userSymbolNode, Write.class);
String symbol = userSymbolNode.getSymbol();
Class<?> staticType = semanticScope.getScriptScope().getPainlessLookup().canonicalTypeNameToType(symbol);
if (staticType != null) {
if (write) {
throw userSymbolNode.createError(
new IllegalArgumentException(
"invalid assignment: "
+ "cannot write a value to a static type ["
+ PainlessLookupUtility.typeToCanonicalTypeName(staticType)
+ "]"
)
);
}
if (read == false) {
throw userSymbolNode.createError(
new IllegalArgumentException(
"not a statement: " + "static type [" + PainlessLookupUtility.typeToCanonicalTypeName(staticType) + "] not used"
)
);
}
semanticScope.putDecoration(userSymbolNode, new StaticType(staticType));
} else if (semanticScope.isVariableDefined(symbol)) {
if (read == false && write == false) {
throw userSymbolNode.createError(new IllegalArgumentException("not a statement: variable [" + symbol + "] not used"));
}
Location location = userSymbolNode.getLocation();
Variable variable = semanticScope.getVariable(location, symbol);
if (write && variable.isFinal()) {
throw userSymbolNode.createError(new IllegalArgumentException("Variable [" + variable.name() + "] is read-only."));
}
Class<?> valueType = variable.type();
semanticScope.putDecoration(userSymbolNode, new ValueType(valueType));
} else {
semanticScope.putDecoration(userSymbolNode, new PartialCanonicalTypeName(symbol));
}
}
/**
* Visits a dot expression which is a field index with a qualifier (prefix) and
* may resolve to a static type, a partial canonical type, a field, a shortcut to a
* getter/setter method on a type, or a getter/setter for a Map or List.
* Checks: type validation, method resolution, field resolution
*/
@Override
public void visitDot(EDot userDotNode, SemanticScope semanticScope) {
boolean read = semanticScope.getCondition(userDotNode, Read.class);
boolean write = semanticScope.getCondition(userDotNode, Write.class);
if (read == false && write == false) {
throw userDotNode.createError(new IllegalArgumentException("not a statement: result of dot operator [.] not used"));
}
ScriptScope scriptScope = semanticScope.getScriptScope();
String index = userDotNode.getIndex();
AExpression userPrefixNode = userDotNode.getPrefixNode();
semanticScope.setCondition(userPrefixNode, Read.class);
visit(userPrefixNode, semanticScope);
ValueType prefixValueType = semanticScope.getDecoration(userPrefixNode, ValueType.class);
StaticType prefixStaticType = semanticScope.getDecoration(userPrefixNode, StaticType.class);
if (prefixValueType != null && prefixStaticType != null) {
throw userDotNode.createError(
new IllegalStateException(
Strings.format(
"cannot have both value [%s] and type [%s]",
prefixValueType.getValueCanonicalTypeName(),
prefixStaticType.getStaticCanonicalTypeName()
)
)
);
}
if (semanticScope.hasDecoration(userPrefixNode, PartialCanonicalTypeName.class)) {
if (prefixValueType != null) {
throw userDotNode.createError(
new IllegalArgumentException(
"value required: instead found unexpected type " + "[" + prefixValueType.getValueCanonicalTypeName() + "]"
)
);
}
if (prefixStaticType != null) {
throw userDotNode.createError(
new IllegalArgumentException(
"value required: instead found unexpected type " + "[" + prefixStaticType.staticType() + "]"
)
);
}
String canonicalTypeName = semanticScope.getDecoration(userPrefixNode, PartialCanonicalTypeName.class)
.partialCanonicalTypeName()
+ "."
+ index;
Class<?> staticType = scriptScope.getPainlessLookup().canonicalTypeNameToType(canonicalTypeName);
if (staticType == null) {
semanticScope.putDecoration(userDotNode, new PartialCanonicalTypeName(canonicalTypeName));
} else {
if (write) {
throw userDotNode.createError(
new IllegalArgumentException(
"invalid assignment: "
+ "cannot write a value to a static type ["
+ PainlessLookupUtility.typeToCanonicalTypeName(staticType)
+ "]"
)
);
}
semanticScope.putDecoration(userDotNode, new StaticType(staticType));
}
} else {
Class<?> staticType = null;
if (prefixStaticType != null) {
String staticCanonicalTypeName = prefixStaticType.getStaticCanonicalTypeName() + "." + userDotNode.getIndex();
staticType = scriptScope.getPainlessLookup().canonicalTypeNameToType(staticCanonicalTypeName);
}
if (staticType != null) {
if (write) {
throw userDotNode.createError(
new IllegalArgumentException(
"invalid assignment: "
+ "cannot write a value to a static type ["
+ PainlessLookupUtility.typeToCanonicalTypeName(staticType)
+ "]"
)
);
}
semanticScope.putDecoration(userDotNode, new StaticType(staticType));
} else {
Class<?> valueType = null;
if (prefixValueType != null && prefixValueType.valueType().isArray()) {
if ("length".equals(index)) {
if (write) {
throw userDotNode.createError(
new IllegalArgumentException(
"invalid assignment: cannot assign a value write to read-only field [length] for an array."
)
);
}
valueType = int.class;
} else {
throw userDotNode.createError(
new IllegalArgumentException(
"Field [" + index + "] does not exist for type [" + prefixValueType.getValueCanonicalTypeName() + "]."
)
);
}
} else if (prefixValueType != null && prefixValueType.valueType() == def.class) {
TargetType targetType = userDotNode.isNullSafe() ? null : semanticScope.getDecoration(userDotNode, TargetType.class);
valueType = targetType == null || semanticScope.getCondition(userDotNode, Explicit.class)
? def.class
: targetType.targetType();
if (write) {
semanticScope.setCondition(userDotNode, DefOptimized.class);
}
} else {
Class<?> prefixType;
String prefixCanonicalTypeName;
boolean isStatic;
if (prefixValueType != null) {
prefixType = prefixValueType.valueType();
prefixCanonicalTypeName = prefixValueType.getValueCanonicalTypeName();
isStatic = false;
} else if (prefixStaticType != null) {
prefixType = prefixStaticType.staticType();
prefixCanonicalTypeName = prefixStaticType.getStaticCanonicalTypeName();
isStatic = true;
} else {
throw userDotNode.createError(new IllegalStateException("value required: instead found no value"));
}
PainlessField field = semanticScope.getScriptScope()
.getPainlessLookup()
.lookupPainlessField(prefixType, isStatic, index);
if (field == null) {
PainlessMethod getter;
PainlessMethod setter;
getter = scriptScope.getPainlessLookup()
.lookupPainlessMethod(
prefixType,
isStatic,
"get" + Character.toUpperCase(index.charAt(0)) + index.substring(1),
0
);
if (getter == null) {
getter = scriptScope.getPainlessLookup()
.lookupPainlessMethod(
prefixType,
isStatic,
"is" + Character.toUpperCase(index.charAt(0)) + index.substring(1),
0
);
}
setter = scriptScope.getPainlessLookup()
.lookupPainlessMethod(
prefixType,
isStatic,
"set" + Character.toUpperCase(index.charAt(0)) + index.substring(1),
1
);
if (getter != null || setter != null) {
if (getter != null && (getter.returnType() == void.class || getter.typeParameters().isEmpty() == false)) {
throw userDotNode.createError(
new IllegalArgumentException(
"Illegal get shortcut on field [" + index + "] for type [" + prefixCanonicalTypeName + "]."
)
);
}
if (setter != null && (setter.returnType() != void.class || setter.typeParameters().size() != 1)) {
throw userDotNode.createError(
new IllegalArgumentException(
"Illegal set shortcut on field [" + index + "] for type [" + prefixCanonicalTypeName + "]."
)
);
}
if (getter != null && setter != null && setter.typeParameters().get(0) != getter.returnType()) {
throw userDotNode.createError(new IllegalArgumentException("Shortcut argument types must match."));
}
if ((read == false || getter != null) && (write == false || setter != null)) {
valueType = setter != null ? setter.typeParameters().get(0) : getter.returnType();
} else {
throw userDotNode.createError(
new IllegalArgumentException(
"Illegal shortcut on field [" + index + "] for type [" + prefixCanonicalTypeName + "]."
)
);
}
if (getter != null) {
semanticScope.putDecoration(userDotNode, new GetterPainlessMethod(getter));
}
if (setter != null) {
semanticScope.putDecoration(userDotNode, new SetterPainlessMethod(setter));
}
semanticScope.setCondition(userDotNode, Shortcut.class);
} else if (isStatic == false) {
if (Map.class.isAssignableFrom(prefixValueType.valueType())) {
getter = scriptScope.getPainlessLookup().lookupPainlessMethod(prefixType, false, "get", 1);
setter = scriptScope.getPainlessLookup().lookupPainlessMethod(prefixType, false, "put", 2);
if (getter != null && (getter.returnType() == void.class || getter.typeParameters().size() != 1)) {
throw userDotNode.createError(
new IllegalArgumentException("Illegal map get shortcut for type [" + prefixCanonicalTypeName + "].")
);
}
if (setter != null && setter.typeParameters().size() != 2) {
throw userDotNode.createError(
new IllegalArgumentException("Illegal map set shortcut for type [" + prefixCanonicalTypeName + "].")
);
}
if (getter != null
&& setter != null
&& (getter.typeParameters().get(0).equals(setter.typeParameters().get(0)) == false
|| getter.returnType().equals(setter.typeParameters().get(1)) == false)) {
throw userDotNode.createError(new IllegalArgumentException("Shortcut argument types must match."));
}
if ((read == false || getter != null) && (write == false || setter != null)) {
valueType = setter != null ? setter.typeParameters().get(1) : getter.returnType();
} else {
throw userDotNode.createError(
new IllegalArgumentException("Illegal map shortcut for type [" + prefixCanonicalTypeName + "].")
);
}
if (getter != null) {
semanticScope.putDecoration(userDotNode, new GetterPainlessMethod(getter));
}
if (setter != null) {
semanticScope.putDecoration(userDotNode, new SetterPainlessMethod(setter));
}
semanticScope.setCondition(userDotNode, MapShortcut.class);
}
if (List.class.isAssignableFrom(prefixType)) {
try {
scriptScope.putDecoration(userDotNode, new StandardConstant(Integer.parseInt(index)));
} catch (NumberFormatException nfe) {
throw userDotNode.createError(new IllegalArgumentException("invalid list index [" + index + "]", nfe));
}
getter = scriptScope.getPainlessLookup().lookupPainlessMethod(prefixType, false, "get", 1);
setter = scriptScope.getPainlessLookup().lookupPainlessMethod(prefixType, false, "set", 2);
if (getter != null
&& (getter.returnType() == void.class
|| getter.typeParameters().size() != 1
|| getter.typeParameters().get(0) != int.class)) {
throw userDotNode.createError(
new IllegalArgumentException(
"Illegal list get shortcut for type [" + prefixCanonicalTypeName + "]."
)
);
}
if (setter != null
&& (setter.typeParameters().size() != 2 || setter.typeParameters().get(0) != int.class)) {
throw userDotNode.createError(
new IllegalArgumentException(
"Illegal list set shortcut for type [" + prefixCanonicalTypeName + "]."
)
);
}
if (getter != null
&& setter != null
&& (getter.typeParameters().get(0).equals(setter.typeParameters().get(0)) == false
|| getter.returnType().equals(setter.typeParameters().get(1)) == false)) {
throw userDotNode.createError(new IllegalArgumentException("Shortcut argument types must match."));
}
if ((read == false || getter != null) && (write == false || setter != null)) {
valueType = setter != null ? setter.typeParameters().get(1) : getter.returnType();
} else {
throw userDotNode.createError(
new IllegalArgumentException("Illegal list shortcut for type [" + prefixCanonicalTypeName + "].")
);
}
if (getter != null) {
semanticScope.putDecoration(userDotNode, new GetterPainlessMethod(getter));
}
if (setter != null) {
semanticScope.putDecoration(userDotNode, new SetterPainlessMethod(setter));
}
semanticScope.setCondition(userDotNode, ListShortcut.class);
}
}
if (valueType == null) {
if (prefixValueType != null) {
throw userDotNode.createError(
new IllegalArgumentException(
"field [" + prefixValueType.getValueCanonicalTypeName() + ", " + index + "] not found"
)
);
} else {
throw userDotNode.createError(
new IllegalArgumentException(
"field [" + prefixStaticType.getStaticCanonicalTypeName() + ", " + index + "] not found"
)
);
}
}
} else {
if (write && Modifier.isFinal(field.javaField().getModifiers())) {
throw userDotNode.createError(
new IllegalArgumentException(
"invalid assignment: cannot assign a value to read-only field [" + field.javaField().getName() + "]"
)
);
}
semanticScope.putDecoration(userDotNode, new StandardPainlessField(field));
valueType = field.typeParameter();
}
}
semanticScope.putDecoration(userDotNode, new ValueType(valueType));
if (userDotNode.isNullSafe()) {
if (write) {
throw userDotNode.createError(
new IllegalArgumentException("invalid assignment: cannot assign a value to a null safe operation [?.]")
);
}
if (valueType.isPrimitive()) {
throw new IllegalArgumentException("Result of null safe operator must be nullable");
}
}
}
}
}
/**
* Visits a brace expression which is an array index with a qualifier (prefix) and
* may resolve to an array index, or a getter/setter for a Map or List.
* Checks: type validation, method resolution, field resolution
*/
@Override
public void visitBrace(EBrace userBraceNode, SemanticScope semanticScope) {
boolean read = semanticScope.getCondition(userBraceNode, Read.class);
boolean write = semanticScope.getCondition(userBraceNode, Write.class);
if (read == false && write == false) {
throw userBraceNode.createError(new IllegalArgumentException("not a statement: result of brace operator not used"));
}
AExpression userPrefixNode = userBraceNode.getPrefixNode();
semanticScope.setCondition(userPrefixNode, Read.class);
checkedVisit(userPrefixNode, semanticScope);
Class<?> prefixValueType = semanticScope.getDecoration(userPrefixNode, ValueType.class).valueType();
AExpression userIndexNode = userBraceNode.getIndexNode();
Class<?> valueType;
if (prefixValueType.isArray()) {
semanticScope.setCondition(userIndexNode, Read.class);
semanticScope.putDecoration(userIndexNode, new TargetType(int.class));
checkedVisit(userIndexNode, semanticScope);
decorateWithCast(userIndexNode, semanticScope);
valueType = prefixValueType.getComponentType();
} else if (prefixValueType == def.class) {
semanticScope.setCondition(userIndexNode, Read.class);
checkedVisit(userIndexNode, semanticScope);
TargetType targetType = semanticScope.getDecoration(userBraceNode, TargetType.class);
// TODO: remove ZonedDateTime exception when JodaCompatibleDateTime is removed
valueType = targetType == null
|| targetType.targetType() == ZonedDateTime.class
|| semanticScope.getCondition(userBraceNode, Explicit.class) ? def.class : targetType.targetType();
if (write) {
semanticScope.setCondition(userBraceNode, DefOptimized.class);
}
} else if (Map.class.isAssignableFrom(prefixValueType)) {
String canonicalClassName = PainlessLookupUtility.typeToCanonicalTypeName(prefixValueType);
PainlessMethod getter = semanticScope.getScriptScope()
.getPainlessLookup()
.lookupPainlessMethod(prefixValueType, false, "get", 1);
PainlessMethod setter = semanticScope.getScriptScope()
.getPainlessLookup()
.lookupPainlessMethod(prefixValueType, false, "put", 2);
if (getter != null && (getter.returnType() == void.class || getter.typeParameters().size() != 1)) {
throw userBraceNode.createError(
new IllegalArgumentException("Illegal map get shortcut for type [" + canonicalClassName + "].")
);
}
if (setter != null && setter.typeParameters().size() != 2) {
throw userBraceNode.createError(
new IllegalArgumentException("Illegal map set shortcut for type [" + canonicalClassName + "].")
);
}
if (getter != null
&& setter != null
&& (getter.typeParameters().get(0).equals(setter.typeParameters().get(0)) == false
|| getter.returnType().equals(setter.typeParameters().get(1)) == false)) {
throw userBraceNode.createError(new IllegalArgumentException("Shortcut argument types must match."));
}
if ((read == false || getter != null) && (write == false || setter != null)) {
semanticScope.setCondition(userIndexNode, Read.class);
semanticScope.putDecoration(
userIndexNode,
new TargetType(setter != null ? setter.typeParameters().get(0) : getter.typeParameters().get(0))
);
checkedVisit(userIndexNode, semanticScope);
decorateWithCast(userIndexNode, semanticScope);
valueType = setter != null ? setter.typeParameters().get(1) : getter.returnType();
if (getter != null) {
semanticScope.putDecoration(userBraceNode, new GetterPainlessMethod(getter));
}
if (setter != null) {
semanticScope.putDecoration(userBraceNode, new SetterPainlessMethod(setter));
}
} else {
throw userBraceNode.createError(
new IllegalArgumentException("Illegal map shortcut for type [" + canonicalClassName + "].")
);
}
semanticScope.setCondition(userBraceNode, MapShortcut.class);
} else if (List.class.isAssignableFrom(prefixValueType)) {
String canonicalClassName = PainlessLookupUtility.typeToCanonicalTypeName(prefixValueType);
PainlessMethod getter = semanticScope.getScriptScope()
.getPainlessLookup()
.lookupPainlessMethod(prefixValueType, false, "get", 1);
PainlessMethod setter = semanticScope.getScriptScope()
.getPainlessLookup()
.lookupPainlessMethod(prefixValueType, false, "set", 2);
if (getter != null
&& (getter.returnType() == void.class
|| getter.typeParameters().size() != 1
|| getter.typeParameters().get(0) != int.class)) {
throw userBraceNode.createError(
new IllegalArgumentException("Illegal list get shortcut for type [" + canonicalClassName + "].")
);
}
if (setter != null && (setter.typeParameters().size() != 2 || setter.typeParameters().get(0) != int.class)) {
throw userBraceNode.createError(
new IllegalArgumentException("Illegal list set shortcut for type [" + canonicalClassName + "].")
);
}
if (getter != null
&& setter != null
&& (getter.typeParameters().get(0).equals(setter.typeParameters().get(0)) == false
|| getter.returnType().equals(setter.typeParameters().get(1)) == false)) {
throw userBraceNode.createError(new IllegalArgumentException("Shortcut argument types must match."));
}
if ((read == false || getter != null) && (write == false || setter != null)) {
semanticScope.setCondition(userIndexNode, Read.class);
semanticScope.putDecoration(userIndexNode, new TargetType(int.class));
checkedVisit(userIndexNode, semanticScope);
decorateWithCast(userIndexNode, semanticScope);
valueType = setter != null ? setter.typeParameters().get(1) : getter.returnType();
if (getter != null) {
semanticScope.putDecoration(userBraceNode, new GetterPainlessMethod(getter));
}
if (setter != null) {
semanticScope.putDecoration(userBraceNode, new SetterPainlessMethod(setter));
}
} else {
throw userBraceNode.createError(
new IllegalArgumentException("Illegal list shortcut for type [" + canonicalClassName + "].")
);
}
semanticScope.setCondition(userBraceNode, ListShortcut.class);
} else {
throw userBraceNode.createError(
new IllegalArgumentException(
"Illegal array access on type " + "[" + PainlessLookupUtility.typeToCanonicalTypeName(prefixValueType) + "]."
)
);
}
semanticScope.putDecoration(userBraceNode, new ValueType(valueType));
}
/**
* Visits a call expression which is a method call with a qualifier (prefix).
* Checks: type validation, method resolution
*/
@Override
public void visitCall(ECall userCallNode, SemanticScope semanticScope) {
String methodName = userCallNode.getMethodName();
List<AExpression> userArgumentNodes = userCallNode.getArgumentNodes();
int userArgumentsSize = userArgumentNodes.size();
if (semanticScope.getCondition(userCallNode, Write.class)) {
throw userCallNode.createError(
new IllegalArgumentException(
"invalid assignment: cannot assign a value to method call [" + methodName + "/" + userArgumentsSize + "]"
)
);
}
AExpression userPrefixNode = userCallNode.getPrefixNode();
semanticScope.setCondition(userPrefixNode, Read.class);
visit(userPrefixNode, semanticScope);
ValueType prefixValueType = semanticScope.getDecoration(userPrefixNode, ValueType.class);
StaticType prefixStaticType = semanticScope.getDecoration(userPrefixNode, StaticType.class);
if (prefixValueType != null && prefixStaticType != null) {
throw userCallNode.createError(
new IllegalStateException(
Strings.format(
"cannot have both value [%s] and type [%s]",
prefixValueType.getValueCanonicalTypeName(),
prefixStaticType.getStaticCanonicalTypeName()
)
)
);
}
if (semanticScope.hasDecoration(userPrefixNode, PartialCanonicalTypeName.class)) {
throw userCallNode.createError(
new IllegalArgumentException(
"cannot resolve symbol "
+ "["
+ semanticScope.getDecoration(userPrefixNode, PartialCanonicalTypeName.class).partialCanonicalTypeName()
+ "]"
)
);
}
boolean dynamic = false;
PainlessMethod method = null;
if (prefixValueType != null) {
Class<?> type = prefixValueType.valueType();
PainlessLookup lookup = semanticScope.getScriptScope().getPainlessLookup();
if (prefixValueType.valueType() == def.class) {
dynamic = true;
} else {
method = lookup.lookupPainlessMethod(type, false, methodName, userArgumentsSize);
if (method == null) {
PainlessClass pc = lookup.lookupPainlessClass(type);
dynamic = pc != null
&& pc.annotations.containsKey(DynamicTypeAnnotation.class)
&& lookup.lookupPainlessSubClassesMethod(type, methodName, userArgumentsSize) != null;
if (dynamic == false) {
throw userCallNode.createError(
new IllegalArgumentException(
Strings.format(
"member method [%s, %s/%d] not found",
prefixValueType.getValueCanonicalTypeName(),
methodName,
userArgumentsSize
)
)
);
}
}
}
} else if (prefixStaticType != null) {
method = semanticScope.getScriptScope()
.getPainlessLookup()
.lookupPainlessMethod(prefixStaticType.staticType(), true, methodName, userArgumentsSize);
if (method == null) {
throw userCallNode.createError(
new IllegalArgumentException(
Strings.format(
"static method [%s, %s/%d] not found",
prefixStaticType.getStaticCanonicalTypeName(),
methodName,
userArgumentsSize
)
)
);
}
} else {
throw userCallNode.createError(new IllegalStateException("value required: instead found no value"));
}
Class<?> valueType;
if (dynamic) {
for (AExpression userArgumentNode : userArgumentNodes) {
semanticScope.setCondition(userArgumentNode, Read.class);
semanticScope.setCondition(userArgumentNode, Internal.class);
checkedVisit(userArgumentNode, semanticScope);
Class<?> argumentValueType = semanticScope.getDecoration(userArgumentNode, ValueType.class).valueType();
if (argumentValueType == void.class) {
throw userCallNode.createError(
new IllegalArgumentException("Argument(s) cannot be of [void] type when calling method [" + methodName + "].")
);
}
}
TargetType targetType = userCallNode.isNullSafe() ? null : semanticScope.getDecoration(userCallNode, TargetType.class);
valueType = targetType == null || semanticScope.getCondition(userCallNode, Explicit.class)
? def.class
: targetType.targetType();
semanticScope.setCondition(userCallNode, DynamicInvocation.class);
} else {
Objects.requireNonNull(method);
semanticScope.getScriptScope().markNonDeterministic(method.annotations().containsKey(NonDeterministicAnnotation.class));
for (int argument = 0; argument < userArgumentsSize; ++argument) {
AExpression userArgumentNode = userArgumentNodes.get(argument);
semanticScope.setCondition(userArgumentNode, Read.class);
semanticScope.putDecoration(userArgumentNode, new TargetType(method.typeParameters().get(argument)));
semanticScope.setCondition(userArgumentNode, Internal.class);
checkedVisit(userArgumentNode, semanticScope);
decorateWithCast(userArgumentNode, semanticScope);
}
semanticScope.putDecoration(userCallNode, new StandardPainlessMethod(method));
valueType = method.returnType();
}
if (userCallNode.isNullSafe() && valueType.isPrimitive()) {
throw new IllegalArgumentException("Result of null safe operator must be nullable");
}
semanticScope.putDecoration(userCallNode, new ValueType(valueType));
}
}
| function |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/hql/InferenceTest.java | {
"start": 1041,
"end": 2865
} | class ____ {
@BeforeEach
public void createTestData(SessionFactoryScope factoryScope) {
factoryScope.inTransaction( session -> {
var person = new Person( 1, "Johannes", "Buehler" );
session.persist( person );
} );
}
@AfterEach
public void dropTestData(SessionFactoryScope factoryScope) {
factoryScope.dropData();
}
@Test
public void testBinaryArithmeticInference(SessionFactoryScope factoryScope) {
factoryScope.inTransaction( session -> {
List<Person> resultList = session.createQuery( "from Person p where p.id + 1 < :param", Person.class )
.setParameter("param", 10)
.getResultList();
assertThat( resultList ).map( Person::getId ).contains( 1 );
} );
}
@Test
@JiraKey("HHH-17386")
public void testInferenceSourceResetForOnClause(SessionFactoryScope factoryScope) {
factoryScope.inTransaction( session -> {
var hql = """
from Person p
where p in (
select p2
from Person p2
join Person p3
on exists (select 1 from Person p4)
)
""";
session.createQuery( hql, Person.class ).getResultList();
} );
}
@Test
@JiraKey("HHH-18046")
@SkipForDialect( dialectClass = CockroachDialect.class, reason = "CockroachDB doesn't support multiplication between int and float columns" )
public void testBinaryArithmeticParameterInference(SessionFactoryScope factoryScope) {
factoryScope.inTransaction( session -> {
HibernateCriteriaBuilder cb = session.getCriteriaBuilder();
JpaCriteriaQuery<Double> cq = cb.createQuery( Double.class );
JpaRoot<Person> root = cq.from( Person.class );
cq.select( cb.toDouble( cb.prod( root.get( "id" ), 0.5f ) ) );
Double result = session.createQuery( cq ).getSingleResult();
assertThat( result ).isEqualTo( 0.5d );
} );
}
@Entity(name = "Person")
public static | InferenceTest |
java | mapstruct__mapstruct | processor/src/test/java/org/mapstruct/ap/test/bugs/_3413/Erroneous3413Mapper.java | {
"start": 338,
"end": 568
} | interface ____ {
Erroneous3413Mapper INSTANCE = Mappers.getMapper( Erroneous3413Mapper.class );
@Mapping(target = "", expression = "", conditionQualifiedByName = "")
ToPOJO map(FromPOJO fromPOJO);
| Erroneous3413Mapper |
java | apache__camel | components/camel-mllp/src/generated/java/org/apache/camel/component/mllp/MllpEndpointConfigurer.java | {
"start": 731,
"end": 12344
} | class ____ extends PropertyConfigurerSupport implements GeneratedPropertyConfigurer, PropertyConfigurerGetter {
@Override
public boolean configure(CamelContext camelContext, Object obj, String name, Object value, boolean ignoreCase) {
MllpEndpoint target = (MllpEndpoint) obj;
switch (ignoreCase ? name.toLowerCase() : name) {
case "accepttimeout":
case "acceptTimeout": target.getConfiguration().setAcceptTimeout(property(camelContext, int.class, value)); return true;
case "autoack":
case "autoAck": target.getConfiguration().setAutoAck(property(camelContext, boolean.class, value)); return true;
case "backlog": target.getConfiguration().setBacklog(property(camelContext, java.lang.Integer.class, value)); return true;
case "bindretryinterval":
case "bindRetryInterval": target.getConfiguration().setBindRetryInterval(property(camelContext, int.class, value)); return true;
case "bindtimeout":
case "bindTimeout": target.getConfiguration().setBindTimeout(property(camelContext, int.class, value)); return true;
case "bridgeerrorhandler":
case "bridgeErrorHandler": target.getConfiguration().setBridgeErrorHandler(property(camelContext, boolean.class, value)); return true;
case "charsetname":
case "charsetName": target.getConfiguration().setCharsetName(property(camelContext, java.lang.String.class, value)); return true;
case "connecttimeout":
case "connectTimeout": target.getConfiguration().setConnectTimeout(property(camelContext, int.class, value)); return true;
case "exceptionhandler":
case "exceptionHandler": target.setExceptionHandler(property(camelContext, org.apache.camel.spi.ExceptionHandler.class, value)); return true;
case "exchangepattern":
case "exchangePattern": target.getConfiguration().setExchangePattern(property(camelContext, org.apache.camel.ExchangePattern.class, value)); return true;
case "hl7headers":
case "hl7Headers": target.getConfiguration().setHl7Headers(property(camelContext, boolean.class, value)); return true;
case "idletimeout":
case "idleTimeout": target.getConfiguration().setIdleTimeout(property(camelContext, java.lang.Integer.class, value)); return true;
case "idletimeoutstrategy":
case "idleTimeoutStrategy": target.getConfiguration().setIdleTimeoutStrategy(property(camelContext, org.apache.camel.component.mllp.MllpIdleTimeoutStrategy.class, value)); return true;
case "keepalive":
case "keepAlive": target.getConfiguration().setKeepAlive(property(camelContext, java.lang.Boolean.class, value)); return true;
case "lazystartproducer":
case "lazyStartProducer": target.setLazyStartProducer(property(camelContext, boolean.class, value)); return true;
case "lenientbind":
case "lenientBind": target.getConfiguration().setLenientBind(property(camelContext, boolean.class, value)); return true;
case "maxbuffersize":
case "maxBufferSize": target.getConfiguration().setMaxBufferSize(property(camelContext, int.class, value)); return true;
case "maxconcurrentconsumers":
case "maxConcurrentConsumers": target.getConfiguration().setMaxConcurrentConsumers(property(camelContext, int.class, value)); return true;
case "minbuffersize":
case "minBufferSize": target.getConfiguration().setMinBufferSize(property(camelContext, int.class, value)); return true;
case "readtimeout":
case "readTimeout": target.getConfiguration().setReadTimeout(property(camelContext, int.class, value)); return true;
case "receivebuffersize":
case "receiveBufferSize": target.getConfiguration().setReceiveBufferSize(property(camelContext, java.lang.Integer.class, value)); return true;
case "receivetimeout":
case "receiveTimeout": target.getConfiguration().setReceiveTimeout(property(camelContext, int.class, value)); return true;
case "requireendofdata":
case "requireEndOfData": target.getConfiguration().setRequireEndOfData(property(camelContext, boolean.class, value)); return true;
case "reuseaddress":
case "reuseAddress": target.getConfiguration().setReuseAddress(property(camelContext, java.lang.Boolean.class, value)); return true;
case "sendbuffersize":
case "sendBufferSize": target.getConfiguration().setSendBufferSize(property(camelContext, java.lang.Integer.class, value)); return true;
case "sslcontextparameters":
case "sslContextParameters": target.getConfiguration().setSslContextParameters(property(camelContext, org.apache.camel.support.jsse.SSLContextParameters.class, value)); return true;
case "stringpayload":
case "stringPayload": target.getConfiguration().setStringPayload(property(camelContext, boolean.class, value)); return true;
case "tcpnodelay":
case "tcpNoDelay": target.getConfiguration().setTcpNoDelay(property(camelContext, java.lang.Boolean.class, value)); return true;
case "validatepayload":
case "validatePayload": target.getConfiguration().setValidatePayload(property(camelContext, boolean.class, value)); return true;
default: return false;
}
}
@Override
public Class<?> getOptionType(String name, boolean ignoreCase) {
switch (ignoreCase ? name.toLowerCase() : name) {
case "accepttimeout":
case "acceptTimeout": return int.class;
case "autoack":
case "autoAck": return boolean.class;
case "backlog": return java.lang.Integer.class;
case "bindretryinterval":
case "bindRetryInterval": return int.class;
case "bindtimeout":
case "bindTimeout": return int.class;
case "bridgeerrorhandler":
case "bridgeErrorHandler": return boolean.class;
case "charsetname":
case "charsetName": return java.lang.String.class;
case "connecttimeout":
case "connectTimeout": return int.class;
case "exceptionhandler":
case "exceptionHandler": return org.apache.camel.spi.ExceptionHandler.class;
case "exchangepattern":
case "exchangePattern": return org.apache.camel.ExchangePattern.class;
case "hl7headers":
case "hl7Headers": return boolean.class;
case "idletimeout":
case "idleTimeout": return java.lang.Integer.class;
case "idletimeoutstrategy":
case "idleTimeoutStrategy": return org.apache.camel.component.mllp.MllpIdleTimeoutStrategy.class;
case "keepalive":
case "keepAlive": return java.lang.Boolean.class;
case "lazystartproducer":
case "lazyStartProducer": return boolean.class;
case "lenientbind":
case "lenientBind": return boolean.class;
case "maxbuffersize":
case "maxBufferSize": return int.class;
case "maxconcurrentconsumers":
case "maxConcurrentConsumers": return int.class;
case "minbuffersize":
case "minBufferSize": return int.class;
case "readtimeout":
case "readTimeout": return int.class;
case "receivebuffersize":
case "receiveBufferSize": return java.lang.Integer.class;
case "receivetimeout":
case "receiveTimeout": return int.class;
case "requireendofdata":
case "requireEndOfData": return boolean.class;
case "reuseaddress":
case "reuseAddress": return java.lang.Boolean.class;
case "sendbuffersize":
case "sendBufferSize": return java.lang.Integer.class;
case "sslcontextparameters":
case "sslContextParameters": return org.apache.camel.support.jsse.SSLContextParameters.class;
case "stringpayload":
case "stringPayload": return boolean.class;
case "tcpnodelay":
case "tcpNoDelay": return java.lang.Boolean.class;
case "validatepayload":
case "validatePayload": return boolean.class;
default: return null;
}
}
@Override
public Object getOptionValue(Object obj, String name, boolean ignoreCase) {
MllpEndpoint target = (MllpEndpoint) obj;
switch (ignoreCase ? name.toLowerCase() : name) {
case "accepttimeout":
case "acceptTimeout": return target.getConfiguration().getAcceptTimeout();
case "autoack":
case "autoAck": return target.getConfiguration().isAutoAck();
case "backlog": return target.getConfiguration().getBacklog();
case "bindretryinterval":
case "bindRetryInterval": return target.getConfiguration().getBindRetryInterval();
case "bindtimeout":
case "bindTimeout": return target.getConfiguration().getBindTimeout();
case "bridgeerrorhandler":
case "bridgeErrorHandler": return target.getConfiguration().isBridgeErrorHandler();
case "charsetname":
case "charsetName": return target.getConfiguration().getCharsetName();
case "connecttimeout":
case "connectTimeout": return target.getConfiguration().getConnectTimeout();
case "exceptionhandler":
case "exceptionHandler": return target.getExceptionHandler();
case "exchangepattern":
case "exchangePattern": return target.getConfiguration().getExchangePattern();
case "hl7headers":
case "hl7Headers": return target.getConfiguration().isHl7Headers();
case "idletimeout":
case "idleTimeout": return target.getConfiguration().getIdleTimeout();
case "idletimeoutstrategy":
case "idleTimeoutStrategy": return target.getConfiguration().getIdleTimeoutStrategy();
case "keepalive":
case "keepAlive": return target.getConfiguration().getKeepAlive();
case "lazystartproducer":
case "lazyStartProducer": return target.isLazyStartProducer();
case "lenientbind":
case "lenientBind": return target.getConfiguration().isLenientBind();
case "maxbuffersize":
case "maxBufferSize": return target.getConfiguration().getMaxBufferSize();
case "maxconcurrentconsumers":
case "maxConcurrentConsumers": return target.getConfiguration().getMaxConcurrentConsumers();
case "minbuffersize":
case "minBufferSize": return target.getConfiguration().getMinBufferSize();
case "readtimeout":
case "readTimeout": return target.getConfiguration().getReadTimeout();
case "receivebuffersize":
case "receiveBufferSize": return target.getConfiguration().getReceiveBufferSize();
case "receivetimeout":
case "receiveTimeout": return target.getConfiguration().getReceiveTimeout();
case "requireendofdata":
case "requireEndOfData": return target.getConfiguration().isRequireEndOfData();
case "reuseaddress":
case "reuseAddress": return target.getConfiguration().getReuseAddress();
case "sendbuffersize":
case "sendBufferSize": return target.getConfiguration().getSendBufferSize();
case "sslcontextparameters":
case "sslContextParameters": return target.getConfiguration().getSslContextParameters();
case "stringpayload":
case "stringPayload": return target.getConfiguration().isStringPayload();
case "tcpnodelay":
case "tcpNoDelay": return target.getConfiguration().getTcpNoDelay();
case "validatepayload":
case "validatePayload": return target.getConfiguration().isValidatePayload();
default: return null;
}
}
}
| MllpEndpointConfigurer |
java | apache__camel | dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/SecretsManagerEndpointBuilderFactory.java | {
"start": 21018,
"end": 23190
} | interface ____ {
/**
* AWS Secrets Manager (camel-aws-secrets-manager)
* Manage secrets using AWS Secrets Manager.
*
* Category: cloud,management
* Since: 3.9
* Maven coordinates: org.apache.camel:camel-aws-secrets-manager
*
* @return the dsl builder for the headers' name.
*/
default SecretsManagerHeaderNameBuilder awsSecretsManager() {
return SecretsManagerHeaderNameBuilder.INSTANCE;
}
/**
* AWS Secrets Manager (camel-aws-secrets-manager)
* Manage secrets using AWS Secrets Manager.
*
* Category: cloud,management
* Since: 3.9
* Maven coordinates: org.apache.camel:camel-aws-secrets-manager
*
* Syntax: <code>aws-secrets-manager:label</code>
*
* Path parameter: label (required)
* Logical name
*
* @param path label
* @return the dsl builder
*/
default SecretsManagerEndpointBuilder awsSecretsManager(String path) {
return SecretsManagerEndpointBuilderFactory.endpointBuilder("aws-secrets-manager", path);
}
/**
* AWS Secrets Manager (camel-aws-secrets-manager)
* Manage secrets using AWS Secrets Manager.
*
* Category: cloud,management
* Since: 3.9
* Maven coordinates: org.apache.camel:camel-aws-secrets-manager
*
* Syntax: <code>aws-secrets-manager:label</code>
*
* Path parameter: label (required)
* Logical name
*
* @param componentName to use a custom component name for the endpoint
* instead of the default name
* @param path label
* @return the dsl builder
*/
default SecretsManagerEndpointBuilder awsSecretsManager(String componentName, String path) {
return SecretsManagerEndpointBuilderFactory.endpointBuilder(componentName, path);
}
}
/**
* The builder of headers' name for the AWS Secrets Manager component.
*/
public static | SecretsManagerBuilders |
java | resilience4j__resilience4j | resilience4j-circuitbreaker/src/main/java/io/github/resilience4j/circuitbreaker/event/CircuitBreakerOnFailureRateExceededEvent.java | {
"start": 61,
"end": 826
} | class ____ extends AbstractCircuitBreakerEvent {
private final float failureRate;
public CircuitBreakerOnFailureRateExceededEvent(String circuitBreakerName, float failureRate) {
super(circuitBreakerName);
this.failureRate = failureRate;
}
public float getFailureRate() {
return failureRate;
}
@Override
public Type getEventType() {
return Type.FAILURE_RATE_EXCEEDED;
}
@Override
public String toString() {
return String
.format("%s: CircuitBreaker '%s' exceeded failure rate threshold. Current failure rate: %s",
getCreationTime(),
getCircuitBreakerName(),
getFailureRate());
}
}
| CircuitBreakerOnFailureRateExceededEvent |
java | apache__camel | dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/CxfRsEndpointBuilderFactory.java | {
"start": 3106,
"end": 13968
} | class ____ be omitted, to emulate document-only endpoints.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: common
*
* @param modelRef the value to set
* @return the dsl builder
*/
default CxfRsEndpointConsumerBuilder modelRef(String modelRef) {
doSetProperty("modelRef", modelRef);
return this;
}
/**
* Set custom JAX-RS provider(s) list to the CxfRs endpoint. You can
* specify a string with a list of providers to lookup in the registy
* separated by comma.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: common
*
* @param providers the value to set
* @return the dsl builder
*/
default CxfRsEndpointConsumerBuilder providers(String providers) {
doSetProperty("providers", providers);
return this;
}
/**
* The resource classes which you want to export as REST service.
* Multiple classes can be separated by comma.
*
* The option is a:
* <code>java.util.List<java.lang.Class<java.lang.Object>></code> type.
*
* Group: common
*
* @param resourceClasses the value to set
* @return the dsl builder
*/
default CxfRsEndpointConsumerBuilder resourceClasses(List<java.lang.Class<java.lang.Object>> resourceClasses) {
doSetProperty("resourceClasses", resourceClasses);
return this;
}
/**
* The resource classes which you want to export as REST service.
* Multiple classes can be separated by comma.
*
* The option will be converted to a
* <code>java.util.List<java.lang.Class<java.lang.Object>></code> type.
*
* Group: common
*
* @param resourceClasses the value to set
* @return the dsl builder
*/
default CxfRsEndpointConsumerBuilder resourceClasses(String resourceClasses) {
doSetProperty("resourceClasses", resourceClasses);
return this;
}
/**
* Sets the locations of the schema(s) which can be used to validate the
* incoming XML or JAXB-driven JSON.
*
* The option is a: <code>java.util.List<java.lang.String></code>
* type.
*
* Group: common
*
* @param schemaLocations the value to set
* @return the dsl builder
*/
default CxfRsEndpointConsumerBuilder schemaLocations(List<java.lang.String> schemaLocations) {
doSetProperty("schemaLocations", schemaLocations);
return this;
}
/**
* Sets the locations of the schema(s) which can be used to validate the
* incoming XML or JAXB-driven JSON.
*
* The option will be converted to a
* <code>java.util.List<java.lang.String></code> type.
*
* Group: common
*
* @param schemaLocations the value to set
* @return the dsl builder
*/
default CxfRsEndpointConsumerBuilder schemaLocations(String schemaLocations) {
doSetProperty("schemaLocations", schemaLocations);
return this;
}
/**
* This option controls whether the PhaseInterceptorChain skips logging
* the Fault that it catches.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: common
*
* @param skipFaultLogging the value to set
* @return the dsl builder
*/
default CxfRsEndpointConsumerBuilder skipFaultLogging(boolean skipFaultLogging) {
doSetProperty("skipFaultLogging", skipFaultLogging);
return this;
}
/**
* This option controls whether the PhaseInterceptorChain skips logging
* the Fault that it catches.
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: false
* Group: common
*
* @param skipFaultLogging the value to set
* @return the dsl builder
*/
default CxfRsEndpointConsumerBuilder skipFaultLogging(String skipFaultLogging) {
doSetProperty("skipFaultLogging", skipFaultLogging);
return this;
}
/**
* Sets how requests and responses will be mapped to/from Camel. Two
* values are possible: SimpleConsumer: This binding style processes
* request parameters, multiparts, etc. and maps them to IN headers, IN
* attachments and to the message body. It aims to eliminate low-level
* processing of org.apache.cxf.message.MessageContentsList. It also
* also adds more flexibility and simplicity to the response mapping.
* Only available for consumers. Default: The default style. For
* consumers this passes on a MessageContentsList to the route,
* requiring low-level processing in the route. This is the traditional
* binding style, which simply dumps the
* org.apache.cxf.message.MessageContentsList coming in from the CXF
* stack onto the IN message body. The user is then responsible for
* processing it according to the contract defined by the JAX-RS method
* signature. Custom: allows you to specify a custom binding through the
* binding option.
*
* The option is a:
* <code>org.apache.camel.component.cxf.jaxrs.BindingStyle</code> type.
*
* Default: Default
* Group: consumer
*
* @param bindingStyle the value to set
* @return the dsl builder
*/
default CxfRsEndpointConsumerBuilder bindingStyle(org.apache.camel.component.cxf.jaxrs.BindingStyle bindingStyle) {
doSetProperty("bindingStyle", bindingStyle);
return this;
}
/**
* Sets how requests and responses will be mapped to/from Camel. Two
* values are possible: SimpleConsumer: This binding style processes
* request parameters, multiparts, etc. and maps them to IN headers, IN
* attachments and to the message body. It aims to eliminate low-level
* processing of org.apache.cxf.message.MessageContentsList. It also
* also adds more flexibility and simplicity to the response mapping.
* Only available for consumers. Default: The default style. For
* consumers this passes on a MessageContentsList to the route,
* requiring low-level processing in the route. This is the traditional
* binding style, which simply dumps the
* org.apache.cxf.message.MessageContentsList coming in from the CXF
* stack onto the IN message body. The user is then responsible for
* processing it according to the contract defined by the JAX-RS method
* signature. Custom: allows you to specify a custom binding through the
* binding option.
*
* The option will be converted to a
* <code>org.apache.camel.component.cxf.jaxrs.BindingStyle</code> type.
*
* Default: Default
* Group: consumer
*
* @param bindingStyle the value to set
* @return the dsl builder
*/
default CxfRsEndpointConsumerBuilder bindingStyle(String bindingStyle) {
doSetProperty("bindingStyle", bindingStyle);
return this;
}
/**
* This option can override the endpointUrl that published from the WADL
* which can be accessed with resource address url plus _wadl.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: consumer
*
* @param publishedEndpointUrl the value to set
* @return the dsl builder
*/
default CxfRsEndpointConsumerBuilder publishedEndpointUrl(String publishedEndpointUrl) {
doSetProperty("publishedEndpointUrl", publishedEndpointUrl);
return this;
}
/**
* This option enables CXF Logging Feature which writes inbound and
* outbound REST messages to log.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: logging
*
* @param loggingFeatureEnabled the value to set
* @return the dsl builder
*/
default CxfRsEndpointConsumerBuilder loggingFeatureEnabled(boolean loggingFeatureEnabled) {
doSetProperty("loggingFeatureEnabled", loggingFeatureEnabled);
return this;
}
/**
* This option enables CXF Logging Feature which writes inbound and
* outbound REST messages to log.
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: false
* Group: logging
*
* @param loggingFeatureEnabled the value to set
* @return the dsl builder
*/
default CxfRsEndpointConsumerBuilder loggingFeatureEnabled(String loggingFeatureEnabled) {
doSetProperty("loggingFeatureEnabled", loggingFeatureEnabled);
return this;
}
/**
* To limit the total size of number of bytes the logger will output
* when logging feature has been enabled and -1 for no limit.
*
* The option is a: <code>int</code> type.
*
* Default: 49152
* Group: logging
*
* @param loggingSizeLimit the value to set
* @return the dsl builder
*/
default CxfRsEndpointConsumerBuilder loggingSizeLimit(int loggingSizeLimit) {
doSetProperty("loggingSizeLimit", loggingSizeLimit);
return this;
}
/**
* To limit the total size of number of bytes the logger will output
* when logging feature has been enabled and -1 for no limit.
*
* The option will be converted to a <code>int</code> type.
*
* Default: 49152
* Group: logging
*
* @param loggingSizeLimit the value to set
* @return the dsl builder
*/
default CxfRsEndpointConsumerBuilder loggingSizeLimit(String loggingSizeLimit) {
doSetProperty("loggingSizeLimit", loggingSizeLimit);
return this;
}
}
/**
* Advanced builder for endpoint consumers for the CXF-RS component.
*/
public | can |
java | apache__hadoop | hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/adapter/TestV1CredentialsProvider.java | {
"start": 6113,
"end": 6221
} | class ____ %s, but found %s.",
i, expectedClass, provider.getClass()));
}
}
public static | is |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/bytecode/enhance/internal/tracker/SimpleFieldTracker.java | {
"start": 594,
"end": 1469
} | class ____ implements DirtyTracker {
private String[] names;
private boolean suspended;
public SimpleFieldTracker() {
names = ArrayHelper.EMPTY_STRING_ARRAY;
}
@Override
public void add(String name) {
if ( suspended ) {
return;
}
if ( !contains( name ) ) {
names = Arrays.copyOf( names, names.length + 1 );
names[names.length - 1] = name;
}
}
@Override
public boolean contains(String name) {
for ( String existing : names ) {
if ( existing.equals( name ) ) {
return true;
}
}
return false;
}
@Override
public void clear() {
if ( !isEmpty() ) {
names = ArrayHelper.EMPTY_STRING_ARRAY;
}
}
@Override
public boolean isEmpty() {
return names.length == 0;
}
@Override
public String[] get() {
return names;
}
@Override
public void suspend(boolean suspend) {
this.suspended = suspend;
}
}
| SimpleFieldTracker |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/boot/models/annotations/internal/CollateAnnotation.java | {
"start": 465,
"end": 1293
} | class ____ implements Collate {
private String value;
/**
* Used in creating dynamic annotation instances (e.g. from XML)
*/
public CollateAnnotation(ModelsContext modelContext) {
}
/**
* Used in creating annotation instances from JDK variant
*/
public CollateAnnotation(Collate annotation, ModelsContext modelContext) {
this.value = annotation.value();
}
/**
* Used in creating annotation instances from Jandex variant
*/
public CollateAnnotation(Map<String, Object> attributeValues, ModelsContext modelContext) {
this.value = (String) attributeValues.get( "value" );
}
@Override
public Class<? extends Annotation> annotationType() {
return Collate.class;
}
@Override
public String value() {
return value;
}
public void value(String value) {
this.value = value;
}
}
| CollateAnnotation |
java | apache__camel | components/camel-ai/camel-neo4j/src/main/java/org/apache/camel/component/neo4j/Neo4jHeaders.java | {
"start": 892,
"end": 3336
} | class ____ {
@Metadata(description = "The operation to be performed.", javaType = "String",
enums = "CREATE_NODE,DELETE_NODE,RETRIEVE_NODES,RETRIEVE_NODES_AND_UPDATE_WITH_CYPHER_QUERY,ADD_OR_DELETE_NODE_WITH_CYPHER_QUERY,CREATE_VECTOR_INDEX,DROP_VECTOR_INDEX,CREATE_VECTOR,VECTOR_SIMILARITY_SEARCH")
public static final String OPERATION = "CamelNeo4jOperation";
@Metadata(description = "MATCH properties for the generated MATCH query. Needed only if we are matching properties and values. Example: {name: 'Alice'} ",
javaType = "String")
public static final String MATCH_PROPERTIES = "CamelNeo4jMatchProperties";
@Metadata(description = "Query Result", javaType = "String")
public static final String QUERY_RESULT = "CamelNeo4jQueryResult";
@Metadata(description = "Query Number of nodes created", javaType = "Long")
public static final String QUERY_RESULT_NODES_CREATED = "CamelNeo4jQueryResultNodesCreated";
@Metadata(description = "Query Number of nodes deleted", javaType = "Long")
public static final String QUERY_RESULT_NODES_DELETED = "CamelNeo4jQueryResultNodesDeleted";
@Metadata(description = "Query executed contains update", javaType = "Boolean")
public static final String QUERY_RESULT_CONTAINS_UPDATES = "CamelNeo4jQueryResultContainsUpdates";
@Metadata(description = "Query executed number of relationships created", javaType = "Long")
public static final String QUERY_RESULT_RELATIONSHIPS_CREATED = "CamelNeo4jQueryResultRelationshipsCreated";
@Metadata(description = "Query executed number of relationships deleted", javaType = "Long")
public static final String QUERY_RESULT_RELATIONSHIPS_DELETED = "CamelNeo4jQueryResultRelationshipsDeleted";
@Metadata(description = "Number of nodes retrieved", javaType = "Long")
public static final String QUERY_RETRIEVE_SIZE = "CamelNeo4jQueryResultRetrieveSize";
@Metadata(description = "Query execution time in Milliseconds", javaType = "Long")
public static final String QUERY_RETRIEVE_LIST_NEO4J_NODES = "CamelNeo4jQueryResultListNeo4jNodes";
@Metadata(description = "Vector Id for the embedding", javaType = "String")
public static final String VECTOR_ID = "CamelNeo4jVectorEmbeddingId";
@Metadata(description = "Label for the Node - used when inserting from Embeddings", javaType = "String")
public static final String LABEL = "CamelNeo4jLabel";
}
| Neo4jHeaders |
java | grpc__grpc-java | cronet/src/main/java/io/grpc/cronet/CronetClientTransport.java | {
"start": 1726,
"end": 5053
} | class ____ implements ConnectionClientTransport {
private final InternalLogId logId;
private final InetSocketAddress address;
private final String authority;
private final String userAgent;
private Listener listener;
private final Object lock = new Object();
@GuardedBy("lock")
private final Set<CronetClientStream> streams = Collections.newSetFromMap(
new IdentityHashMap<>());
private final Executor executor;
private final int maxMessageSize;
private final boolean alwaysUsePut;
private final TransportTracer transportTracer;
private Attributes attrs;
private final boolean useGetForSafeMethods;
private final boolean usePutForIdempotentMethods;
private final StreamBuilderFactory streamFactory;
// Indicates the transport is in go-away state: no new streams will be processed,
// but existing streams may continue.
@GuardedBy("lock")
private boolean goAway;
// Used to indicate the special phase while we are going to enter go-away state but before
// goAway is turned to true, see the comment at where this is set about why it is needed.
@GuardedBy("lock")
private boolean startedGoAway;
@GuardedBy("lock")
private Status goAwayStatus;
@GuardedBy("lock")
private boolean stopped;
@GuardedBy("lock")
// Whether this transport has started.
private boolean started;
CronetClientTransport(
StreamBuilderFactory streamFactory,
InetSocketAddress address,
String authority,
@Nullable String userAgent,
Attributes eagAttrs,
Executor executor,
int maxMessageSize,
boolean alwaysUsePut,
TransportTracer transportTracer,
boolean useGetForSafeMethods,
boolean usePutForIdempotentMethods) {
this.address = Preconditions.checkNotNull(address, "address");
this.logId = InternalLogId.allocate(getClass(), address.toString());
this.authority = authority;
this.userAgent = GrpcUtil.getGrpcUserAgent("cronet", userAgent);
this.maxMessageSize = maxMessageSize;
this.alwaysUsePut = alwaysUsePut;
this.executor = Preconditions.checkNotNull(executor, "executor");
this.streamFactory = Preconditions.checkNotNull(streamFactory, "streamFactory");
this.transportTracer = Preconditions.checkNotNull(transportTracer, "transportTracer");
this.attrs = Attributes.newBuilder()
.set(GrpcAttributes.ATTR_SECURITY_LEVEL, SecurityLevel.PRIVACY_AND_INTEGRITY)
.set(GrpcAttributes.ATTR_CLIENT_EAG_ATTRS, eagAttrs)
.build();
this.useGetForSafeMethods = useGetForSafeMethods;
this.usePutForIdempotentMethods = usePutForIdempotentMethods;
}
@Override
public ListenableFuture<SocketStats> getStats() {
SettableFuture<SocketStats> f = SettableFuture.create();
f.set(null);
return f;
}
@Override
public CronetClientStream newStream(final MethodDescriptor<?, ?> method, final Metadata headers,
final CallOptions callOptions, ClientStreamTracer[] tracers) {
Preconditions.checkNotNull(method, "method");
Preconditions.checkNotNull(headers, "headers");
final String defaultPath = "/" + method.getFullMethodName();
final String url = "https://" + authority + defaultPath;
final StatsTraceContext statsTraceCtx =
StatsTraceContext.newClientContext(tracers, attrs, headers);
| CronetClientTransport |
java | google__error-prone | check_api/src/test/java/com/google/errorprone/util/FindIdentifiersTest.java | {
"start": 37136,
"end": 37313
} | class ____ extends Super {
static void test() {}
}
""")
.addSourceLines(
"Test.java",
"""
| Reference |
java | grpc__grpc-java | core/src/main/java/io/grpc/internal/ManagedChannelServiceConfig.java | {
"start": 14892,
"end": 15351
} | class ____ extends InternalConfigSelector {
final ManagedChannelServiceConfig config;
/** Converts the service config to config selector. */
private ServiceConfigConvertedSelector(ManagedChannelServiceConfig config) {
this.config = config;
}
@Override
public Result selectConfig(PickSubchannelArgs args) {
return Result.newBuilder()
.setConfig(config)
.build();
}
}
}
| ServiceConfigConvertedSelector |
java | apache__kafka | clients/src/main/java/org/apache/kafka/clients/admin/internals/AdminApiDriver.java | {
"start": 15951,
"end": 16171
} | class ____ helps us to map requests that need to be sent
* to the internal `Call` implementation that is used internally in
* {@link org.apache.kafka.clients.admin.KafkaAdminClient}.
*/
public static | which |
java | apache__kafka | storage/api/src/main/java/org/apache/kafka/server/log/remote/storage/RemoteLogSegmentMetadataUpdate.java | {
"start": 1372,
"end": 4651
} | class ____ extends RemoteLogMetadata {
/**
* Universally unique remote log segment id.
*/
private final RemoteLogSegmentId remoteLogSegmentId;
/**
* Custom metadata.
*/
private final Optional<CustomMetadata> customMetadata;
/**
* It indicates the state in which the action is executed on this segment.
*/
private final RemoteLogSegmentState state;
/**
* @param remoteLogSegmentId Universally unique remote log segment id.
* @param eventTimestampMs Epoch time in milliseconds at which the remote log segment is copied to the remote tier storage.
* @param customMetadata Custom metadata.
* @param state State of the remote log segment.
* @param brokerId Broker id from which this event is generated.
*/
public RemoteLogSegmentMetadataUpdate(RemoteLogSegmentId remoteLogSegmentId, long eventTimestampMs,
Optional<CustomMetadata> customMetadata,
RemoteLogSegmentState state,
int brokerId) {
super(brokerId, eventTimestampMs);
this.remoteLogSegmentId = Objects.requireNonNull(remoteLogSegmentId, "remoteLogSegmentId can not be null");
this.customMetadata = Objects.requireNonNull(customMetadata, "customMetadata can not be null");
this.state = Objects.requireNonNull(state, "state can not be null");
}
/**
* @return Universally unique id of this remote log segment.
*/
public RemoteLogSegmentId remoteLogSegmentId() {
return remoteLogSegmentId;
}
/**
* @return Custom metadata.
*/
public Optional<CustomMetadata> customMetadata() {
return customMetadata;
}
/**
* It represents the state of the remote log segment. It can be one of the values of {@link RemoteLogSegmentState}.
*/
public RemoteLogSegmentState state() {
return state;
}
@Override
public TopicIdPartition topicIdPartition() {
return remoteLogSegmentId.topicIdPartition();
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
RemoteLogSegmentMetadataUpdate that = (RemoteLogSegmentMetadataUpdate) o;
return Objects.equals(remoteLogSegmentId, that.remoteLogSegmentId) &&
Objects.equals(customMetadata, that.customMetadata) &&
state == that.state &&
eventTimestampMs() == that.eventTimestampMs() &&
brokerId() == that.brokerId();
}
@Override
public int hashCode() {
return Objects.hash(remoteLogSegmentId, customMetadata, state, eventTimestampMs(), brokerId());
}
@Override
public String toString() {
return "RemoteLogSegmentMetadataUpdate{" +
"remoteLogSegmentId=" + remoteLogSegmentId +
", customMetadata=" + customMetadata +
", state=" + state +
", eventTimestampMs=" + eventTimestampMs() +
", brokerId=" + brokerId() +
'}';
}
}
| RemoteLogSegmentMetadataUpdate |
java | quarkusio__quarkus | independent-projects/arc/processor/src/main/java/io/quarkus/arc/processor/Reproducibility.java | {
"start": 152,
"end": 1244
} | class ____ {
static final Comparator<BeanInfo> BEAN_COMPARATOR = Comparator.comparing(BeanInfo::getIdentifier);
static final Comparator<ObserverInfo> OBSERVER_COMPARATOR = Comparator.comparing(ObserverInfo::getIdentifier);
static List<BeanInfo> orderedBeans(Collection<BeanInfo> beans) {
List<BeanInfo> list = new ArrayList<>(beans);
list.sort(BEAN_COMPARATOR);
return list;
}
static List<InterceptorInfo> orderedInterceptors(Collection<InterceptorInfo> interceptors) {
List<InterceptorInfo> list = new ArrayList<>(interceptors);
list.sort(BEAN_COMPARATOR);
return list;
}
static List<DecoratorInfo> orderedDecorators(Collection<DecoratorInfo> decorators) {
List<DecoratorInfo> list = new ArrayList<>(decorators);
list.sort(BEAN_COMPARATOR);
return list;
}
static List<ObserverInfo> orderedObservers(Collection<ObserverInfo> observers) {
List<ObserverInfo> list = new ArrayList<>(observers);
list.sort(OBSERVER_COMPARATOR);
return list;
}
}
| Reproducibility |
java | apache__camel | components/camel-tracing/src/main/java/org/apache/camel/tracing/SpanDecorator.java | {
"start": 1107,
"end": 4146
} | interface ____ {
/* Prefix for camel component tag */
String CAMEL_COMPONENT = "camel-";
SpanDecorator DEFAULT = new AbstractSpanDecorator() {
@Override
public String getComponent() {
return null;
}
@Override
public String getComponentClassName() {
return null;
}
};
/**
* This method indicates whether the component associated with the SpanDecorator should result in a new span being
* created.
*
* @return Whether a new span should be created
*/
boolean newSpan();
/**
* The camel component name associated with the decorator.
*
* @return The camel component name
*/
String getComponent();
/**
* The camel component FQN classname associated with the decorator.
*
* @return The camel component FQN classname
*/
String getComponentClassName();
/**
* This method returns the operation name to use with the Span representing this exchange and endpoint.
*
* @param exchange The exchange
* @param endpoint The endpoint
* @return The operation name
*/
String getOperationName(Exchange exchange, Endpoint endpoint);
/**
* This method adds appropriate details (tags/logs) to the supplied span based on the pre-processing of the
* exchange.
*
* @param span The span
* @param exchange The exchange
* @param endpoint The endpoint
*/
void pre(SpanAdapter span, Exchange exchange, Endpoint endpoint);
/**
* This method adds appropriate details (tags/logs) to the supplied span based on the post-processing of the
* exchange.
*
* @param span The span
* @param exchange The exchange
* @param endpoint The endpoint
*/
void post(SpanAdapter span, Exchange exchange, Endpoint endpoint);
/**
* This method returns the 'span.kind' value for use when the component is initiating a communication.
*
* @return The kind
*/
SpanKind getInitiatorSpanKind();
/**
* This method returns the 'span.kind' value for use when the component is receiving a communication.
*
* @return The kind
*/
SpanKind getReceiverSpanKind();
/**
* This method returns the map to be used for header extraction when the component is receiving a communication.
*
* @param map a map containing the objects
* @param encoding whether the headers are encoded
* @return The extraction map
*/
ExtractAdapter getExtractAdapter(Map<String, Object> map, boolean encoding);
/**
* This method returns the map to be used for header injection when the component is receiving a communication.
*
* @param map a map containing the objects
* @param encoding whether the headers are encoded
* @return The injection map
*/
InjectAdapter getInjectAdapter(Map<String, Object> map, boolean encoding);
}
| SpanDecorator |
java | spring-projects__spring-framework | spring-web/src/test/java/org/springframework/web/cors/UrlBasedCorsConfigurationSourceTests.java | {
"start": 1647,
"end": 3366
} | class ____ {
private final UrlBasedCorsConfigurationSource source = new UrlBasedCorsConfigurationSource();
@PathPatternsParameterizedTest
void empty(Function<String, MockHttpServletRequest> requestFactory) {
assertThat(source.getCorsConfiguration(requestFactory.apply("/bar/test.html"))).isNull();
}
@PathPatternsParameterizedTest
void registerAndMatch(Function<String, MockHttpServletRequest> requestFactory) {
CorsConfiguration config = new CorsConfiguration();
source.registerCorsConfiguration("/bar/**", config);
MockHttpServletRequest request = requestFactory.apply("/foo/test.html");
assertThat(source.getCorsConfiguration(request)).isNull();
request = requestFactory.apply("/bar/test.html");
assertThat(source.getCorsConfiguration(request)).isEqualTo(config);
}
@Test
void unmodifiableConfigurationsMap() {
assertThatExceptionOfType(UnsupportedOperationException.class)
.isThrownBy(() -> source.getCorsConfigurations().put("/**", new CorsConfiguration()));
}
@SuppressWarnings("removal")
@Test
void allowInitLookupPath() {
CorsConfiguration config = new CorsConfiguration();
source.registerCorsConfiguration("/**", config);
MockHttpServletRequest request = new MockHttpServletRequest("GET", "/foo");
assertThat(source.getCorsConfiguration(request))
.as("The path should be resolved lazily by default")
.isSameAs(config);
source.setAllowInitLookupPath(false);
assertThatIllegalArgumentException().isThrownBy(() -> source.getCorsConfiguration(request));
}
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.METHOD)
@ParameterizedTest(name = "[{index}] {0}")
@MethodSource("pathPatternsArguments")
private @ | UrlBasedCorsConfigurationSourceTests |
java | apache__flink | flink-core/src/main/java/org/apache/flink/util/InstantiationUtil.java | {
"start": 2008,
"end": 2232
} | class ____ {
private static final Logger LOG = LoggerFactory.getLogger(InstantiationUtil.class);
/** A custom ObjectInputStream that can load classes using a specific ClassLoader. */
public static | InstantiationUtil |
java | spring-projects__spring-security | itest/context/src/main/java/org/springframework/security/integration/python/PythonInterpreterPrePostInvocationAttributeFactory.java | {
"start": 972,
"end": 1616
} | class ____ implements PrePostInvocationAttributeFactory {
public PythonInterpreterPrePostInvocationAttributeFactory() {
PythonInterpreter.initialize(System.getProperties(), null, new String[] {});
}
@Override
public PreInvocationAttribute createPreInvocationAttribute(String preFilterAttribute, String filterObject,
String preAuthorizeAttribute) {
return new PythonInterpreterPreInvocationAttribute(preAuthorizeAttribute);
}
@Override
public PostInvocationAttribute createPostInvocationAttribute(String postFilterAttribute,
String postAuthorizeAttribute) {
return null;
}
}
| PythonInterpreterPrePostInvocationAttributeFactory |
java | spring-projects__spring-security | config/src/test/java/org/springframework/security/config/annotation/method/configuration/NamespaceGlobalMethodSecurityTests.java | {
"start": 11157,
"end": 11381
} | class ____ extends GlobalMethodSecurityConfiguration {
@Override
protected AccessDecisionManager accessDecisionManager() {
return new DenyAllAccessDecisionManager();
}
public static | CustomAccessDecisionManagerConfig |
java | spring-projects__spring-framework | spring-beans/src/testFixtures/java/org/springframework/beans/testfixture/beans/factory/aot/InnerBeanConfiguration.java | {
"start": 823,
"end": 923
} | class ____ {
public SimpleBean simpleBean() {
return new SimpleBean();
}
public static | Simple |
java | spring-projects__spring-framework | spring-context/src/test/java/org/springframework/aop/aspectj/ProceedTests.java | {
"start": 1227,
"end": 2931
} | class ____ {
private ClassPathXmlApplicationContext ctx;
private SimpleBean testBean;
private ProceedTestingAspect firstTestAspect;
private ProceedTestingAspect secondTestAspect;
@BeforeEach
void setup() {
this.ctx = new ClassPathXmlApplicationContext(getClass().getSimpleName() + ".xml", getClass());
testBean = (SimpleBean) ctx.getBean("testBean");
firstTestAspect = (ProceedTestingAspect) ctx.getBean("firstTestAspect");
secondTestAspect = (ProceedTestingAspect) ctx.getBean("secondTestAspect");
}
@AfterEach
void tearDown() {
this.ctx.close();
}
@Test
void testSimpleProceedWithChangedArgs() {
this.testBean.setName("abc");
assertThat(this.testBean.getName()).as("Name changed in around advice").isEqualTo("ABC");
}
@Test
void testGetArgsIsDefensive() {
this.testBean.setAge(5);
assertThat(this.testBean.getAge()).as("getArgs is defensive").isEqualTo(5);
}
@Test
void testProceedWithArgsInSameAspect() {
this.testBean.setMyFloat(1.0F);
assertThat(this.testBean.getMyFloat()).as("value changed in around advice").isGreaterThan(1.9F);
assertThat(this.firstTestAspect.getLastBeforeFloatValue()).as("changed value visible to next advice in chain")
.isGreaterThan(1.9F);
}
@Test
void testProceedWithArgsAcrossAspects() {
this.testBean.setSex("male");
assertThat(this.testBean.getSex()).as("value changed in around advice").isEqualTo("MALE");
assertThat(this.secondTestAspect.getLastBeforeStringValue()).as("changed value visible to next before advice in chain").isEqualTo("MALE");
assertThat(this.secondTestAspect.getLastAroundStringValue()).as("changed value visible to next around advice in chain").isEqualTo("MALE");
}
}
| ProceedTests |
java | elastic__elasticsearch | server/src/test/java/org/elasticsearch/search/suggest/phrase/DirectCandidateGeneratorTests.java | {
"start": 8349,
"end": 15295
} | class ____
assertEquals(first.stringDistance().getClass(), second.stringDistance().getClass());
assertEquals(first.suggestMode(), second.suggestMode());
}
/**
* test that bad xContent throws exception
*/
public void testIllegalXContent() throws IOException {
// test missing fieldname
String directGenerator = "{ }";
assertIllegalXContent(directGenerator, IllegalArgumentException.class, "Required [field]");
// test unknown field
directGenerator = "{ \"unknown_param\" : \"f1\" }";
assertIllegalXContent(directGenerator, IllegalArgumentException.class, "[direct_generator] unknown field [unknown_param]");
// test bad value for field (e.g. size expects an int)
directGenerator = "{ \"size\" : \"xxl\" }";
assertIllegalXContent(directGenerator, XContentParseException.class, "[direct_generator] failed to parse field [size]");
// test unexpected token
directGenerator = "{ \"size\" : [ \"xxl\" ] }";
assertIllegalXContent(
directGenerator,
XContentParseException.class,
"[direct_generator] size doesn't support values of type: START_ARRAY"
);
}
public void testFrequencyThreshold() throws Exception {
try (Directory dir = newDirectory()) {
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig());
int numDocs = randomIntBetween(10, 20);
for (int i = 0; i < numDocs; i++) {
Document doc = new Document();
if (i == 0) {
for (int j = 0; j < numDocs; j++) {
doc.add(new TextField("field", "fooz", Field.Store.NO));
}
} else {
doc.add(new TextField("field", "foo", Field.Store.NO));
}
writer.addDocument(doc);
}
try (IndexReader reader = DirectoryReader.open(writer)) {
writer.close();
DirectSpellChecker spellchecker = new DirectSpellChecker();
DirectCandidateGenerator generator = new DirectCandidateGenerator(
spellchecker,
"field",
SuggestMode.SUGGEST_MORE_POPULAR,
reader,
0f,
10
);
DirectCandidateGenerator.CandidateSet candidateSet = generator.drawCandidates(
new DirectCandidateGenerator.CandidateSet(
DirectCandidateGenerator.Candidate.EMPTY,
generator.createCandidate(new BytesRef("fooz"), false)
)
);
assertThat(candidateSet.candidates.length, equalTo(1));
assertThat(candidateSet.candidates[0].termStats.docFreq(), equalTo(numDocs - 1));
assertThat(candidateSet.candidates[0].termStats.totalTermFreq(), equalTo((long) numDocs - 1));
// test that it doesn't overflow
assertThat(generator.thresholdTermFrequency(Integer.MAX_VALUE), equalTo(Integer.MAX_VALUE));
spellchecker = new DirectSpellChecker();
spellchecker.setThresholdFrequency(0.5f);
generator = new DirectCandidateGenerator(spellchecker, "field", SuggestMode.SUGGEST_MORE_POPULAR, reader, 0f, 10);
candidateSet = generator.drawCandidates(
new DirectCandidateGenerator.CandidateSet(
DirectCandidateGenerator.Candidate.EMPTY,
generator.createCandidate(new BytesRef("fooz"), false)
)
);
assertThat(candidateSet.candidates.length, equalTo(1));
assertThat(candidateSet.candidates[0].termStats.docFreq(), equalTo(numDocs - 1));
assertThat(candidateSet.candidates[0].termStats.totalTermFreq(), equalTo((long) numDocs - 1));
// test that it doesn't overflow
assertThat(generator.thresholdTermFrequency(Integer.MAX_VALUE), equalTo(Integer.MAX_VALUE));
spellchecker = new DirectSpellChecker();
spellchecker.setThresholdFrequency(0.5f);
generator = new DirectCandidateGenerator(spellchecker, "field", SuggestMode.SUGGEST_ALWAYS, reader, 0f, 10);
candidateSet = generator.drawCandidates(
new DirectCandidateGenerator.CandidateSet(
DirectCandidateGenerator.Candidate.EMPTY,
generator.createCandidate(new BytesRef("fooz"), false)
)
);
assertThat(candidateSet.candidates.length, equalTo(01));
// test that it doesn't overflow
assertThat(generator.thresholdTermFrequency(Integer.MAX_VALUE), equalTo(Integer.MAX_VALUE));
}
}
}
private void assertIllegalXContent(String directGenerator, Class<? extends Exception> exceptionClass, String exceptionMsg)
throws IOException {
try (XContentParser parser = createParser(JsonXContent.jsonXContent, directGenerator)) {
Exception e = expectThrows(exceptionClass, () -> DirectCandidateGeneratorBuilder.PARSER.apply(parser, null));
assertThat(e.getMessage(), containsString(exceptionMsg));
}
}
/**
* create random {@link DirectCandidateGeneratorBuilder}
*/
public static DirectCandidateGeneratorBuilder randomCandidateGenerator() {
DirectCandidateGeneratorBuilder generator = new DirectCandidateGeneratorBuilder(randomAlphaOfLength(10));
maybeSet(generator::accuracy, randomFloat());
maybeSet(generator::maxEdits, randomIntBetween(1, 2));
maybeSet(generator::maxInspections, randomIntBetween(1, 20));
maybeSet(generator::maxTermFreq, randomFloat());
maybeSet(generator::minDocFreq, randomFloat());
maybeSet(generator::minWordLength, randomIntBetween(1, 20));
maybeSet(generator::prefixLength, randomIntBetween(1, 20));
maybeSet(generator::preFilter, randomAlphaOfLengthBetween(1, 20));
maybeSet(generator::postFilter, randomAlphaOfLengthBetween(1, 20));
maybeSet(generator::size, randomIntBetween(1, 20));
maybeSet(generator::sort, randomFrom("score", "frequency"));
maybeSet(generator::stringDistance, randomFrom("internal", "damerau_levenshtein", "levenshtein", "jaro_winkler", "ngram"));
maybeSet(generator::suggestMode, randomFrom("missing", "popular", "always"));
return generator;
}
private static DirectCandidateGeneratorBuilder copy(DirectCandidateGeneratorBuilder original) throws IOException {
return copyWriteable(original, new NamedWriteableRegistry(Collections.emptyList()), DirectCandidateGeneratorBuilder::new);
}
}
| here |
java | ReactiveX__RxJava | src/main/java/io/reactivex/rxjava3/internal/util/ExceptionHelper.java | {
"start": 888,
"end": 4374
} | class ____ {
/** Utility class. */
private ExceptionHelper() {
throw new IllegalStateException("No instances!");
}
/**
* If the provided Throwable is an Error this method
* throws it, otherwise returns a RuntimeException wrapping the error
* if that error is a checked exception.
* @param error the error to wrap or throw
* @return the (wrapped) error
*/
public static RuntimeException wrapOrThrow(Throwable error) {
if (error instanceof Error) {
throw (Error)error;
}
if (error instanceof RuntimeException) {
return (RuntimeException)error;
}
return new RuntimeException(error);
}
/**
* A singleton instance of a Throwable indicating a terminal state for exceptions,
* don't leak this.
*/
public static final Throwable TERMINATED = new Termination();
public static boolean addThrowable(AtomicReference<Throwable> field, Throwable exception) {
for (;;) {
Throwable current = field.get();
if (current == TERMINATED) {
return false;
}
Throwable update;
if (current == null) {
update = exception;
} else {
update = new CompositeException(current, exception);
}
if (field.compareAndSet(current, update)) {
return true;
}
}
}
public static Throwable terminate(AtomicReference<Throwable> field) {
Throwable current = field.get();
if (current != TERMINATED) {
current = field.getAndSet(TERMINATED);
}
return current;
}
/**
* Returns a flattened list of Throwables from tree-like CompositeException chain.
* @param t the starting throwable
* @return the list of Throwables flattened in a depth-first manner
*/
public static List<Throwable> flatten(Throwable t) {
List<Throwable> list = new ArrayList<>();
ArrayDeque<Throwable> deque = new ArrayDeque<>();
deque.offer(t);
while (!deque.isEmpty()) {
Throwable e = deque.removeFirst();
if (e instanceof CompositeException) {
CompositeException ce = (CompositeException) e;
List<Throwable> exceptions = ce.getExceptions();
for (int i = exceptions.size() - 1; i >= 0; i--) {
deque.offerFirst(exceptions.get(i));
}
} else {
list.add(e);
}
}
return list;
}
/**
* Workaround for Java 6 not supporting throwing a final Throwable from a catch block.
* @param <E> the generic exception type
* @param e the Throwable error to return or throw
* @return the Throwable e if it is a subclass of Exception
* @throws E the generic exception thrown
*/
@SuppressWarnings("unchecked")
public static <E extends Throwable> Exception throwIfThrowable(Throwable e) throws E {
if (e instanceof Exception) {
return (Exception)e;
}
throw (E)e;
}
public static String timeoutMessage(long timeout, TimeUnit unit) {
return "The source did not signal an event for "
+ timeout
+ " "
+ unit.toString().toLowerCase()
+ " and has been terminated.";
}
static final | ExceptionHelper |
java | apache__flink | flink-table/flink-table-code-splitter/src/test/resources/add-boolean/expected/TestRewriteInnerClass.java | {
"start": 563,
"end": 845
} | class ____ {
boolean funHasReturned$2;
public void fun(int a) {
if (a > 0) {
a += 5;
{ funHasReturned$2 = true; return; }
}
a -= 5;
{ funHasReturned$2 = true; return; }
}
}
}
| InnerClass2 |
java | netty__netty | common/src/test/java/io/netty/util/internal/logging/Log4J2LoggerTest.java | {
"start": 1214,
"end": 1360
} | class ____ support wrapped loggers,
* so There is no need to test it's method.<br>
* We only need to test the netty's {@linkplain InternalLogger} | to |
java | alibaba__nacos | common/src/main/java/com/alibaba/nacos/common/packagescan/classreading/ClassReader.java | {
"start": 15549,
"end": 15683
} | class ____ flags.
*/
public int getAccess() {
return readUnsignedShort(header);
}
/**
* the internal | access |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/tool/schema/Action.java | {
"start": 8320,
"end": 9277
} | enum ____. An empty value will return {@link #NONE}.
*
* @throws IllegalArgumentException If the incoming value is unrecognized
*/
public static Action interpretHbm2ddlSetting(Object value) {
if ( value == null ) {
return NONE;
}
if ( value instanceof Action action ) {
return action;
}
final String name = value.toString().trim();
if ( name.isEmpty() ) {
// default is NONE
return NONE;
}
// prefer hbm2ddl names
for ( var action : values() ) {
final String hbm2ddlName = action.getExternalHbm2ddlName();
if ( hbm2ddlName != null && hbm2ddlName.equals( name ) ) {
return action;
}
}
// then check JPA external names
for ( var action : values() ) {
final String jpaName = action.getExternalJpaName();
if ( jpaName != null && jpaName.equals( name ) ) {
return action;
}
}
throw new IllegalArgumentException( "Unrecognized '" + HBM2DDL_AUTO + "' setting: '" + name + "'" );
}
}
| value |
java | spring-projects__spring-framework | spring-webmvc/src/main/java/org/springframework/web/servlet/support/JstlUtils.java | {
"start": 1244,
"end": 1406
} | class ____ preparing JSTL views,
* in particular for exposing a JSTL localization context.
*
* @author Juergen Hoeller
* @since 20.08.2003
*/
public abstract | for |
java | alibaba__nacos | plugin-default-impl/nacos-default-auth-plugin/src/main/java/com/alibaba/nacos/plugin/auth/impl/persistence/ExternalRolePersistServiceImpl.java | {
"start": 1607,
"end": 7704
} | class ____ implements RolePersistService {
private static final Logger LOGGER = LoggerFactory.getLogger("com.alibaba.nacos.persistence");
private JdbcTemplate jt;
private String dataSourceType = "";
private static final String PATTERN_STR = "*";
@PostConstruct
protected void init() {
DataSourceService dataSource = DynamicDataSource.getInstance().getDataSource();
jt = dataSource.getJdbcTemplate();
dataSourceType = dataSource.getDataSourceType();
}
@Override
public Page<RoleInfo> getRoles(int pageNo, int pageSize) {
AuthPaginationHelper<RoleInfo> helper = createPaginationHelper();
String sqlCountRows = "SELECT count(*) FROM (SELECT DISTINCT role FROM roles) roles WHERE ";
String sqlFetchRows = "SELECT role,username FROM roles WHERE ";
String where = " 1=1 ";
try {
Page<RoleInfo> pageInfo = helper.fetchPage(sqlCountRows + where, sqlFetchRows + where,
new ArrayList<String>().toArray(), pageNo, pageSize, ROLE_INFO_ROW_MAPPER);
if (pageInfo == null) {
pageInfo = new Page<>();
pageInfo.setTotalCount(0);
pageInfo.setPageItems(new ArrayList<>());
}
return pageInfo;
} catch (CannotGetJdbcConnectionException e) {
LOGGER.error("[db-error] " + e.toString(), e);
throw e;
}
}
@Override
public Page<RoleInfo> getRolesByUserNameAndRoleName(String username, String role, int pageNo, int pageSize) {
AuthPaginationHelper<RoleInfo> helper = createPaginationHelper();
String sqlCountRows = "SELECT count(*) FROM roles ";
String sqlFetchRows = "SELECT role,username FROM roles ";
StringBuilder where = new StringBuilder(" WHERE 1 = 1 ");
List<String> params = new ArrayList<>();
if (StringUtils.isNotBlank(username)) {
where.append(" AND username = ? ");
params.add(username);
}
if (StringUtils.isNotBlank(role)) {
where.append(" AND role = ? ");
params.add(role);
}
try {
return helper.fetchPage(sqlCountRows + where, sqlFetchRows + where, params.toArray(), pageNo, pageSize,
ROLE_INFO_ROW_MAPPER);
} catch (CannotGetJdbcConnectionException e) {
LOGGER.error("[db-error] " + e.toString(), e);
throw e;
}
}
/**
* Execute add role operation.
*
* @param role role string value.
* @param userName username string value.
*/
@Override
public void addRole(String role, String userName) {
String sql = "INSERT INTO roles (role, username) VALUES (?, ?)";
try {
jt.update(sql, role, userName);
} catch (CannotGetJdbcConnectionException e) {
LOGGER.error("[db-error] " + e.toString(), e);
throw e;
}
}
/**
* Execute delete role operation.
*
* @param role role string value.
*/
@Override
public void deleteRole(String role) {
String sql = "DELETE FROM roles WHERE role=?";
try {
jt.update(sql, role);
} catch (CannotGetJdbcConnectionException e) {
LOGGER.error("[db-error] " + e.toString(), e);
throw e;
}
}
/**
* Execute delete role operation.
*
* @param role role string value.
* @param username username string value.
*/
@Override
public void deleteRole(String role, String username) {
String sql = "DELETE FROM roles WHERE role=? AND username=?";
try {
jt.update(sql, role, username);
} catch (CannotGetJdbcConnectionException e) {
LOGGER.error("[db-error] " + e.toString(), e);
throw e;
}
}
@Override
public List<String> findRolesLikeRoleName(String role) {
String sql = "SELECT role FROM roles WHERE role LIKE ?";
List<String> users = this.jt.queryForList(sql, new String[] {String.format("%%%s%%", role)}, String.class);
return users;
}
@Override
public String generateLikeArgument(String s) {
String underscore = "_";
if (s.contains(underscore)) {
s = s.replaceAll(underscore, "\\\\_");
}
String fuzzySearchSign = "\\*";
String sqlLikePercentSign = "%";
if (s.contains(PATTERN_STR)) {
return s.replaceAll(fuzzySearchSign, sqlLikePercentSign);
} else {
return s;
}
}
@Override
public Page<RoleInfo> findRolesLike4Page(String username, String role, int pageNo, int pageSize) {
String sqlCountRows = "SELECT count(*) FROM roles";
String sqlFetchRows = "SELECT role, username FROM roles";
StringBuilder where = new StringBuilder(" WHERE 1 = 1 ");
List<String> params = new ArrayList<>();
if (StringUtils.isNotBlank(username)) {
where.append(" AND username LIKE ? ");
params.add(generateLikeArgument(username));
}
if (StringUtils.isNotBlank(role)) {
where.append(" AND role LIKE ? ");
params.add(generateLikeArgument(role));
}
AuthPaginationHelper<RoleInfo> helper = createPaginationHelper();
try {
return helper.fetchPage(sqlCountRows + where, sqlFetchRows + where, params.toArray(), pageNo, pageSize,
ROLE_INFO_ROW_MAPPER);
} catch (CannotGetJdbcConnectionException e) {
LOGGER.error("[db-error] " + e.toString(), e);
throw e;
}
}
@Override
public <E> AuthPaginationHelper<E> createPaginationHelper() {
return new AuthExternalPaginationHelperImpl<>(jt, dataSourceType);
}
private static final | ExternalRolePersistServiceImpl |
java | google__guice | extensions/assistedinject/test/com/google/inject/assistedinject/FactoryModuleBuilderTest.java | {
"start": 15029,
"end": 15201
} | class ____ implements Car {
private final Color color;
@Inject
public Mustang(@Assisted Color color) {
this.color = color;
}
}
public static | Mustang |
java | apache__camel | components/camel-pulsar/src/main/java/org/apache/camel/component/pulsar/utils/message/PulsarMessageUtils.java | {
"start": 2106,
"end": 3985
} | class ____ {
private PulsarMessageUtils() {
}
public static Exchange updateExchange(final Message<byte[]> message, final Exchange input) {
final Exchange output = input.copy();
org.apache.camel.Message msg = output.getIn();
msg.setHeader(EVENT_TIME, message.getEventTime());
msg.setHeader(MESSAGE_ID, message.getMessageId());
msg.setHeader(KEY, message.getKey());
msg.setHeader(KEY_BYTES, message.getKeyBytes());
msg.setHeader(PRODUCER_NAME, message.getProducerName());
msg.setHeader(TOPIC_NAME, message.getTopicName());
msg.setHeader(SEQUENCE_ID, message.getSequenceId());
msg.setHeader(PUBLISH_TIME, message.getPublishTime());
msg.setHeader(PULSAR_REDELIVERY_COUNT, message.getRedeliveryCount());
msg.setHeader(PROPERTIES, message.getProperties());
msg.setHeader(Exchange.MESSAGE_TIMESTAMP, message.getPublishTime());
msg.setBody(message.getValue());
output.setIn(msg);
return output;
}
public static Exchange updateExchangeWithException(final Exception exception, final Exchange input) {
final Exchange output = input.copy();
output.setException(exception);
return output;
}
public static byte[] serialize(final Object body) throws IOException {
final ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
final ObjectOutputStream outputStream = new ObjectOutputStream(byteArrayOutputStream);
try {
outputStream.writeObject(body);
return byteArrayOutputStream.toByteArray();
} catch (NotSerializableException exception) {
throw new RuntimeCamelException(exception);
} finally {
byteArrayOutputStream.close();
outputStream.close();
}
}
}
| PulsarMessageUtils |
java | apache__flink | flink-runtime/src/test/java/org/apache/flink/runtime/state/StateBackendTestUtils.java | {
"start": 14791,
"end": 15176
} | interface ____<T>
extends FunctionWithException<T, T, Exception>, Serializable {}
// ------------------------------------------------------------------------
/** Private constructor to prevent instantiation. */
private StateBackendTestUtils() {}
// ------------------------------------------------------------------------
}
| SerializableFunctionWithException |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/AvoidObjectArraysTest.java | {
"start": 1204,
"end": 1962
} | class ____ {
// BUG: Diagnostic contains: consider an Iterable<Object> instead
public void objectArray(Object[] objectArray) {}
// BUG: Diagnostic contains: consider an Iterable<String> instead
public void stringArray(String[] stringArray) {}
public void intArray(int[] intArray) {}
public void objectValue(Object objectValue) {}
public void stringValue(String stringValue) {}
public void intValue(int intValue) {}
}
""")
.doTest();
}
@Test
public void methodParam_staticMethods() {
compilationHelper
.addSourceLines(
"ArrayUsage.java",
"""
public | ArrayUsage |
java | spring-projects__spring-boot | integration-test/spring-boot-actuator-integration-tests/src/test/java/org/springframework/boot/actuate/endpoint/web/annotation/AbstractWebEndpointIntegrationTests.java | {
"start": 31764,
"end": 32029
} | class ____ {
@ReadOperation
String read(SecurityContext securityContext) {
Principal principal = securityContext.getPrincipal();
return (principal != null) ? principal.getName() : "None";
}
}
@Endpoint(id = "userinrole")
static | SecurityContextEndpoint |
java | apache__camel | dsl/camel-jbang/camel-jbang-core/src/main/java/org/apache/camel/dsl/jbang/core/commands/Run.java | {
"start": 4057,
"end": 89716
} | class ____ extends CamelCommand {
// special template for running camel-jbang in docker containers
public static final String RUN_JAVA_SH = "classpath:templates/run-java.sh";
public static final String RUN_SETTINGS_FILE = "camel-jbang-run.properties";
public static final String RUN_PLATFORM_DIR = ".camel-jbang-run";
private static final String[] ACCEPTED_XML_ROOT_ELEMENT_NAMES = new String[] {
"route", "routes",
"routeTemplate", "routeTemplates",
"templatedRoute", "templatedRoutes",
"rest", "rests",
"routeConfiguration",
"beans", "blueprint", "camel"
};
private static final Set<String> ACCEPTED_XML_ROOT_ELEMENTS
= new HashSet<>(Arrays.asList(ACCEPTED_XML_ROOT_ELEMENT_NAMES));
private static final String OPENAPI_GENERATED_FILE = CommandLineHelper.CAMEL_JBANG_WORK_DIR + "/generated-openapi.yaml";
private static final String CLIPBOARD_GENERATED_FILE = CommandLineHelper.CAMEL_JBANG_WORK_DIR + "/generated-clipboard";
private static final Pattern PACKAGE_PATTERN = Pattern.compile(
"^\\s*package\\s+([a-zA-Z][.\\w]*)\\s*;.*$", Pattern.MULTILINE);
private static final Pattern CLASS_PATTERN = Pattern.compile(
"^\\s*public class\\s+([a-zA-Z0-9]*)[\\s+|;].*$", Pattern.MULTILINE);
public boolean exportRun;
protected Path exportBaseDir;
boolean scriptRun;
boolean transformRun;
boolean transformMessageRun;
boolean debugRun;
private Path logFile;
public long spawnPid;
private Printer quietPrinter;
@Parameters(description = "The Camel file(s) to run. If no files specified then application.properties is used as source for which files to run.",
arity = "0..9", paramLabel = "<files>", parameterConsumer = FilesConsumer.class)
Path[] filePaths; // Defined only for file path completion; the field never used
public List<String> files = new ArrayList<>();
@Option(names = { "--runtime" },
completionCandidates = RuntimeCompletionCandidates.class,
defaultValue = "camel-main",
converter = RuntimeTypeConverter.class,
description = "Runtime (${COMPLETION-CANDIDATES})")
RuntimeType runtime = RuntimeType.main;
@Option(names = { "--source-dir" },
description = "Source directory for dynamically loading Camel file(s) to run. When using this, then files cannot be specified at the same time.")
String sourceDir;
@Option(names = { "--background" }, defaultValue = "false", description = "Run in the background")
public boolean background;
@Option(names = { "--background-wait" }, defaultValue = "true",
description = "To wait for run in background to startup successfully, before returning")
public boolean backgroundWait = true;
@Option(names = { "--empty" }, defaultValue = "false", description = "Run an empty Camel without loading source files")
public boolean empty;
@Option(names = { "--camel-version" }, description = "To run using a different Camel version than the default version.")
String camelVersion;
@Option(names = { "--camel-spring-boot-version" },
description = "To run using a different Camel Spring Boot version than the default version.")
String camelSpringBootVersion;
@Option(names = { "--kamelets-version" }, description = "Apache Camel Kamelets version")
String kameletsVersion;
@CommandLine.Option(names = { "--quarkus-group-id" }, description = "Quarkus Platform Maven groupId",
defaultValue = "io.quarkus.platform")
String quarkusGroupId = "io.quarkus.platform";
@CommandLine.Option(names = { "--quarkus-artifact-id" }, description = "Quarkus Platform Maven artifactId",
defaultValue = "quarkus-bom")
String quarkusArtifactId = "quarkus-bom";
@Option(names = { "--quarkus-version" }, description = "Quarkus Platform version",
defaultValue = RuntimeType.QUARKUS_VERSION)
String quarkusVersion = RuntimeType.QUARKUS_VERSION;
@Option(names = { "--spring-boot-version" }, description = "Spring Boot version",
defaultValue = RuntimeType.SPRING_BOOT_VERSION)
String springBootVersion = RuntimeType.SPRING_BOOT_VERSION;
@Option(names = { "--profile" }, scope = CommandLine.ScopeType.INHERIT, defaultValue = "dev",
description = "Profile to run (dev, test, or prod).")
String profile = "dev";
@Option(names = { "--dep", "--dependency" }, description = "Add additional dependencies",
split = ",")
List<String> dependencies = new ArrayList<>();
@CommandLine.Option(names = { "--repo", "--repos" },
description = "Additional maven repositories (Use commas to separate multiple repositories)")
String repositories;
@Option(names = { "--gav" }, description = "The Maven group:artifact:version (used during exporting)")
String gav;
@Option(names = { "--maven-settings" },
description = "Optional location of Maven settings.xml file to configure servers, repositories, mirrors and proxies."
+ " If set to \"false\", not even the default ~/.m2/settings.xml will be used.")
String mavenSettings;
@Option(names = { "--maven-settings-security" },
description = "Optional location of Maven settings-security.xml file to decrypt settings.xml")
String mavenSettingsSecurity;
@Option(names = { "--maven-central-enabled" }, defaultValue = "true",
description = "Whether downloading JARs from Maven Central repository is enabled")
boolean mavenCentralEnabled = true;
@Option(names = { "--maven-apache-snapshot-enabled" }, defaultValue = "true",
description = "Whether downloading JARs from ASF Maven Snapshot repository is enabled")
boolean mavenApacheSnapshotEnabled = true;
@Option(names = { "--fresh" }, defaultValue = "false", description = "Make sure we use fresh (i.e. non-cached) resources")
boolean fresh;
@Option(names = { "--download" }, defaultValue = "true",
description = "Whether to allow automatic downloading JAR dependencies (over the internet)")
boolean download = true;
@CommandLine.Option(names = { "--package-scan-jars" }, defaultValue = "false",
description = "Whether to automatic package scan JARs for custom Spring or Quarkus beans making them available for Camel JBang")
boolean packageScanJars;
@Option(names = { "--jvm-debug" }, parameterConsumer = DebugConsumer.class, paramLabel = "<true|false|port>",
description = "To enable JVM remote debugging on port 4004 by default. The supported values are true to " +
"enable the remote debugging, false to disable the remote debugging or a number to use a custom port")
int jvmDebugPort;
@Option(names = { "--name" }, defaultValue = "CamelJBang", description = "The name of the Camel application")
String name;
@CommandLine.Option(names = { "--exclude" }, description = "Exclude files by name or pattern")
List<String> excludes = new ArrayList<>();
@Option(names = { "--logging" }, defaultValue = "true", description = "Can be used to turn off logging")
boolean logging = true;
@Option(names = { "--logging-level" }, completionCandidates = LoggingLevelCompletionCandidates.class,
defaultValue = "info", description = "Logging level (${COMPLETION-CANDIDATES})")
String loggingLevel;
@Option(names = { "--logging-color" }, defaultValue = "true", description = "Use colored logging")
boolean loggingColor = true;
@Option(names = { "--logging-json" }, defaultValue = "false", description = "Use JSON logging (ECS Layout)")
boolean loggingJson;
@Option(names = { "--logging-config-path" }, description = "Path to file with custom logging configuration")
String loggingConfigPath;
@Option(names = { "--logging-category" }, description = "Used for individual logging levels (ex: org.apache.kafka=DEBUG)")
List<String> loggingCategory = new ArrayList<>();
@Option(names = { "--max-messages" }, defaultValue = "0", description = "Max number of messages to process before stopping")
int maxMessages;
@Option(names = { "--max-seconds" }, defaultValue = "0", description = "Max seconds to run before stopping")
int maxSeconds;
@Option(names = { "--max-idle-seconds" }, defaultValue = "0",
description = "For how long time in seconds Camel can be idle before stopping")
int maxIdleSeconds;
@Option(names = { "--reload", "--dev" },
description = "Enables dev mode (live reload when source files are updated and saved)")
boolean dev;
@Option(names = { "--trace" }, defaultValue = "false",
description = "Enables trace logging of the routed messages")
boolean trace;
@Option(names = { "--backlog-trace" }, defaultValue = "false",
description = "Enables backlog tracing of the routed messages")
boolean backlogTrace;
@Option(names = { "--properties" },
description = "comma separated list of properties file" +
" (ex. /path/to/file.properties,/path/to/other.properties")
public String propertiesFiles;
@Option(names = { "--prop", "--property" }, description = "Additional properties (override existing)", arity = "0")
public String[] property;
@Option(names = { "--stub" }, description = "Stubs all the matching endpoint uri with the given component name or pattern."
+ " Multiple names can be separated by comma. (all = stub all endpoints).")
String stub;
@Option(names = { "--jfr" }, defaultValue = "false",
description = "Enables Java Flight Recorder saving recording to disk on exit")
boolean jfr;
@Option(names = { "--jfr-profile" },
description = "Java Flight Recorder profile to use (such as default or profile)")
String jfrProfile;
@Option(names = { "--local-kamelet-dir" },
description = "Local directory (or github link) for loading Kamelets (takes precedence). Multiple directories can be specified separated by comma.")
String localKameletDir;
@Option(names = { "--port" }, description = "Embeds a local HTTP server on this port", defaultValue = "8080")
int port;
@Option(names = { "--management-port" }, description = "To use a dedicated port for HTTP management")
int managementPort = -1;
@Option(names = { "--console" }, defaultValue = "false",
description = "Developer console at /q/dev on local HTTP server (port 8080 by default)")
boolean console;
@Deprecated
@Option(names = { "--health" }, defaultValue = "false",
description = "Deprecated: use --observe instead. Health check at /q/health on local HTTP server (port 8080 by default)")
boolean health;
@Deprecated
@Option(names = { "--metrics" }, defaultValue = "false",
description = "Deprecated: use --observe instead. Metrics (Micrometer and Prometheus) at /q/metrics on local HTTP server (port 8080 by default)")
boolean metrics;
@Option(names = { "--observe" }, defaultValue = "false",
description = "Enable observability services")
boolean observe;
@Option(names = { "--modeline" }, defaultValue = "true",
description = "Whether to support JBang style //DEPS to specify additional dependencies")
boolean modeline = true;
@Option(names = { "--open-api" }, description = "Adds an OpenAPI spec from the given file (json or yaml file)")
String openapi;
@Option(names = { "--code" }, description = "Run the given text or file as Java DSL routes")
String code;
@Option(names = { "--verbose" }, defaultValue = "false",
description = "Verbose output of startup activity (dependency resolution and downloading")
boolean verbose;
@Option(names = { "--ignore-loading-error" }, defaultValue = "false",
description = "Whether to ignore route loading and compilation errors (use this with care!)")
protected boolean ignoreLoadingError;
@Option(names = { "--lazy-bean" }, defaultValue = "false",
description = "Whether to use lazy bean initialization (can help with complex classloading issues)")
protected boolean lazyBean;
@Option(names = { "--prompt" }, defaultValue = "false",
description = "Allow user to type in required parameters in prompt if not present in application")
boolean prompt;
@Option(names = { "--skip-plugins" }, defaultValue = "false",
description = "Skip plugins during export")
boolean skipPlugins;
public Run(CamelJBangMain main) {
super(main);
}
@Override
public boolean disarrangeLogging() {
if (exportRun) {
return false;
}
if (RuntimeType.quarkus == runtime) {
return true;
} else if (RuntimeType.springBoot == runtime) {
return true;
}
return false;
}
@Override
public Integer doCall() throws Exception {
if (!exportRun) {
printConfigurationValues("Running integration with the following configuration:");
}
// run
return run();
}
public Integer runExport() throws Exception {
return runExport(false);
}
protected Integer runExport(boolean ignoreLoadingError) throws Exception {
// just boot silently and exit
this.exportRun = true;
this.ignoreLoadingError = ignoreLoadingError;
return run();
}
protected Integer runTransform(boolean ignoreLoadingError) throws Exception {
// just boot silently and exit
this.transformRun = true;
this.ignoreLoadingError = ignoreLoadingError;
this.name = "transform";
return run();
}
public Integer runTransformMessage(String camelVersion, String repositories) throws Exception {
// just boot silently an empty camel in the background and exit
this.transformMessageRun = true;
this.background = true;
this.camelVersion = camelVersion;
this.repositories = repositories;
this.empty = true;
this.ignoreLoadingError = true;
this.name = "transform";
return run();
}
protected Integer runScript(String file) throws Exception {
this.files.add(file);
this.scriptRun = true;
return run();
}
protected Integer runDebug() throws Exception {
this.debugRun = true;
return run();
}
private boolean isDebugMode() {
return jvmDebugPort > 0;
}
private void writeSetting(KameletMain main, Properties existing, String key, String value) {
String val = existing != null ? existing.getProperty(key, value) : value;
if (val != null) {
main.addInitialProperty(key, val);
writeSettings(key, val);
}
}
private void writeSetting(KameletMain main, Properties existing, String key, Supplier<String> value) {
String val = existing != null ? existing.getProperty(key, value.get()) : value.get();
if (val != null) {
main.addInitialProperty(key, val);
writeSettings(key, val);
}
}
private Properties loadProfilePropertiesFile(Path file) throws Exception {
Properties prop = new CamelCaseOrderedProperties();
if (Files.exists(file)) {
try (InputStream is = Files.newInputStream(file)) {
prop.load(is);
}
}
// special for routes include pattern that we need to "fix" after reading from properties
// to make this work in run command
String value = prop.getProperty("camel.main.routesIncludePattern");
if (value != null) {
// if not scheme then must use file: as this is what run command expects
StringJoiner sj = new StringJoiner(",");
for (String part : value.split(",")) {
if (!part.contains(":")) {
part = "file:" + part;
}
sj.add(part);
}
value = sj.toString();
prop.setProperty("camel.main.routesIncludePattern", value);
}
return prop;
}
private int run() throws Exception {
if (!empty && !files.isEmpty() && sourceDir != null) {
// cannot have both files and source dir at the same time
printer().printErr("Cannot specify both file(s) and source-dir at the same time.");
return 1;
}
Path baseDir = exportBaseDir != null ? exportBaseDir : Path.of(".");
// special if user type: camel run . or camel run dirName
if (sourceDir == null && files != null && files.size() == 1) {
String name = FileUtil.stripTrailingSeparator(files.get(0));
Path first = Path.of(name);
if (Files.isDirectory(first)) {
baseDir = first;
RunHelper.dirToFiles(name, files);
}
}
if (!exportRun) {
if (RuntimeType.quarkus == runtime) {
return runQuarkus();
} else if (RuntimeType.springBoot == runtime) {
return runSpringBoot();
}
}
Path work = CommandLineHelper.getWorkDir();
removeDir(work);
if (!Files.exists(work)) {
try {
Files.createDirectories(work);
} catch (IOException e) {
printer().println("WARN: Failed to create working directory: " + work.toAbsolutePath());
}
}
Properties profileProperties = !empty ? loadProfileProperties(baseDir) : null;
configureLogging(baseDir);
if (openapi != null) {
generateOpenApi();
}
// route code as option
if (!empty && code != null) {
// code may refer to an existing file
String name = "CodeRoute";
boolean file = false;
Path codePath = Paths.get(code);
if (Files.isRegularFile(codePath) && Files.exists(codePath)) {
// must be a java file
boolean java = codePath.getFileName().toString().endsWith(".java");
if (!java) {
printer().printErr("Only java source files is accepted when using --code parameter");
return 1;
}
code = Files.readString(codePath);
name = FileUtil.onlyName(codePath.getFileName().toString());
file = true;
}
// store code in temporary file
String codeFile = loadFromCode(code, name, file);
// use code as first file
files.add(0, codeFile);
}
boolean autoDetectFiles = files.isEmpty() || RUN_JAVA_SH.equals(files.get(0));
// if no specific file to run then try to auto-detect
if (!empty && autoDetectFiles) {
if (sourceDir != null) {
// silent-run then auto-detect all initial files for source-dir
Path sourceDirPath = Paths.get(sourceDir);
try (Stream<Path> paths = Files.list(sourceDirPath)) {
paths.forEach(p -> files.add(sourceDirPath.resolve(p.getFileName()).toString()));
} catch (IOException e) {
// Ignore
}
} else {
String routes
= profileProperties != null ? profileProperties.getProperty("camel.main.routesIncludePattern") : null;
if (routes == null) {
if (!exportRun) {
String run = "run";
if (transformRun) {
run = "transform";
} else if (debugRun) {
run = "debug";
}
System.err
.println("Cannot " + run
+ " because application.properties file does not exist or camel.main.routesIncludePattern is not configured");
return 1;
} else {
// silent-run then auto-detect all files
try {
Files.list(Paths.get("."))
.map(p -> p.getFileName().toString())
.forEach(files::add);
} catch (IOException e) {
// Ignore
}
}
}
}
}
// filter out duplicate files
if (!files.isEmpty()) {
files = files.stream().distinct().collect(Collectors.toList());
}
final KameletMain main = createMainInstance();
main.setProfile(profile);
if (repositories != null && !repositories.isBlank()) {
main.setRepositories(repositories);
}
main.setDownload(download);
main.setPackageScanJars(packageScanJars);
main.setFresh(fresh);
main.setMavenSettings(mavenSettings);
main.setMavenSettingsSecurity(mavenSettingsSecurity);
main.setMavenCentralEnabled(mavenCentralEnabled);
main.setMavenApacheSnapshotEnabled(mavenApacheSnapshotEnabled);
main.setDownloadListener(new RunDownloadListener());
main.setAppName("Apache Camel (JBang)");
if (stub != null) {
if ("all".equals(stub)) {
// stub all components only
stub = "component:*";
}
// we need to match by wildcard, to make it easier
StringJoiner sj = new StringJoiner(",");
for (String n : stub.split(",")) {
// you can either refer to a name or a specific endpoint
// if there is a colon then we assume it's a specific endpoint then we should not add wildcard
boolean colon = n.contains(":");
if (!colon && !n.endsWith("*")) {
n = "component:" + n + "*";
}
sj.add(n);
}
stub = sj.toString();
writeSetting(main, profileProperties, STUB, stub);
main.setStubPattern(stub);
}
if (dev) {
writeSetting(main, profileProperties, "camel.main.routesReloadEnabled", "true");
// allow quick shutdown during development
writeSetting(main, profileProperties, "camel.main.shutdownTimeout", "5");
}
if (sourceDir != null) {
writeSetting(main, profileProperties, SOURCE_DIR, sourceDir);
}
if (trace) {
writeSetting(main, profileProperties, "camel.main.tracing", "true");
}
if (backlogTrace) {
writeSetting(main, profileProperties, "camel.trace.enabled", "true");
}
if (modeline) {
writeSetting(main, profileProperties, "camel.main.modeline", "true");
// configure eager
main.configure().withModeline(true);
}
if (ignoreLoadingError) {
writeSetting(main, profileProperties, IGNORE_LOADING_ERROR, "true");
}
if (lazyBean) {
writeSetting(main, profileProperties, LAZY_BEAN, "true");
}
if (prompt) {
writeSetting(main, profileProperties, PROMPT, "true");
}
writeSetting(main, profileProperties, COMPILE_WORK_DIR,
Paths.get(CommandLineHelper.CAMEL_JBANG_WORK_DIR, "compile").toString());
if (gav != null) {
writeSetting(main, profileProperties, GAV, gav);
}
writeSetting(main, profileProperties, OPEN_API, openapi);
if (repositories != null) {
writeSetting(main, profileProperties, REPOS, repositories);
}
writeSetting(main, profileProperties, HEALTH, health ? "true" : "false");
writeSetting(main, profileProperties, METRICS, metrics ? "true" : "false");
writeSetting(main, profileProperties, CONSOLE, console ? "true" : "false");
writeSetting(main, profileProperties, VERBOSE, verbose ? "true" : "false");
// the runtime version of Camel is what is loaded via the catalog
writeSetting(main, profileProperties, CAMEL_VERSION, new DefaultCamelCatalog().getCatalogVersion());
writeSetting(main, profileProperties, SPRING_BOOT_VERSION, springBootVersion);
writeSetting(main, profileProperties, QUARKUS_VERSION, quarkusVersion);
writeSetting(main, profileProperties, QUARKUS_GROUP_ID, quarkusGroupId);
writeSetting(main, profileProperties, QUARKUS_ARTIFACT_ID, quarkusArtifactId);
if (observe) {
main.addInitialProperty(DEPENDENCIES, "camel:observability-services");
}
// command line arguments
if (property != null) {
for (String p : property) {
String k = StringHelper.before(p, "=");
String v = StringHelper.after(p, "=");
if (k != null && v != null) {
main.addArgumentProperty(k, v);
writeSettings(k, v);
}
}
}
if (exportRun) {
if (!verbose) {
main.setSilent(true);
}
main.addInitialProperty(EXPORT, "true");
// enable stub in silent mode so we do not use real components
main.setStubPattern("*");
// do not run for very long in silent run
main.addInitialProperty("camel.main.autoStartup", "false");
main.addInitialProperty("camel.main.durationMaxSeconds", "-1");
} else if (debugRun) {
main.addInitialProperty(DEBUG, "true");
} else if (transformRun) {
main.setSilent(true);
// enable stub in silent mode so we do not use real components
main.setStubPattern("*");
// do not run for very long in silent run
main.addInitialProperty("camel.main.autoStartup", "false");
main.addInitialProperty("camel.main.durationMaxSeconds", "-1");
} else if (transformMessageRun) {
// do not start any routes
main.addInitialProperty("camel.main.autoStartup", "false");
} else if (scriptRun) {
// auto terminate if being idle
main.addInitialProperty("camel.main.durationMaxIdleSeconds", "1");
}
// any custom initial property
doAddInitialProperty(main);
writeSetting(main, profileProperties, "camel.main.durationMaxMessages",
() -> maxMessages > 0 ? String.valueOf(maxMessages) : null);
writeSetting(main, profileProperties, "camel.main.durationMaxSeconds",
() -> maxSeconds > 0 ? String.valueOf(maxSeconds) : null);
writeSetting(main, profileProperties, "camel.main.durationMaxIdleSeconds",
() -> maxIdleSeconds > 0 ? String.valueOf(maxIdleSeconds) : null);
writeSetting(main, profileProperties, "camel.server.port",
() -> port > 0 && port != 8080 ? String.valueOf(port) : null);
if (managementPort != -1) {
writeSetting(main, profileProperties, "camel.management.port", () -> String.valueOf(managementPort));
}
writeSetting(main, profileProperties, JFR, jfr || jfrProfile != null ? "jfr" : null);
writeSetting(main, profileProperties, JFR_PROFILE, jfrProfile != null ? jfrProfile : null);
writeSetting(main, profileProperties, KAMELETS_VERSION, kameletsVersion);
StringJoiner js = new StringJoiner(",");
StringJoiner sjReload = new StringJoiner(",");
StringJoiner sjClasspathFiles = new StringJoiner(",");
StringJoiner sjScriptFiles = new StringJoiner(",");
StringJoiner sjGroovyFiles = new StringJoiner(",");
StringJoiner sjTlsFiles = new StringJoiner(",");
StringJoiner sjKamelets = new StringJoiner(",");
StringJoiner sjJKubeFiles = new StringJoiner(",");
// include generated openapi to files to run
if (openapi != null) {
files.add(OPENAPI_GENERATED_FILE);
}
// if we only run pom.xml/build.gradle then auto discover from the Maven/Gradle based project
if (files.size() == 1 && (files.get(0).endsWith("pom.xml") || files.get(0).endsWith("build.gradle"))) {
Path projectDir = Path.of(files.get(0)).toAbsolutePath();
// use a better name when running
if (name == null || "CamelJBang".equals(name)) {
name = RunHelper.mavenArtifactId(projectDir);
}
// find source files
files = RunHelper.scanMavenOrGradleProject(projectDir.getParent());
// include extra dependencies from pom.xml
var pomDependencies = RunHelper.scanMavenDependenciesFromPom(projectDir);
addDependencies(pomDependencies.toArray(new String[0]));
}
if (profile != null) {
// need to include profile application properties if exists
String name = baseDir + "/application-" + profile + ".properties";
if (Files.exists(Paths.get(name)) && !files.contains(name)) {
files.add(name);
}
}
for (String file : files) {
if (file.startsWith("clipboard") && !(Files.exists(Paths.get(file)))) {
file = loadFromClipboard(file);
} else if (skipFile(file)) {
continue;
} else if (isScriptFile(file)) {
// script files
sjScriptFiles.add(file);
continue;
} else if (isGroovyFile(file)) {
// groovy files
sjGroovyFiles.add("file:" + file);
if (dev) {
// groovy files can also be reloaded
sjReload.add(file);
}
continue;
} else if (isTlsFile(file)) {
// tls files
sjTlsFiles.add(file);
continue;
} else if (jkubeFile(file)) {
// jkube
sjJKubeFiles.add(file);
continue;
} else if (!knownFile(file) && !file.endsWith(".properties")) {
// unknown files to be added on classpath
sjClasspathFiles.add(file);
continue;
}
// process known files as its likely DSLs or configuration files
// check for properties files
if (file.endsWith(".properties")) {
if (acceptPropertiesFile(file)) {
if (!ResourceHelper.hasScheme(file) && !file.startsWith("github:")) {
file = "file:" + file;
}
if (ObjectHelper.isEmpty(propertiesFiles)) {
propertiesFiles = file;
} else {
propertiesFiles = propertiesFiles + "," + file;
}
if (dev && file.startsWith("file:")) {
// we can only reload if file based
sjReload.add(file.substring(5));
}
}
continue;
}
// Camel DSL files
if (!ResourceHelper.hasScheme(file) && !file.startsWith("github:")) {
file = "file:" + file;
}
if (file.startsWith("file:")) {
// check if file exist
Path inputPath = Paths.get(file.substring(5));
if (!Files.exists(inputPath) || !Files.isRegularFile(inputPath)) {
printer().printErr("File does not exist: " + file);
return 1;
}
}
if (file.startsWith("file:") && file.endsWith(".kamelet.yaml")) {
sjKamelets.add(file);
}
// automatic map github https urls to github resolver
if (file.startsWith("https://github.com/")) {
file = evalGithubSource(main, file);
if (file == null) {
continue; // all mapped continue to next
}
} else if (file.startsWith("https://gist.github.com/")) {
file = evalGistSource(main, file);
if (file == null) {
continue; // all mapped continue to next
}
}
if ("CamelJBang".equals(name)) {
// no specific name was given so lets use the name from the first integration file
// remove scheme and keep only the name (no path or ext)
String s = StringHelper.after(file, ":");
if (s.contains(":")) {
// its maybe a gist/github url so we need only the last part which has the name
s = StringHelper.afterLast(s, ":");
}
name = FileUtil.onlyName(s);
}
js.add(file);
if (dev && file.startsWith("file:")) {
// we can only reload if file based
sjReload.add(file.substring(5));
}
}
writeSetting(main, profileProperties, "camel.main.name", name);
if (sourceDir != null) {
// must be an existing directory
Path dirPath = Paths.get(sourceDir);
if (!Files.exists(dirPath) || !Files.isDirectory(dirPath)) {
printer().printErr("Directory does not exist: " + sourceDir);
return 1;
}
// make it a pattern as we load all files from this directory
// (optional=true as there may be non Camel routes files as well)
String sdir = "file:" + sourceDir + "/**?optional=true";
main.addInitialProperty("camel.main.routesIncludePattern", sdir);
writeSettings("camel.main.routesIncludePattern", sdir);
} else if (js.length() > 0) {
main.addInitialProperty("camel.main.routesIncludePattern", js.toString());
writeSettings("camel.main.routesIncludePattern", js.toString());
} else {
writeSetting(main, profileProperties, "camel.main.routesIncludePattern", () -> null);
}
if (sjClasspathFiles.length() > 0) {
main.addInitialProperty(CLASSPATH_FILES, sjClasspathFiles.toString());
writeSettings(CLASSPATH_FILES, sjClasspathFiles.toString());
} else {
writeSetting(main, profileProperties, CLASSPATH_FILES, () -> null);
}
if (sjScriptFiles.length() > 0) {
main.addInitialProperty(SCRIPT_FILES, sjScriptFiles.toString());
writeSettings(SCRIPT_FILES, sjScriptFiles.toString());
} else {
writeSetting(main, profileProperties, SCRIPT_FILES, () -> null);
}
if (sjGroovyFiles.length() > 0) {
main.addInitialProperty(GROOVY_FILES, sjGroovyFiles.toString());
writeSettings(GROOVY_FILES, sjGroovyFiles.toString());
} else {
writeSetting(main, profileProperties, GROOVY_FILES, () -> null);
}
if (sjTlsFiles.length() > 0) {
main.addInitialProperty(TLS_FILES, sjTlsFiles.toString());
writeSettings(TLS_FILES, sjTlsFiles.toString());
} else {
writeSetting(main, profileProperties, TLS_FILES, () -> null);
}
if (sjJKubeFiles.length() > 0) {
main.addInitialProperty(JKUBE_FILES, sjJKubeFiles.toString());
writeSettings(JKUBE_FILES, sjJKubeFiles.toString());
} else {
writeSetting(main, profileProperties, JKUBE_FILES, () -> null);
}
if (sjKamelets.length() > 0) {
String loc = main.getInitialProperties().getProperty("camel.component.kamelet.location");
if (loc != null) {
loc = loc + "," + sjKamelets;
} else {
loc = sjKamelets.toString();
}
main.addInitialProperty("camel.component.kamelet.location", loc);
writeSettings("camel.component.kamelet.location", loc);
} else {
writeSetting(main, profileProperties, "camel.component.kamelet.location", () -> null);
}
// we can only reload if file based
setupReload(main, sjReload);
if (propertiesFiles != null) {
String[] filesLocation = propertiesFiles.split(",");
// sort so general application.properties comes first (we should load profile first)
List<String> names = new ArrayList<>(List.of(filesLocation));
names.sort((o1, o2) -> {
// make sure application.properties is last
if (o1.endsWith("application.properties")) {
return 1;
} else if (o2.endsWith("application.properties")) {
return -1;
}
return 0;
});
StringBuilder locations = new StringBuilder();
for (String file : names) {
if (!file.startsWith("file:")) {
if (!file.startsWith("/")) {
file = Paths.get(FileSystems.getDefault().getPath("").toAbsolutePath().toString(), file).toString();
}
file = "file://" + file;
}
if (!locations.isEmpty()) {
locations.append(",");
}
locations.append(file);
}
// there may be existing properties
String loc = main.getInitialProperties().getProperty("camel.component.properties.location");
if (loc != null) {
loc = loc + "," + locations;
} else {
loc = locations.toString();
}
main.addInitialProperty("camel.component.properties.location", loc);
writeSettings("camel.component.properties.location", loc);
main.setPropertyPlaceholderLocations(loc);
}
// merge existing dependencies with --deps
addDependencies(RuntimeUtil.getDependenciesAsArray(profileProperties));
// Add runtime-specific dependencies
addRuntimeSpecificDependenciesFromProperties(profileProperties);
if (!dependencies.isEmpty()) {
var joined = String.join(",", dependencies);
main.addInitialProperty(DEPENDENCIES, joined);
writeSettings(DEPENDENCIES, joined);
}
// if we have a specific camel version then make sure we really need to switch
if (camelVersion != null) {
CamelCatalog catalog = new DefaultCamelCatalog();
String v = catalog.getCatalogVersion();
if (camelVersion.equals(v)) {
// same version, so we use current
camelVersion = null;
}
}
// okay we have validated all input and are ready to run
// (if exporting then we cannot run a different version)
if (!exportRun && camelVersion != null || isDebugMode()) {
// TODO: debug camel specific version
boolean custom = false;
if (camelVersion != null) {
// run in another JVM with different camel version (foreground or background)
custom = camelVersion.contains("-") && !camelVersion.endsWith("-SNAPSHOT");
if (custom) {
// regular camel versions can also be a milestone or release candidate
custom = !camelVersion.matches(".*-(RC|M)\\d$");
}
}
if (custom) {
// custom camel distribution
return runCustomCamelVersion(main);
} else {
// apache camel distribution or remote debug enabled
return runCamelVersion(main);
}
} else if (debugRun) {
// spawn new JVM to debug in background
return runDebug(main);
} else if (background) {
// spawn new JVM to run in background
return runBackground(main);
} else {
// run default in current JVM with same camel version
try {
return runKameletMain(main);
} catch (Exception ex) {
if (ignoreLoadingError) {
printer().printErr(ex);
return 0;
}
throw ex;
}
}
}
private void addRuntimeSpecificDependenciesFromProperties(Properties profileProperties) {
if (profileProperties == null)
return;
String runtimeSpecificDeps = null;
switch (runtime) {
case main -> runtimeSpecificDeps = profileProperties.getProperty(DEPENDENCIES_MAIN);
case springBoot -> runtimeSpecificDeps = profileProperties.getProperty(DEPENDENCIES_SPRING_BOOT);
case quarkus -> runtimeSpecificDeps = profileProperties.getProperty(DEPENDENCIES_QUARKUS);
}
if (runtimeSpecificDeps != null && !runtimeSpecificDeps.isEmpty()) {
addDependencies(runtimeSpecificDeps.split(","));
}
}
protected void addDependencies(String... deps) {
var depsArray = Optional.ofNullable(deps).orElse(new String[0]);
var depsList = Arrays.stream(depsArray).filter(tok -> !tok.isEmpty()).toList();
dependencies.addAll(depsList);
}
protected int runQuarkus() throws Exception {
if (background) {
printer().println("Run Camel Quarkus with --background is not supported");
return 1;
}
AtomicReference<Process> processRef = new AtomicReference<>();
// create temp run dir
Path runDirPath = Paths.get(RUN_PLATFORM_DIR, Long.toString(System.currentTimeMillis()));
if (!this.background) {
// Mark for deletion on exit
Runtime.getRuntime().addShutdownHook(new Thread(() -> {
try {
// We need to wait for the process to exit before doing any cleanup
Process process = processRef.get();
if (process != null) {
process.destroy();
for (int i = 0; i < 30; i++) {
if (!process.isAlive()) {
break;
}
try {
Thread.sleep(1000);
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
}
}
}
removeDir(runDirPath);
} catch (Exception e) {
// Ignore
}
}));
}
Files.createDirectories(runDirPath);
// export to hidden folder
ExportQuarkus eq = new ExportQuarkus(getMain());
eq.javaLiveReload = this.dev;
eq.symbolicLink = this.dev;
eq.mavenWrapper = true;
eq.gradleWrapper = false;
eq.quarkusVersion = PropertyResolver.fromSystemProperty(QUARKUS_VERSION, () -> this.quarkusVersion);
eq.quarkusGroupId = PropertyResolver.fromSystemProperty(QUARKUS_GROUP_ID, () -> this.quarkusGroupId);
eq.quarkusArtifactId = PropertyResolver.fromSystemProperty(QUARKUS_ARTIFACT_ID, () -> this.quarkusArtifactId);
eq.camelVersion = this.camelVersion;
eq.kameletsVersion = this.kameletsVersion;
eq.exportDir = runDirPath.toString();
eq.localKameletDir = this.localKameletDir;
eq.excludes = this.excludes;
eq.filePaths = this.filePaths;
eq.files = this.files;
eq.name = this.name;
eq.verbose = this.verbose;
eq.port = this.port;
eq.managementPort = this.managementPort;
eq.gav = this.gav;
eq.runtime = this.runtime;
if (eq.gav == null) {
if (eq.name == null) {
eq.name = "jbang-run-dummy";
}
eq.gav = "org.example.project:" + eq.name + ":1.0-SNAPSHOT";
}
eq.dependencies = this.dependencies;
eq.addDependencies("camel:cli-connector");
eq.fresh = this.fresh;
eq.download = this.download;
eq.skipPlugins = this.skipPlugins;
eq.packageScanJars = this.packageScanJars;
eq.quiet = true;
eq.logging = false;
eq.loggingLevel = "off";
eq.ignoreLoadingError = this.ignoreLoadingError;
eq.lazyBean = this.lazyBean;
eq.applicationProperties = this.property;
printer().println("Running using Quarkus v" + eq.quarkusVersion + " (preparing and downloading files)");
// run export
int exit = eq.export();
if (exit != 0) {
return exit;
}
// run quarkus via maven
String mvnw = "/mvnw";
if (FileUtil.isWindows()) {
mvnw = "/mvnw.cmd";
}
ProcessBuilder pb = new ProcessBuilder();
pb.command(runDirPath + mvnw, "--quiet", "--file",
runDirPath.toRealPath().resolve("pom.xml").toString(), "package",
"quarkus:" + (dev ? "dev" : "run"));
pb.inheritIO(); // run in foreground (with IO so logs are visible)
Process p = pb.start();
processRef.set(p);
this.spawnPid = p.pid();
// wait for that process to exit as we run in foreground
return p.waitFor();
}
protected int runSpringBoot() throws Exception {
if (background) {
printer().println("Run Camel Spring Boot with --background is not supported");
return 1;
}
AtomicReference<Process> processRef = new AtomicReference<>();
// create temp run dir
Path runDirPath = Paths.get(RUN_PLATFORM_DIR, Long.toString(System.currentTimeMillis()));
if (!this.background) {
// Mark for deletion on exit
Runtime.getRuntime().addShutdownHook(new Thread(() -> {
try {
// We need to wait for the process to exit before doing any cleanup
Process process = processRef.get();
if (process != null) {
process.destroy();
for (int i = 0; i < 30; i++) {
if (!process.isAlive()) {
break;
}
try {
Thread.sleep(1000);
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
}
}
}
removeDir(runDirPath);
} catch (Exception e) {
// Ignore
}
}));
}
Files.createDirectories(runDirPath);
// export to hidden folder
ExportSpringBoot eq = new ExportSpringBoot(getMain());
// Java codes reload is not supported in Spring Boot since it has to be recompiled to trigger the restart
eq.javaLiveReload = false;
eq.symbolicLink = this.dev;
eq.mavenWrapper = true;
eq.gradleWrapper = false;
eq.springBootVersion = this.springBootVersion;
eq.camelVersion = this.camelVersion;
eq.camelSpringBootVersion = PropertyResolver.fromSystemProperty(CAMEL_SPRING_BOOT_VERSION,
() -> this.camelSpringBootVersion != null ? this.camelSpringBootVersion : this.camelVersion);
eq.kameletsVersion = this.kameletsVersion;
eq.exportDir = runDirPath.toString();
eq.localKameletDir = this.localKameletDir;
eq.excludes = this.excludes;
eq.filePaths = this.filePaths;
eq.files = this.files;
eq.name = this.name;
eq.verbose = this.verbose;
eq.port = this.port;
eq.managementPort = this.managementPort;
eq.gav = this.gav;
eq.repositories = this.repositories;
eq.runtime = this.runtime;
if (eq.gav == null) {
if (eq.name == null) {
eq.name = "jbang-run-dummy";
}
eq.gav = "org.example.project:" + eq.name + ":1.0-SNAPSHOT";
}
eq.dependencies.addAll(this.dependencies);
eq.addDependencies("camel:cli-connector");
if (this.dev) {
// hot-reload of spring-boot
eq.addDependencies("mvn:org.springframework.boot:spring-boot-devtools");
}
eq.fresh = this.fresh;
eq.download = this.download;
eq.skipPlugins = this.skipPlugins;
eq.packageScanJars = this.packageScanJars;
eq.quiet = true;
eq.logging = false;
eq.loggingLevel = "off";
eq.ignoreLoadingError = this.ignoreLoadingError;
eq.lazyBean = this.lazyBean;
eq.applicationProperties = this.property;
printer().println("Running using Spring Boot v" + eq.springBootVersion + " (preparing and downloading files)");
// run export
int exit = eq.export();
if (exit != 0) {
return exit;
}
// prepare spring-boot for logging to file
try (InputStream is = Run.class.getClassLoader().getResourceAsStream("spring-boot-logback.xml")) {
Path logbackPath = Paths.get(eq.exportDir, "src/main/resources/logback.xml");
Files.createDirectories(logbackPath.getParent());
Files.copy(is, logbackPath, StandardCopyOption.REPLACE_EXISTING);
}
// run spring-boot via maven
ProcessBuilder pb = new ProcessBuilder();
String mvnw = "/mvnw";
if (FileUtil.isWindows()) {
mvnw = "/mvnw.cmd";
}
pb.command(runDirPath + mvnw, "--quiet", "--file",
runDirPath.toRealPath().resolve("pom.xml").toString(),
"spring-boot:run");
pb.inheritIO(); // run in foreground (with IO so logs are visible)
Process p = pb.start();
processRef.set(p);
this.spawnPid = p.pid();
// wait for that process to exit as we run in foreground
return p.waitFor();
}
private boolean acceptPropertiesFile(String file) {
String name = FileUtil.onlyName(file);
if (profile != null && name.startsWith("application-")) {
// only accept the file that matches the correct profile
return ("application-" + profile).equals(name);
}
return true;
}
protected void doAddInitialProperty(KameletMain main) {
// noop
}
private void setupReload(KameletMain main, StringJoiner sjReload) {
if (dev && (sourceDir != null || sjReload.length() > 0)) {
main.addInitialProperty("camel.main.routesReloadEnabled", "true");
if (sourceDir != null) {
main.addInitialProperty(SOURCE_DIR, sourceDir);
main.addInitialProperty("camel.main.routesReloadDirectory", sourceDir);
main.addInitialProperty("camel.main.routesReloadPattern", "*");
main.addInitialProperty("camel.main.routesReloadDirectoryRecursive", "true");
} else {
String pattern = sjReload.toString();
String reloadDir = ".";
// use current dir, however if we run a file that are in another folder, then we should track that folder instead
for (String r : sjReload.toString().split(",")) {
String path = FileUtil.onlyPath(r);
if (path != null && !path.equals(".camel-jbang")) {
reloadDir = path;
break;
}
}
main.addInitialProperty("camel.main.routesReloadDirectory", reloadDir);
main.addInitialProperty("camel.main.routesReloadPattern", pattern);
main.addInitialProperty("camel.main.routesReloadDirectoryRecursive",
isReloadRecursive(pattern) ? "true" : "false");
}
// do not shutdown the JVM but stop routes when max duration is triggered
main.addInitialProperty("camel.main.durationMaxAction", "stop");
}
}
private Properties loadProfileProperties(Path baseDir) throws Exception {
Properties answer = null;
if (transformMessageRun) {
// do not load profile in transform message run as it should be vanilla empty
return answer;
}
// application.properties
Path profilePropertiesPath;
if (sourceDir != null) {
profilePropertiesPath = Paths.get(sourceDir).resolve("application.properties");
} else {
profilePropertiesPath = baseDir.resolve("application.properties");
}
// based application-profile.properties
answer = doLoadAndInitProfileProperties(profilePropertiesPath);
if (profile != null) {
if (sourceDir != null) {
profilePropertiesPath = Paths.get(sourceDir).resolve("application-" + profile + ".properties");
} else {
profilePropertiesPath = Paths.get("application-" + profile + ".properties");
}
// application-profile.properties should override standard application.properties
Properties override = doLoadAndInitProfileProperties(profilePropertiesPath);
if (override != null) {
if (answer == null) {
answer = override;
} else {
answer.putAll(override);
}
}
}
if (kameletsVersion == null) {
kameletsVersion = VersionHelper.extractKameletsVersion();
}
return answer;
}
private Properties doLoadAndInitProfileProperties(Path profilePropertiesPath) throws Exception {
Properties answer = null;
if (Files.exists(profilePropertiesPath)) {
answer = loadProfilePropertiesFile(profilePropertiesPath);
// logging level/color may be configured in the properties file
loggingLevel = answer.getProperty("loggingLevel", loggingLevel);
loggingColor
= "true".equals(answer.getProperty("loggingColor", loggingColor ? "true" : "false"));
loggingJson
= "true".equals(answer.getProperty("loggingJson", loggingJson ? "true" : "false"));
repositories = answer.getProperty(REPOS, repositories);
mavenSettings = answer.getProperty(MAVEN_SETTINGS, mavenSettings);
mavenSettingsSecurity = answer.getProperty(MAVEN_SETTINGS_SECURITY, mavenSettingsSecurity);
mavenCentralEnabled = "true"
.equals(answer.getProperty(MAVEN_CENTRAL_ENABLED, mavenCentralEnabled ? "true" : "false"));
mavenApacheSnapshotEnabled = "true".equals(answer.getProperty(MAVEN_APACHE_SNAPSHOTS,
mavenApacheSnapshotEnabled ? "true" : "false"));
openapi = answer.getProperty(OPEN_API, openapi);
download = "true".equals(answer.getProperty(DOWNLOAD, download ? "true" : "false"));
packageScanJars
= "true".equals(answer.getProperty(PACKAGE_SCAN_JARS, packageScanJars ? "true" : "false"));
background = "true".equals(answer.getProperty(BACKGROUND, background ? "true" : "false"));
backgroundWait = "true".equals(answer.getProperty(BACKGROUND_WAIT, backgroundWait ? "true" : "false"));
jvmDebugPort = parseJvmDebugPort(answer.getProperty(JVM_DEBUG, Integer.toString(jvmDebugPort)));
camelVersion = answer.getProperty(CAMEL_VERSION, camelVersion);
kameletsVersion = answer.getProperty(KAMELETS_VERSION, kameletsVersion);
springBootVersion = answer.getProperty(SPRING_BOOT_VERSION, springBootVersion);
quarkusGroupId = answer.getProperty(QUARKUS_GROUP_ID, quarkusGroupId);
quarkusArtifactId = answer.getProperty(QUARKUS_ARTIFACT_ID, quarkusArtifactId);
quarkusVersion = answer.getProperty(QUARKUS_VERSION, quarkusVersion);
gav = answer.getProperty(GAV, gav);
stub = answer.getProperty(STUB, stub);
excludes = RuntimeUtil.getCommaSeparatedPropertyAsList(answer, EXCLUDES, excludes);
}
return answer;
}
/**
* Parses the JVM debug port from the given value.
* <p/>
* The value can be {@code true} to indicate a default port which is {@code 4004}, {@code false} to indicate no
* debug, or a number corresponding to a custom port.
*
* @param value the value to parse.
*
* @return the JVM debug port corresponding to the given value.
*/
private static int parseJvmDebugPort(String value) {
if (value == null) {
return 0;
} else if (value.equals("true")) {
return 4004;
} else if (value.equals("false")) {
return 0;
}
return Integer.parseInt(value);
}
protected int runCamelVersion(KameletMain main) throws Exception {
List<String> cmds;
if (spec != null) {
cmds = new ArrayList<>(spec.commandLine().getParseResult().originalArgs());
} else {
cmds = new ArrayList<>();
cmds.add("run");
if (transformMessageRun) {
cmds.add("--empty");
}
}
if (background) {
cmds.remove("--background=true");
cmds.remove("--background");
cmds.remove("--background-wait");
cmds.remove("--background-wait=false");
cmds.remove("--background-wait=true");
}
if (camelVersion != null) {
cmds.remove("--camel-version=" + camelVersion);
}
if (kameletsVersion != null) {
cmds.remove("--kamelets-version=" + kameletsVersion);
}
// need to use jbang command to specify camel version
List<String> jbangArgs = new ArrayList<>();
jbangArgs.add("jbang");
jbangArgs.add("run");
if (camelVersion != null) {
jbangArgs.add("-Dcamel.jbang.version=" + camelVersion);
}
if (kameletsVersion != null) {
if (camelVersion != null && VersionHelper.isLE(camelVersion, "4.16.0")) {
jbangArgs.add("-Dcamel-kamelets.version=" + camelVersion);
} else {
cmds.add("--kamelets-version=" + kameletsVersion);
}
}
// tooling may signal to run JMX debugger in suspended mode via JVM system property
// which we must include in args as well
String debugSuspend = System.getProperty(BacklogDebugger.SUSPEND_MODE_SYSTEM_PROP_NAME);
if (debugSuspend != null) {
jbangArgs.add("-D" + BacklogDebugger.SUSPEND_MODE_SYSTEM_PROP_NAME + "=" + debugSuspend);
}
if (isDebugMode()) {
jbangArgs.add("--debug=" + jvmDebugPort); // jbang --debug=port
cmds.removeIf(arg -> arg.startsWith("--jvm-debug"));
}
if (repositories != null) {
jbangArgs.add("--repos=" + repositories);
}
jbangArgs.add("camel@apache/camel");
jbangArgs.addAll(cmds);
ProcessBuilder pb = new ProcessBuilder();
pb.command(jbangArgs);
if (verbose) {
printer().println(String.join(" ", jbangArgs));
}
if (background) {
return runBackgroundProcess(pb, "Camel Main");
} else {
pb.inheritIO(); // run in foreground (with IO so logs are visible)
Process p = pb.start();
this.spawnPid = p.pid();
// wait for that process to exit as we run in foreground
return p.waitFor();
}
}
protected int runBackground(KameletMain main) throws Exception {
List<String> cmds;
if (spec != null) {
cmds = new ArrayList<>(spec.commandLine().getParseResult().originalArgs());
} else {
cmds = new ArrayList<>();
cmds.add("run");
if (transformMessageRun) {
cmds.add("--empty");
}
}
cmds.remove("--background=true");
cmds.remove("--background");
cmds.remove("--background-wait=false");
cmds.remove("--background-wait=true");
cmds.remove("--background-wait");
RunHelper.addCamelJBangCommand(cmds);
ProcessBuilder pb = new ProcessBuilder();
pb.command(cmds);
if (verbose) {
printer().println(String.join(" ", cmds));
}
return runBackgroundProcess(pb, "Camel Main");
}
protected int runBackgroundProcess(ProcessBuilder pb, String kind) throws Exception {
Path logPath = null;
if (backgroundWait) {
// store background output in a log file to capture any error on startup
logPath = getRunBackgroundLogFile("" + new Random().nextLong());
try {
Path logDir = CommandLineHelper.getCamelDir();
Files.createDirectories(logDir); //make sure the parent dir exists
Files.createFile(logPath);
logPath.toFile().deleteOnExit();
} catch (IOException e) {
// Ignore
}
pb.redirectErrorStream(true);
pb.redirectOutput(logPath.toFile());
}
Process p = pb.start();
this.spawnPid = p.pid();
if (!exportRun && !transformRun && !transformMessageRun) {
printer().println(
"Running " + kind + ": " + name + " in background with PID: " + p.pid()
+ (backgroundWait ? " (waiting to startup)" : ""));
}
int ec = 0;
if (logPath != null) {
StopWatch watch = new StopWatch();
int state = 0; // state 5 is running
while (p.isAlive() && watch.taken() < 20000 && state < 5) {
JsonObject root = loadStatus(p.pid());
if (root != null) {
JsonObject context = (JsonObject) root.get("context");
if (context != null) {
state = context.getInteger("phase");
}
}
if (state < 5) {
try {
Thread.sleep(500);
} catch (Exception e) {
// we want to exit
break;
}
}
}
if (!p.isAlive()) {
ec = p.exitValue();
if (ec != 0) {
printer().println(kind + ": " + name + " startup failure");
printer().println("");
String text = Files.readString(logPath);
printer().print(text);
}
} else {
printer().println(kind + ": " + name + " (state: " + extractState(state) + ")");
}
}
if (logPath != null) {
try {
Files.deleteIfExists(logPath);
} catch (IOException e) {
// Ignore
}
}
return ec;
}
protected int runDebug(KameletMain main) throws Exception {
// to be implemented in Debug
return 0;
}
protected int runCustomCamelVersion(KameletMain main) throws Exception {
InputStream is = Run.class.getClassLoader().getResourceAsStream("templates/run-custom-camel-version.tmpl");
String content = IOHelper.loadText(is);
IOHelper.close(is);
content = content.replaceFirst("\\{\\{ \\.JavaVersion }}", "21");
if (repositories != null) {
content = content.replaceFirst("\\{\\{ \\.MavenRepositories }}", "//REPOS " + repositories);
} else {
content = content.replaceFirst("\\{\\{ \\.MavenRepositories }}", "");
}
// use custom distribution of camel
StringBuilder sb = new StringBuilder();
sb.append(String.format("//DEPS org.apache.camel:camel-bom:%s@pom%n", camelVersion));
sb.append(String.format("//DEPS org.apache.camel:camel-core:%s%n", camelVersion));
sb.append(String.format("//DEPS org.apache.camel:camel-core-engine:%s%n", camelVersion));
sb.append(String.format("//DEPS org.apache.camel:camel-main:%s%n", camelVersion));
sb.append(String.format("//DEPS org.apache.camel:camel-java-joor-dsl:%s%n", camelVersion));
sb.append(String.format("//DEPS org.apache.camel:camel-kamelet:%s%n", camelVersion));
sb.append(String.format("//DEPS org.apache.camel:camel-kamelet-main:%s%n", camelVersion));
if (VersionHelper.isGE(camelVersion, "3.19.0")) {
sb.append(String.format("//DEPS org.apache.camel:camel-cli-connector:%s%n", camelVersion));
}
content = content.replaceFirst("\\{\\{ \\.CamelDependencies }}", sb.toString());
// use apache distribution of camel-jbang/github-resolver
String v = camelVersion.substring(0, camelVersion.lastIndexOf('.'));
sb = new StringBuilder();
sb.append(String.format("//DEPS org.apache.camel:camel-jbang-core:%s%n", v));
sb.append(String.format("//DEPS org.apache.camel:camel-resourceresolver-github:%s%n", v));
content = content.replaceFirst("\\{\\{ \\.CamelJBangDependencies }}", sb.toString());
sb = new StringBuilder();
sb.append(String.format("//DEPS org.apache.camel.kamelets:camel-kamelets:%s%n", kameletsVersion));
content = content.replaceFirst("\\{\\{ \\.CamelKameletsDependencies }}", sb.toString());
String fn = CommandLineHelper.CAMEL_JBANG_WORK_DIR + "/CustomCamelJBang.java";
Files.writeString(Paths.get(fn), content);
List<String> cmds;
if (spec != null) {
cmds = new ArrayList<>(spec.commandLine().getParseResult().originalArgs());
} else {
cmds = new ArrayList<>();
cmds.add("run");
}
if (background) {
cmds.remove("--background=true");
cmds.remove("--background");
cmds.remove("--background-wait=true");
cmds.remove("--background-wait=false");
cmds.remove("--background-wait");
}
if (repositories != null) {
if (!VersionHelper.isGE(v, "3.18.1")) {
// --repos is not supported in 3.18.0 or older, so remove
cmds.remove("--repos=" + repositories);
}
}
cmds.remove("--camel-version=" + camelVersion);
// need to use jbang command to specify camel version
List<String> jbangArgs = new ArrayList<>();
jbangArgs.add("jbang");
jbangArgs.add(CommandLineHelper.CAMEL_JBANG_WORK_DIR + "/CustomCamelJBang.java");
jbangArgs.addAll(cmds);
if (verbose) {
printer().println(String.join(" ", jbangArgs));
}
ProcessBuilder pb = new ProcessBuilder();
pb.command(jbangArgs);
if (background) {
return runBackgroundProcess(pb, "Camel Main");
} else {
pb.inheritIO(); // run in foreground (with IO so logs are visible)
Process p = pb.start();
this.spawnPid = p.pid();
// wait for that process to exit as we run in foreground
return p.waitFor();
}
}
protected int runKameletMain(KameletMain main) throws Exception {
main.start();
main.run();
// cleanup and delete log file
if (logFile != null) {
FileUtil.deleteFile(logFile);
}
return main.getExitCode();
}
private String loadFromCode(String code, String name, boolean file) throws IOException {
String fn = CommandLineHelper.CAMEL_JBANG_WORK_DIR + "/" + name + ".java";
InputStream is = Run.class.getClassLoader().getResourceAsStream("templates/code-java.tmpl");
String content = IOHelper.loadText(is);
IOHelper.close(is);
if (!file) {
// need to replace single quote as double quotes (from input string)
code = code.replace("'", "\"");
code = code.trim();
}
// ensure the code ends with semicolon to finish the java statement
if (!code.endsWith(";")) {
code = code + ";";
}
content = StringHelper.replaceFirst(content, "{{ .Name }}", name);
content = StringHelper.replaceFirst(content, "{{ .Code }}", code);
Files.writeString(Paths.get(fn), content);
return "file:" + fn;
}
private String evalGistSource(KameletMain main, String file) throws Exception {
StringJoiner routes = new StringJoiner(",");
StringJoiner kamelets = new StringJoiner(",");
StringJoiner properties = new StringJoiner(",");
fetchGistUrls(file, routes, kamelets, properties);
if (properties.length() > 0) {
main.addInitialProperty("camel.component.properties.location", properties.toString());
}
if (kamelets.length() > 0) {
String loc = main.getInitialProperties().getProperty("camel.component.kamelet.location");
if (loc != null) {
// local kamelets first
loc = kamelets + "," + loc;
} else {
loc = kamelets.toString();
}
main.addInitialProperty("camel.component.kamelet.location", loc);
}
if (routes.length() > 0) {
return routes.toString();
}
return null;
}
private String evalGithubSource(KameletMain main, String file) throws Exception {
String ext = FileUtil.onlyExt(file);
boolean wildcard = FileUtil.onlyName(file, false).contains("*");
if (ext != null && !wildcard) {
// it is a single file so map to
return asGithubSingleUrl(file);
} else {
StringJoiner routes = new StringJoiner(",");
StringJoiner kamelets = new StringJoiner(",");
StringJoiner properties = new StringJoiner(",");
fetchGithubUrls(file, routes, kamelets, properties);
if (properties.length() > 0) {
main.addInitialProperty("camel.component.properties.location", properties.toString());
}
if (kamelets.length() > 0) {
String loc = main.getInitialProperties().getProperty("camel.component.kamelet.location");
if (loc != null) {
// local kamelets first
loc = kamelets + "," + loc;
} else {
loc = kamelets.toString();
}
main.addInitialProperty("camel.component.kamelet.location", loc);
}
if (routes.length() > 0) {
return routes.toString();
}
return null;
}
}
private String loadFromClipboard(String file) throws UnsupportedFlavorException, IOException {
// run from clipboard (not real file exists)
String ext = FileUtil.onlyExt(file, true);
if (ext == null || ext.isEmpty()) {
throw new IllegalArgumentException(
"When running from clipboard, an extension is required to let Camel know what kind of file to use");
}
Clipboard c = Toolkit.getDefaultToolkit().getSystemClipboard();
Object t = c.getData(DataFlavor.stringFlavor);
if (t != null) {
String fn = CLIPBOARD_GENERATED_FILE + "." + ext;
if ("java".equals(ext)) {
String fqn = determineClassName(t.toString());
if (fqn == null) {
// wrap code in wrapper
return loadFromCode(t.toString(), "ClipboardRoute", true);
}
// drop package in file name
String cn = fqn;
if (fqn.contains(".")) {
cn = cn.substring(cn.lastIndexOf('.') + 1);
}
fn = cn + ".java";
}
Files.writeString(Paths.get(fn), t.toString());
file = "file:" + fn;
}
return file;
}
private KameletMain createMainInstance() {
KameletMain main;
if (localKameletDir == null || localKameletDir.isEmpty()) {
main = new KameletMain(CAMEL_INSTANCE_TYPE);
} else {
StringJoiner sj = new StringJoiner(",");
String[] parts = localKameletDir.split(",");
for (String part : parts) {
// automatic map github https urls to github resolver
if (part.startsWith("https://github.com/")) {
part = asGithubSingleUrl(part);
} else if (part.startsWith("https://gist.github.com/")) {
part = asGistSingleUrl(part);
}
part = FileUtil.compactPath(part);
if (!ResourceHelper.hasScheme(part) && !part.startsWith("github:")) {
part = "file:" + part;
}
sj.add(part);
}
main = new KameletMain(CAMEL_INSTANCE_TYPE, sj.toString());
writeSettings(LOCAL_KAMELET_DIR, sj.toString());
}
return main;
}
private void configureLogging(Path baseDir) throws Exception {
if (logging) {
// allow to configure individual logging levels in application.properties
Properties prop = loadProfileProperties(baseDir);
if (prop != null) {
for (Object obj : prop.keySet()) {
String key = obj.toString();
String value = prop.getProperty(key);
if (key.startsWith("logging.level.")) {
key = key.substring(14);
} else if (key.startsWith("quarkus.log.category.")) {
key = key.substring(21);
if (key.endsWith(".level")) {
key = key.substring(0, key.length() - 6);
}
} else {
continue;
}
key = StringHelper.removeLeadingAndEndingQuotes(key);
String line = key + "=" + value;
String line2 = key + " = " + value;
if (!loggingCategory.contains(line) && !loggingCategory.contains(line2)) {
loggingCategory.add(line);
}
}
}
RuntimeUtil.configureLog(loggingLevel, loggingColor, loggingJson, scriptRun, false, loggingConfigPath,
loggingCategory);
writeSettings("loggingLevel", loggingLevel);
writeSettings("loggingColor", loggingColor ? "true" : "false");
writeSettings("loggingJson", loggingJson ? "true" : "false");
if (!scriptRun) {
// remember log file
String name = RuntimeUtil.getPid() + ".log";
final Path logDir = CommandLineHelper.getCamelDir();
Files.createDirectories(logDir); //make sure the parent dir exists
logFile = logDir.resolve(name);
try {
// Create an empty file that will be deleted on exit
Files.createFile(logFile);
logFile.toFile().deleteOnExit();
} catch (IOException e) {
// Ignore
}
}
} else {
if (exportRun) {
RuntimeUtil.configureLog(loggingLevel, false, false, false, true, null, null);
writeSettings("loggingLevel", loggingLevel);
} else {
RuntimeUtil.configureLog("off", false, false, false, false, null, null);
writeSettings("loggingLevel", "off");
}
}
}
private void generateOpenApi() throws Exception {
Path filePath = Paths.get(openapi);
if (!Files.exists(filePath) || !Files.isRegularFile(filePath)) {
throw new FileNotFoundException("Cannot find file: " + filePath);
}
try (InputStream is = Run.class.getClassLoader().getResourceAsStream("templates/rest-dsl.yaml.tmpl")) {
String content = IOHelper.loadText(is);
String onlyName = filePath.toString();
content = content.replaceFirst("\\{\\{ \\.Spec }}", onlyName);
Files.writeString(Paths.get(OPENAPI_GENERATED_FILE), content);
// we need to include the spec on the classpath
files.add(openapi);
}
}
private boolean knownFile(String file) throws Exception {
// always include kamelets
String ext = FileUtil.onlyExt(file, false);
if ("kamelet.yaml".equals(ext)) {
return true;
}
String ext2 = FileUtil.onlyExt(file, true);
if (ext2 != null) {
SourceScheme sourceScheme = SourceScheme.fromUri(file);
// special for yaml or xml, as we need to check if they have camel or not
if (!sourceScheme.isRemote() && ("xml".equals(ext2) || "yaml".equals(ext2))) {
// load content into memory
Source source = SourceHelper.resolveSource(file);
if ("xml".equals(ext2)) {
XmlStreamDetector detector = new XmlStreamDetector(
new ByteArrayInputStream(source.content().getBytes(StandardCharsets.UTF_8)));
XmlStreamInfo info = detector.information();
if (!info.isValid()) {
return false;
}
return ACCEPTED_XML_ROOT_ELEMENTS.contains(info.getRootElementName());
} else {
// TODO: we probably need a way to parse the content and match against the YAML DSL expected by Camel
// This check looks very fragile
return source.content().contains("- from:")
|| source.content().contains("- route:")
|| source.content().contains("- routeTemplate") || source.content().contains("- route-template:")
|| source.content().contains("- routeConfiguration:")
|| source.content().contains("- route-configuration:")
|| source.content().contains("- rest:")
|| source.content().contains("- beans:")
// also support Pipes.
|| source.content().contains("Pipe");
}
}
// if the ext is an accepted file then we include it as a potential route
// (java files need to be included as route to support pojos/processors with routes)
return SourceHelper.isAcceptedSourceFile(ext2);
} else {
// assume match as it can be wildcard or dir
return true;
}
}
private boolean skipFile(String name) {
if (name.startsWith("github:") || name.startsWith("https://github.com/")
|| name.startsWith("https://gist.github.com/")) {
return false;
}
// flatten file
name = FileUtil.stripPath(name);
if (OPENAPI_GENERATED_FILE.equals(name)) {
return false;
}
if ("pom.xml".equalsIgnoreCase(name)) {
return true;
}
if ("build.gradle".equalsIgnoreCase(name)) {
return true;
}
if ("camel-runner.jar".equals(name)) {
return true;
}
if ("docker-compose.yml".equals(name) || "docker-compose.yaml".equals(name) || "compose.yml".equals(name)
|| "compose.yaml".equals(name)) {
return true;
}
if (name.equals("NOTICE.txt") || name.equals("LICENSE.txt")) {
return true;
}
if (name.startsWith(".")) {
// relative file is okay, otherwise we assume it's a hidden file
boolean ok = name.startsWith("..") || name.startsWith("./");
if (!ok) {
return true;
}
}
// is the file excluded?
if (isExcluded(name, excludes)) {
return true;
}
// skip dirs
if (!name.startsWith("classpath:")) {
Path path = Path.of(name);
if (Files.exists(path) && Files.isDirectory(path)) {
return true;
}
}
if (FileUtil.onlyExt(name) == null) {
return true;
}
String on = FileUtil.onlyName(name, true);
on = on.toLowerCase(Locale.ROOT);
if (on.startsWith("readme")) {
return true;
}
return false;
}
private static boolean isExcluded(String name, List<String> excludes) {
if (excludes != null) {
for (String pattern : excludes) {
pattern = pattern.trim();
if (AntPathMatcher.INSTANCE.match(pattern, name)) {
return true;
}
}
}
return false;
}
private boolean isGroovyFile(String name) {
return name.endsWith(".groovy");
}
private boolean isScriptFile(String name) {
return name.endsWith(".sh");
}
private boolean isTlsFile(String name) {
return name.endsWith(".crt") || name.endsWith(".key") || name.endsWith(".pem");
}
private boolean jkubeFile(String name) {
return name.endsWith(".jkube.yaml") || name.endsWith(".jkube.yml");
}
private void writeSettings(String key, String value) {
try {
// use java.util.Properties to ensure the value is escaped correctly
Properties prop = new Properties();
prop.setProperty(key, value);
StringWriter sw = new StringWriter();
prop.store(sw, null);
Path runSettingsPath = CommandLineHelper.getWorkDir().resolve(RUN_SETTINGS_FILE);
StringBuilder content = new StringBuilder();
String[] lines = sw.toString().split(System.lineSeparator());
for (String line : lines) {
// properties store timestamp as comment which we want to skip
if (!line.startsWith("#")) {
content.append(line).append(System.lineSeparator());
}
}
// Append to the file if it exists, otherwise create it
if (Files.exists(runSettingsPath)) {
Files.write(runSettingsPath, content.toString().getBytes(StandardCharsets.UTF_8),
StandardOpenOption.APPEND);
} else {
Files.write(runSettingsPath, content.toString().getBytes(StandardCharsets.UTF_8));
}
} catch (Exception e) {
// ignore
}
}
protected static void removeDir(Path directory) {
if (Files.exists(directory)) {
try (Stream<Path> files = Files.walk(directory)) {
files.sorted(java.util.Comparator.reverseOrder())
.forEach(path -> {
try {
Files.deleteIfExists(path);
} catch (IOException e) {
// Fallback to deleteOnExit if we can't delete immediately
try {
path.toFile().deleteOnExit();
} catch (Exception ex) {
// Ignore
}
}
});
} catch (IOException e) {
// Ignore
}
}
}
private static String determineClassName(String content) {
Matcher matcher = PACKAGE_PATTERN.matcher(content);
String pn = matcher.find() ? matcher.group(1) : null;
matcher = CLASS_PATTERN.matcher(content);
String cn = matcher.find() ? matcher.group(1) : null;
String fqn;
if (pn != null) {
fqn = pn + "." + cn;
} else {
fqn = cn;
}
return fqn;
}
private static boolean isReloadRecursive(String reload) {
for (String part : reload.split(",")) {
String dir = FileUtil.onlyPath(part);
if (dir != null) {
return true;
}
}
return false;
}
private | Run |
java | quarkusio__quarkus | independent-projects/tools/devtools-common/src/main/java/io/quarkus/platform/catalog/compatibility/ExtensionCatalogCompatibility.java | {
"start": 477,
"end": 6074
} | class ____ {
/**
* Collects extension compatibility info for a given catalog. This method simply calls
* {@code forExtensions(catalog.getExtensions(), catalog)}.
*
* @param catalog extension catalog
* @return extension compatibility info for all the extensions in the catalog
*/
public static ExtensionCatalogCompatibility forCatalog(ExtensionCatalog catalog) {
return forExtensions(catalog.getExtensions(), catalog);
}
/**
* Collects extension compatibility info for specific extensions from the extension catalog.
*
* @param extensions extensions to collect the compatibility info for
* @param catalog extension catalog
* @return extension compatibility info for the provided extensions
*/
public static ExtensionCatalogCompatibility forExtensions(Iterable<Extension> extensions, ExtensionCatalog catalog) {
final Map<ArtifactKey, Extension> catalogExtMap = new HashMap<>(catalog.getExtensions().size());
for (Extension e : catalog.getExtensions()) {
catalogExtMap.put(e.getArtifact().getKey(), e);
}
final Map<ArtifactKey, Map<String, CapabilityInfo>> capInfoMap = new HashMap<>();
final List<ExtensionCapabilityInfo> branches = new ArrayList<>();
for (Extension e : extensions) {
final List<ArtifactKey> depKeys = extensionDependencies(e.getMetadata());
Map<String, CapabilityInfo> allCaps = Collections.emptyMap();
Map<String, CapabilityInfo> extCaps = providedCapabilities(e, capInfoMap);
if (!extCaps.isEmpty()) {
allCaps = new HashMap<>(extCaps);
}
for (ArtifactKey key : depKeys) {
final Extension dep = catalogExtMap.get(key);
// normally, the catalog should contain all of them
if (dep == null) {
continue;
}
extCaps = providedCapabilities(dep, capInfoMap);
if (!extCaps.isEmpty()) {
if (allCaps.isEmpty()) {
allCaps = new HashMap<>(extCaps);
} else {
allCaps.putAll(extCaps);
}
}
}
if (!allCaps.isEmpty()) {
branches.add(new ExtensionCapabilityInfo(e, allCaps));
}
}
List<ExtensionCompatibility> conflictingExtensions = Collections.emptyList();
for (ExtensionCapabilityInfo extCapInfo : branches) {
Map<ArtifactKey, Extension> conflicts = null;
for (ExtensionCapabilityInfo otherExtCapInfo : branches) {
if (otherExtCapInfo == extCapInfo) {
continue;
}
if (extCapInfo.isInConflictWith(otherExtCapInfo)) {
if (conflicts == null) {
conflicts = new HashMap<>();
}
conflicts.put(otherExtCapInfo.e.getArtifact().getKey(), otherExtCapInfo.e);
}
}
if (conflicts != null) {
if (conflictingExtensions.isEmpty()) {
conflictingExtensions = new ArrayList<>();
}
conflictingExtensions.add(new ExtensionCompatibility(extCapInfo.e, conflicts));
}
}
return new ExtensionCatalogCompatibility(conflictingExtensions);
}
private final List<ExtensionCompatibility> compatibilityInfo;
private ExtensionCatalogCompatibility(List<ExtensionCompatibility> conflictingExtensions) {
this.compatibilityInfo = Objects.requireNonNull(conflictingExtensions);
}
public Collection<ExtensionCompatibility> getExtensionCompatibility() {
return compatibilityInfo;
}
public boolean isEmpty() {
return compatibilityInfo.isEmpty();
}
private static Map<String, CapabilityInfo> providedCapabilities(Extension e,
Map<ArtifactKey, Map<String, CapabilityInfo>> capInfoMap) {
Map<String, CapabilityInfo> map = capInfoMap.get(e.getArtifact().getKey());
if (map == null) {
final List<String> providedNames = providedCapabilities(e);
if (providedNames.isEmpty()) {
map = Collections.emptyMap();
} else {
map = new HashMap<>(providedNames.size());
for (String name : providedNames) {
map.put(name, new CapabilityInfo(name, e));
}
}
capInfoMap.put(e.getArtifact().getKey(), map);
}
return map;
}
@SuppressWarnings("unchecked")
private static List<String> providedCapabilities(Extension e) {
Map<?, ?> map = (Map<?, ?>) e.getMetadata().getOrDefault("capabilities", Collections.emptyMap());
if (map.isEmpty()) {
return Collections.emptyList();
}
final List<String> caps = (List<String>) map.get("provides");
return caps == null ? Collections.emptyList() : caps;
}
@SuppressWarnings("unchecked")
private static List<ArtifactKey> extensionDependencies(Map<String, Object> metadata) {
final List<String> extDeps = (List<String>) metadata.getOrDefault("extension-dependencies", Collections.emptyList());
if (extDeps.isEmpty()) {
return Collections.emptyList();
}
return extDeps.stream().map(ArtifactKey::fromString).collect(Collectors.toList());
}
private static | ExtensionCatalogCompatibility |
java | apache__camel | components/camel-undertow/src/test/java/org/apache/camel/component/undertow/rest/RestUndertowMethodNotAllowedTest.java | {
"start": 1292,
"end": 2984
} | class ____ extends BaseUndertowTest {
@Test
public void testPostMethodNotAllowed() {
try {
template.sendBodyAndHeader("http://localhost:" + getPort() + "/users/123/basic", "body", Exchange.HTTP_METHOD,
"POST");
fail("Shall not pass!");
} catch (Exception e) {
HttpOperationFailedException hofe = assertIsInstanceOf(HttpOperationFailedException.class, e.getCause());
assertEquals(405, hofe.getStatusCode());
}
}
@Test
public void testGetMethodAllowed() {
try {
template.sendBodyAndHeader("http://localhost:" + getPort() + "/users/123/basic", "body", Exchange.HTTP_METHOD,
"GET");
} catch (Exception e) {
fail("Shall pass with GET http method!");
}
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
// configure to use undertow on localhost
restConfiguration().component("undertow").host("localhost").port(getPort());
// use the rest DSL to define the rest services
rest("/users/")
.get("{id}/basic").to("direct:basic");
from("direct:basic")
.to("mock:input")
.process(exchange -> {
String id = exchange.getIn().getHeader("id", String.class);
exchange.getMessage().setBody(id + ";Donald Duck");
});
}
};
}
}
| RestUndertowMethodNotAllowedTest |
java | apache__flink | flink-formats/flink-orc/src/main/java/org/apache/flink/orc/vector/OrcMapColumnVector.java | {
"start": 1233,
"end": 2082
} | class ____ extends AbstractOrcColumnVector
implements org.apache.flink.table.data.columnar.vector.MapColumnVector {
private final MapColumnVector hiveVector;
private final ColumnVector keyFlinkVector;
private final ColumnVector valueFlinkVector;
public OrcMapColumnVector(MapColumnVector hiveVector, MapType type) {
super(hiveVector);
this.hiveVector = hiveVector;
this.keyFlinkVector = createFlinkVector(hiveVector.keys, type.getKeyType());
this.valueFlinkVector = createFlinkVector(hiveVector.values, type.getValueType());
}
@Override
public MapData getMap(int i) {
long offset = hiveVector.offsets[i];
long length = hiveVector.lengths[i];
return new ColumnarMapData(keyFlinkVector, valueFlinkVector, (int) offset, (int) length);
}
}
| OrcMapColumnVector |
java | spring-projects__spring-framework | spring-context/src/main/java/org/springframework/context/ApplicationContext.java | {
"start": 1051,
"end": 2672
} | interface ____ provide configuration for an application.
* This is read-only while the application is running, but may be
* reloaded if the implementation supports this.
*
* <p>An ApplicationContext provides:
* <ul>
* <li>Bean factory methods for accessing application components.
* Inherited from {@link org.springframework.beans.factory.ListableBeanFactory}.
* <li>The ability to load file resources in a generic fashion.
* Inherited from the {@link org.springframework.core.io.ResourceLoader} interface.
* <li>The ability to publish events to registered listeners.
* Inherited from the {@link ApplicationEventPublisher} interface.
* <li>The ability to resolve messages, supporting internationalization.
* Inherited from the {@link MessageSource} interface.
* <li>Inheritance from a parent context. Definitions in a descendant context
* will always take priority. This means, for example, that a single parent
* context can be used by an entire web application, while each servlet has
* its own child context that is independent of that of any other servlet.
* </ul>
*
* <p>In addition to standard {@link org.springframework.beans.factory.BeanFactory}
* lifecycle capabilities, ApplicationContext implementations detect and invoke
* {@link ApplicationContextAware} beans as well as {@link ResourceLoaderAware},
* {@link ApplicationEventPublisherAware} and {@link MessageSourceAware} beans.
*
* @author Rod Johnson
* @author Juergen Hoeller
* @see ConfigurableApplicationContext
* @see org.springframework.beans.factory.BeanFactory
* @see org.springframework.core.io.ResourceLoader
*/
public | to |
java | spring-projects__spring-boot | module/spring-boot-security/src/test/java/org/springframework/boot/security/autoconfigure/actuate/web/servlet/SecurityRequestMatchersManagementContextConfigurationTests.java | {
"start": 5188,
"end": 5316
} | class ____ {
@Bean
JerseyApplicationPath jerseyApplicationPath() {
return () -> "/admin";
}
}
}
| TestJerseyConfiguration |
java | redisson__redisson | redisson/src/main/java/org/redisson/liveobject/core/RedissonObjectBuilder.java | {
"start": 9636,
"end": 13781
} | class ____ of " + rr.getTypeName() + " with codec type of " + rr.getCodec());
}
private boolean isDefaultCodec(RedissonReference rr) {
return rr.getCodec() == null
|| rr.getCodec().equals(config.getCodec().getClass().getName());
}
private Object fromReference(RedissonRxClient redisson, RedissonReference rr) throws ReflectiveOperationException {
Class<?> type = rr.getRxJavaType();
/**
* Live Object from reference in rxjava client is not supported yet.
*/
return getObject(redisson, rr, type, codecProvider);
}
private Object fromReference(RedissonReactiveClient redisson, RedissonReference rr) throws ReflectiveOperationException {
Class<?> type = rr.getReactiveType();
/**
* Live Object from reference in reactive client is not supported yet.
*/
return getObject(redisson, rr, type, codecProvider);
}
public RedissonReference toReference(Object object) {
if (object != null && ClassUtils.isAnnotationPresent(object.getClass(), REntity.class)) {
throw new IllegalArgumentException("REntity should be attached to Redisson before save");
}
if (object instanceof RObject && !(object instanceof RLiveObject)) {
Class<?> clazz = object.getClass().getInterfaces()[0];
RObject rObject = (RObject) object;
if (rObject.getCodec() != null) {
codecProvider.registerCodec((Class) rObject.getCodec().getClass(), rObject.getCodec());
}
return new RedissonReference(clazz, rObject.getName(), rObject.getCodec());
}
if (object instanceof RObjectReactive && !(object instanceof RLiveObject)) {
Class<?> clazz = object.getClass().getInterfaces()[0];
RObjectReactive rObject = (RObjectReactive) object;
if (rObject.getCodec() != null) {
codecProvider.registerCodec((Class) rObject.getCodec().getClass(), rObject.getCodec());
}
return new RedissonReference(clazz, rObject.getName(), rObject.getCodec());
}
if (object instanceof RObjectRx && !(object instanceof RLiveObject)) {
Class<?> clazz = object.getClass().getInterfaces()[0];
RObjectRx rObject = (RObjectRx) object;
if (rObject.getCodec() != null) {
codecProvider.registerCodec((Class) rObject.getCodec().getClass(), rObject.getCodec());
}
return new RedissonReference(clazz, rObject.getName(), rObject.getCodec());
}
try {
if (object instanceof RLiveObject) {
Class<?> rEntity = object.getClass().getSuperclass();
NamingScheme ns = getNamingScheme(rEntity);
return new RedissonReference(rEntity,
ns.getName(rEntity, ((RLiveObject) object).getLiveObjectId()));
}
} catch (Exception e) {
throw new IllegalArgumentException(e);
}
return null;
}
private <T extends RObject, K extends Codec> T createRObject(RedissonClient redisson, Class<T> expectedType, String name, K codec) throws ReflectiveOperationException {
Class<?>[] interfaces = expectedType.getInterfaces();
for (Class<?> iType : interfaces) {
boolean isDefaultCodec = codec.getClass() == config.getCodec().getClass();
if (isDefaultCodec) {
Method builder = DEFAULT_CODEC_REFERENCES.get(iType);
if (builder != null) {
return (T) builder.invoke(redisson, name);
}
} else {
Method builder = CUSTOM_CODEC_REFERENCES.get(iType);
if (builder != null) {
return (T) builder.invoke(redisson, name, codec);
}
}
}
String codecName = null;
if (codec != null) {
codecName = codec.getClass().getName();
}
throw new ClassNotFoundException("No RObject is found to match | type |
java | junit-team__junit5 | jupiter-tests/src/test/java/org/junit/jupiter/engine/extension/ProgrammaticExtensionRegistrationTests.java | {
"start": 19250,
"end": 19482
} | class ____ extends AbstractTestCase {
@SuppressWarnings("JUnitMalformedDeclaration")
@RegisterExtension
Object extension = "not an extension type";
}
static | InstanceLevelExtensionRegistrationWithNonExtensionFieldValueTestCase |
java | spring-projects__spring-boot | module/spring-boot-micrometer-metrics/src/test/java/org/springframework/boot/micrometer/metrics/autoconfigure/logging/logback/LogbackMetricsAutoConfigurationTests.java | {
"start": 2044,
"end": 2180
} | class ____ {
@Bean
LogbackMetrics customLogbackMetrics() {
return new LogbackMetrics();
}
}
}
| CustomLogbackMetricsConfiguration |
java | quarkusio__quarkus | extensions/vertx-http/deployment/src/test/java/io/quarkus/vertx/http/NonApplicationAndRootPathTest.java | {
"start": 724,
"end": 2156
} | class ____ {
private static final String APP_PROPS = "" +
"quarkus.http.root-path=/api\n";
@RegisterExtension
static final QuarkusUnitTest config = new QuarkusUnitTest()
.withApplicationRoot((jar) -> jar
.addAsResource(new StringAsset(APP_PROPS), "application.properties")
.addClasses(MyObserver.class))
.addBuildChainCustomizer(buildCustomizer());
static Consumer<BuildChainBuilder> buildCustomizer() {
return new Consumer<BuildChainBuilder>() {
@Override
public void accept(BuildChainBuilder builder) {
builder.addBuildStep(new BuildStep() {
@Override
public void execute(BuildContext context) {
NonApplicationRootPathBuildItem buildItem = context.consume(NonApplicationRootPathBuildItem.class);
context.produce(buildItem.routeBuilder()
.route("non-app-relative")
.handler(new MyHandler())
.blockingRoute()
.build());
}
}).produces(RouteBuildItem.class)
.consumes(NonApplicationRootPathBuildItem.class)
.build();
}
};
}
public static | NonApplicationAndRootPathTest |
java | apache__camel | core/camel-core-model/src/main/java/org/apache/camel/model/cloud/ServiceCallDefinition.java | {
"start": 1881,
"end": 23040
} | class ____ extends NoOutputDefinition<ServiceCallDefinition> {
@XmlAttribute
@Metadata(required = true)
private String name;
@XmlAttribute
private String uri;
@XmlAttribute
@Metadata(defaultValue = ServiceCallDefinitionConstants.DEFAULT_COMPONENT)
private String component;
@XmlAttribute
@Metadata(javaType = "org.apache.camel.ExchangePattern", enums = "InOnly,InOut")
private String pattern;
@XmlAttribute
private String configurationRef;
@XmlAttribute
private String serviceDiscoveryRef;
@XmlTransient
private ServiceDiscovery serviceDiscovery;
@XmlAttribute
private String serviceFilterRef;
@XmlTransient
private ServiceFilter serviceFilter;
@XmlAttribute
private String serviceChooserRef;
@XmlTransient
private ServiceChooser serviceChooser;
@XmlAttribute
private String loadBalancerRef;
@XmlTransient
private ServiceLoadBalancer loadBalancer;
@XmlAttribute
private String expressionRef;
@XmlTransient
private Expression expression;
@XmlElements({
@XmlElement(name = "cachingServiceDiscovery", type = CachingServiceCallServiceDiscoveryConfiguration.class),
@XmlElement(name = "combinedServiceDiscovery", type = CombinedServiceCallServiceDiscoveryConfiguration.class),
@XmlElement(name = "consulServiceDiscovery", type = ConsulServiceCallServiceDiscoveryConfiguration.class),
@XmlElement(name = "dnsServiceDiscovery", type = DnsServiceCallServiceDiscoveryConfiguration.class),
@XmlElement(name = "kubernetesServiceDiscovery", type = KubernetesServiceCallServiceDiscoveryConfiguration.class),
@XmlElement(name = "staticServiceDiscovery", type = StaticServiceCallServiceDiscoveryConfiguration.class),
@XmlElement(name = "zookeeperServiceDiscovery", type = ZooKeeperServiceCallServiceDiscoveryConfiguration.class) })
private ServiceCallServiceDiscoveryConfiguration serviceDiscoveryConfiguration;
@XmlElements({
@XmlElement(name = "blacklistServiceFilter", type = BlacklistServiceCallServiceFilterConfiguration.class),
@XmlElement(name = "combinedServiceFilter", type = CombinedServiceCallServiceFilterConfiguration.class),
@XmlElement(name = "customServiceFilter", type = CustomServiceCallServiceFilterConfiguration.class),
@XmlElement(name = "healthyServiceFilter", type = HealthyServiceCallServiceFilterConfiguration.class),
@XmlElement(name = "passThroughServiceFilter", type = PassThroughServiceCallServiceFilterConfiguration.class) })
private ServiceCallServiceFilterConfiguration serviceFilterConfiguration;
@XmlElements({
@XmlElement(name = "defaultLoadBalancer", type = DefaultServiceCallServiceLoadBalancerConfiguration.class) })
private ServiceCallServiceLoadBalancerConfiguration loadBalancerConfiguration;
@XmlElement(name = "expression")
private ServiceCallExpressionConfiguration expressionConfiguration;
public ServiceCallDefinition() {
}
public ServiceCallDefinition(String name) {
this.name = name;
}
@Override
public ProcessorDefinition<?> copyDefinition() {
// deprecated so we do not implement copy
return this;
}
@Override
public String toString() {
return "ServiceCall[" + name + "]";
}
@Override
public String getShortName() {
return "serviceCall";
}
@Override
public String getLabel() {
return "serviceCall";
}
// *****************************
// Properties
// *****************************
public String getName() {
return name;
}
/**
* Sets the name of the service to use
*/
public void setName(String name) {
this.name = name;
}
public String getPattern() {
return pattern;
}
/**
* Sets the optional {@link ExchangePattern} used to invoke this endpoint
*/
public void setPattern(String pattern) {
this.pattern = pattern;
}
public String getConfigurationRef() {
return configurationRef;
}
/**
* Refers to a ServiceCall configuration to use
*/
public void setConfigurationRef(String configurationRef) {
this.configurationRef = configurationRef;
}
public String getUri() {
return uri;
}
/**
* The uri of the endpoint to send to. The uri can be dynamic computed using the
* {@link org.apache.camel.language.simple.SimpleLanguage} expression.
*/
public void setUri(String uri) {
this.uri = uri;
}
public String getComponent() {
return component;
}
/**
* The component to use.
*/
public void setComponent(String component) {
this.component = component;
}
public String getServiceDiscoveryRef() {
return serviceDiscoveryRef;
}
/**
* Sets a reference to a custom {@link ServiceDiscovery} to use.
*/
public void setServiceDiscoveryRef(String serviceDiscoveryRef) {
this.serviceDiscoveryRef = serviceDiscoveryRef;
}
public ServiceDiscovery getServiceDiscovery() {
return serviceDiscovery;
}
/**
* Sets a custom {@link ServiceDiscovery} to use.
*/
public void setServiceDiscovery(ServiceDiscovery serviceDiscovery) {
this.serviceDiscovery = serviceDiscovery;
}
public String getServiceFilterRef() {
return serviceFilterRef;
}
/**
* Sets a reference to a custom {@link ServiceFilter} to use.
*/
public void setServiceFilterRef(String serviceFilterRef) {
this.serviceFilterRef = serviceFilterRef;
}
public ServiceFilter getServiceFilter() {
return serviceFilter;
}
/**
* Sets a custom {@link ServiceFilter} to use.
*/
public void setServiceFilter(ServiceFilter serviceFilter) {
this.serviceFilter = serviceFilter;
}
public String getServiceChooserRef() {
return serviceChooserRef;
}
/**
* Sets a reference to a custom {@link ServiceChooser} to use.
*/
public void setServiceChooserRef(String serviceChooserRef) {
this.serviceChooserRef = serviceChooserRef;
}
public ServiceChooser getServiceChooser() {
return serviceChooser;
}
/**
* Sets a custom {@link ServiceChooser} to use.
*/
public void setServiceChooser(ServiceChooser serviceChooser) {
this.serviceChooser = serviceChooser;
}
public String getLoadBalancerRef() {
return loadBalancerRef;
}
/**
* Sets a reference to a custom {@link ServiceLoadBalancer} to use.
*/
public void setLoadBalancerRef(String loadBalancerRef) {
this.loadBalancerRef = loadBalancerRef;
}
public ServiceLoadBalancer getLoadBalancer() {
return loadBalancer;
}
/**
* Sets a custom {@link ServiceLoadBalancer} to use.
*/
public void setLoadBalancer(ServiceLoadBalancer loadBalancer) {
this.loadBalancer = loadBalancer;
}
public String getExpressionRef() {
return expressionRef;
}
/**
* Set a reference to a custom {@link Expression} to use.
*/
public void setExpressionRef(String expressionRef) {
if (this.expressionConfiguration == null) {
this.expressionConfiguration = new ServiceCallExpressionConfiguration();
}
this.expressionConfiguration.expressionType(new RefExpression(expressionRef));
}
public Expression getExpression() {
return expression;
}
/**
* Set a custom {@link Expression} to use.
*/
public void setExpression(Expression expression) {
this.expression = expression;
}
public ServiceCallServiceDiscoveryConfiguration getServiceDiscoveryConfiguration() {
return serviceDiscoveryConfiguration;
}
/**
* Configures the ServiceDiscovery using the given configuration.
*/
public void setServiceDiscoveryConfiguration(ServiceCallServiceDiscoveryConfiguration serviceDiscoveryConfiguration) {
this.serviceDiscoveryConfiguration = serviceDiscoveryConfiguration;
}
public ServiceCallServiceFilterConfiguration getServiceFilterConfiguration() {
return serviceFilterConfiguration;
}
/**
* Configures the ServiceFilter using the given configuration.
*/
public void setServiceFilterConfiguration(ServiceCallServiceFilterConfiguration serviceFilterConfiguration) {
this.serviceFilterConfiguration = serviceFilterConfiguration;
}
public ServiceCallServiceLoadBalancerConfiguration getLoadBalancerConfiguration() {
return loadBalancerConfiguration;
}
/**
* Configures the LoadBalancer using the given configuration.
*/
public void setLoadBalancerConfiguration(ServiceCallServiceLoadBalancerConfiguration loadBalancerConfiguration) {
this.loadBalancerConfiguration = loadBalancerConfiguration;
}
public ServiceCallExpressionConfiguration getExpressionConfiguration() {
return expressionConfiguration;
}
/**
* Configures the Expression using the given configuration.
*/
public void setExpressionConfiguration(ServiceCallExpressionConfiguration expressionConfiguration) {
this.expressionConfiguration = expressionConfiguration;
}
// *****************************
// Fluent API
// *****************************
/**
* Sets the optional {@link ExchangePattern} used to invoke this endpoint
*/
public ServiceCallDefinition pattern(ExchangePattern pattern) {
return pattern(pattern.name());
}
/**
* Sets the optional {@link ExchangePattern} used to invoke this endpoint
*/
public ServiceCallDefinition pattern(String pattern) {
setPattern(pattern);
return this;
}
/**
* Sets the name of the service to use
*/
public ServiceCallDefinition name(String name) {
setName(name);
return this;
}
/**
* Sets the uri of the service to use
*/
public ServiceCallDefinition uri(String uri) {
setUri(uri);
return this;
}
/**
* Sets the component to use
*/
public ServiceCallDefinition component(String component) {
setComponent(component);
return this;
}
/**
* Refers to a ServiceCall configuration to use
*/
public ServiceCallDefinition serviceCallConfiguration(String ref) {
configurationRef = ref;
return this;
}
/**
* Sets a reference to a custom {@link ServiceDiscovery} to use.
*/
public ServiceCallDefinition serviceDiscovery(String serviceDiscoveryRef) {
setServiceDiscoveryRef(serviceDiscoveryRef);
return this;
}
/**
* Sets a custom {@link ServiceDiscovery} to use.
*/
public ServiceCallDefinition serviceDiscovery(ServiceDiscovery serviceDiscovery) {
setServiceDiscovery(serviceDiscovery);
return this;
}
/**
* Sets a reference to a custom {@link ServiceFilter} to use.
*/
public ServiceCallDefinition serviceFilter(String serviceFilterRef) {
setServiceDiscoveryRef(serviceDiscoveryRef);
return this;
}
/**
* Sets a custom {@link ServiceFilter} to use.
*/
public ServiceCallDefinition serviceFilter(ServiceFilter serviceFilter) {
setServiceFilter(serviceFilter);
return this;
}
/**
* Sets a reference to a custom {@link ServiceChooser} to use.
*/
public ServiceCallDefinition serviceChooser(String serviceChooserRef) {
setServiceChooserRef(serviceChooserRef);
return this;
}
/**
* Sets a custom {@link ServiceChooser} to use.
*/
public ServiceCallDefinition serviceChooser(ServiceChooser serviceChooser) {
setServiceChooser(serviceChooser);
return this;
}
/**
* Sets a reference to a custom {@link ServiceLoadBalancer} to use.
*/
public ServiceCallDefinition loadBalancer(String loadBalancerRef) {
setLoadBalancerRef(loadBalancerRef);
return this;
}
/**
* Sets a custom {@link ServiceLoadBalancer} to use.
*/
public ServiceCallDefinition loadBalancer(ServiceLoadBalancer loadBalancer) {
setLoadBalancer(loadBalancer);
return this;
}
/**
* Sets a reference to a custom {@link Expression} to use.
*/
public ServiceCallDefinition expression(String expressionRef) {
setExpressionRef(expressionRef);
return this;
}
/**
* Sets a custom {@link Expression} to use.
*/
public ServiceCallDefinition expression(Expression expression) {
setExpression(expression);
return this;
}
/**
* Sets a custom {@link Expression} to use through an expression builder clause.
*
* @return a expression builder clause to set the body
*/
public ExpressionClause<ServiceCallDefinition> expression() {
ExpressionClause<ServiceCallDefinition> clause = new ExpressionClause<>(this);
setExpression(clause);
return clause;
}
/**
* Configures the ServiceDiscovery using the given configuration.
*/
public ServiceCallDefinition serviceDiscoveryConfiguration(
ServiceCallServiceDiscoveryConfiguration serviceDiscoveryConfiguration) {
setServiceDiscoveryConfiguration(serviceDiscoveryConfiguration);
return this;
}
/**
* Configures the ServiceFilter using the given configuration.
*/
public ServiceCallDefinition serviceFilterConfiguration(ServiceCallServiceFilterConfiguration serviceFilterConfiguration) {
setServiceFilterConfiguration(serviceFilterConfiguration);
return this;
}
/**
* Configures the LoadBalancer using the given configuration.
*/
public ServiceCallDefinition loadBalancerConfiguration(
ServiceCallServiceLoadBalancerConfiguration loadBalancerConfiguration) {
setLoadBalancerConfiguration(loadBalancerConfiguration);
return this;
}
/**
* Configures the Expression using the given configuration.
*/
public ServiceCallDefinition expressionConfiguration(ServiceCallExpressionConfiguration expressionConfiguration) {
setExpressionConfiguration(expressionConfiguration);
return this;
}
// *****************************
// Shortcuts - ServiceDiscovery
// *****************************
public CachingServiceCallServiceDiscoveryConfiguration cachingServiceDiscovery() {
CachingServiceCallServiceDiscoveryConfiguration conf = new CachingServiceCallServiceDiscoveryConfiguration(this);
setServiceDiscoveryConfiguration(conf);
return conf;
}
public ConsulServiceCallServiceDiscoveryConfiguration consulServiceDiscovery() {
ConsulServiceCallServiceDiscoveryConfiguration conf = new ConsulServiceCallServiceDiscoveryConfiguration(this);
setServiceDiscoveryConfiguration(conf);
return conf;
}
public ServiceCallDefinition consulServiceDiscovery(String url) {
ConsulServiceCallServiceDiscoveryConfiguration conf = new ConsulServiceCallServiceDiscoveryConfiguration(this);
conf.setUrl(url);
setServiceDiscoveryConfiguration(conf);
return this;
}
public DnsServiceCallServiceDiscoveryConfiguration dnsServiceDiscovery() {
DnsServiceCallServiceDiscoveryConfiguration conf = new DnsServiceCallServiceDiscoveryConfiguration(this);
setServiceDiscoveryConfiguration(conf);
return conf;
}
public ServiceCallDefinition dnsServiceDiscovery(String domain) {
DnsServiceCallServiceDiscoveryConfiguration conf = new DnsServiceCallServiceDiscoveryConfiguration(this);
conf.setDomain(domain);
setServiceDiscoveryConfiguration(conf);
return this;
}
public ServiceCallDefinition dnsServiceDiscovery(String domain, String protocol) {
DnsServiceCallServiceDiscoveryConfiguration conf = new DnsServiceCallServiceDiscoveryConfiguration(this);
conf.setDomain(domain);
conf.setProto(protocol);
setServiceDiscoveryConfiguration(conf);
return this;
}
public KubernetesServiceCallServiceDiscoveryConfiguration kubernetesServiceDiscovery() {
KubernetesServiceCallServiceDiscoveryConfiguration conf = new KubernetesServiceCallServiceDiscoveryConfiguration(this);
setServiceDiscoveryConfiguration(conf);
return conf;
}
public KubernetesServiceCallServiceDiscoveryConfiguration kubernetesClientServiceDiscovery() {
KubernetesServiceCallServiceDiscoveryConfiguration conf = new KubernetesServiceCallServiceDiscoveryConfiguration(this);
conf.setLookup("client");
setServiceDiscoveryConfiguration(conf);
return conf;
}
public ServiceCallDefinition kubernetesEnvServiceDiscovery() {
KubernetesServiceCallServiceDiscoveryConfiguration conf = new KubernetesServiceCallServiceDiscoveryConfiguration(this);
conf.setLookup("environment");
setServiceDiscoveryConfiguration(conf);
return this;
}
public ServiceCallDefinition kubernetesDnsServiceDiscovery(String namespace, String domain) {
KubernetesServiceCallServiceDiscoveryConfiguration conf = new KubernetesServiceCallServiceDiscoveryConfiguration(this);
conf.setLookup("dns");
conf.setNamespace(namespace);
conf.setDnsDomain(domain);
setServiceDiscoveryConfiguration(conf);
return this;
}
public CombinedServiceCallServiceDiscoveryConfiguration combinedServiceDiscovery() {
CombinedServiceCallServiceDiscoveryConfiguration conf = new CombinedServiceCallServiceDiscoveryConfiguration(this);
setServiceDiscoveryConfiguration(conf);
return conf;
}
public StaticServiceCallServiceDiscoveryConfiguration staticServiceDiscovery() {
StaticServiceCallServiceDiscoveryConfiguration conf = new StaticServiceCallServiceDiscoveryConfiguration(this);
setServiceDiscoveryConfiguration(conf);
return conf;
}
public ZooKeeperServiceCallServiceDiscoveryConfiguration zookeeperServiceDiscovery() {
ZooKeeperServiceCallServiceDiscoveryConfiguration conf = new ZooKeeperServiceCallServiceDiscoveryConfiguration(this);
setServiceDiscoveryConfiguration(conf);
return conf;
}
public ServiceCallDefinition zookeeperServiceDiscovery(String nodes, String basePath) {
ZooKeeperServiceCallServiceDiscoveryConfiguration conf = new ZooKeeperServiceCallServiceDiscoveryConfiguration(this);
conf.setNodes(nodes);
conf.setBasePath(basePath);
setServiceDiscoveryConfiguration(conf);
return this;
}
// *****************************
// Shortcuts - ServiceFilter
// *****************************
public ServiceCallDefinition healthyFilter() {
HealthyServiceCallServiceFilterConfiguration conf = new HealthyServiceCallServiceFilterConfiguration(this);
setServiceFilterConfiguration(conf);
return this;
}
public ServiceCallDefinition passThroughFilter() {
PassThroughServiceCallServiceFilterConfiguration conf = new PassThroughServiceCallServiceFilterConfiguration(this);
setServiceFilterConfiguration(conf);
return this;
}
public CombinedServiceCallServiceFilterConfiguration combinedFilter() {
CombinedServiceCallServiceFilterConfiguration conf = new CombinedServiceCallServiceFilterConfiguration(this);
setServiceFilterConfiguration(conf);
return conf;
}
public BlacklistServiceCallServiceFilterConfiguration blacklistFilter() {
BlacklistServiceCallServiceFilterConfiguration conf = new BlacklistServiceCallServiceFilterConfiguration();
setServiceFilterConfiguration(conf);
return conf;
}
public ServiceCallDefinition customFilter(String serviceFilter) {
CustomServiceCallServiceFilterConfiguration conf = new CustomServiceCallServiceFilterConfiguration();
conf.setServiceFilterRef(serviceFilter);
setServiceFilterConfiguration(conf);
return this;
}
public ServiceCallDefinition customFilter(ServiceFilter serviceFilter) {
CustomServiceCallServiceFilterConfiguration conf = new CustomServiceCallServiceFilterConfiguration();
conf.setServiceFilter(serviceFilter);
setServiceFilterConfiguration(conf);
return this;
}
// *****************************
// Shortcuts - LoadBalancer
// *****************************
public ServiceCallDefinition defaultLoadBalancer() {
DefaultServiceCallServiceLoadBalancerConfiguration conf = new DefaultServiceCallServiceLoadBalancerConfiguration();
setLoadBalancerConfiguration(conf);
return this;
}
}
| ServiceCallDefinition |
java | apache__hadoop | hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapred/TestJobAclsManager.java | {
"start": 1343,
"end": 5910
} | class ____ {
@Test
public void testClusterAdmins() {
Map<JobACL, AccessControlList> tmpJobACLs = new HashMap<JobACL, AccessControlList>();
Configuration conf = new Configuration();
String jobOwner = "testuser";
conf.set(JobACL.VIEW_JOB.getAclName(), jobOwner);
conf.set(JobACL.MODIFY_JOB.getAclName(), jobOwner);
conf.setBoolean(MRConfig.MR_ACLS_ENABLED, true);
String clusterAdmin = "testuser2";
conf.set(MRConfig.MR_ADMINS, clusterAdmin);
JobACLsManager aclsManager = new JobACLsManager(conf);
tmpJobACLs = aclsManager.constructJobACLs(conf);
final Map<JobACL, AccessControlList> jobACLs = tmpJobACLs;
UserGroupInformation callerUGI = UserGroupInformation.createUserForTesting(
clusterAdmin, new String[] {});
// cluster admin should have access
boolean val = aclsManager.checkAccess(callerUGI, JobACL.VIEW_JOB, jobOwner,
jobACLs.get(JobACL.VIEW_JOB));
assertTrue(val, "cluster admin should have view access");
val = aclsManager.checkAccess(callerUGI, JobACL.MODIFY_JOB, jobOwner,
jobACLs.get(JobACL.MODIFY_JOB));
assertTrue(val, "cluster admin should have modify access");
}
@Test
public void testClusterNoAdmins() {
Map<JobACL, AccessControlList> tmpJobACLs = new HashMap<JobACL, AccessControlList>();
Configuration conf = new Configuration();
String jobOwner = "testuser";
conf.set(JobACL.VIEW_JOB.getAclName(), "");
conf.setBoolean(MRConfig.MR_ACLS_ENABLED, true);
String noAdminUser = "testuser2";
JobACLsManager aclsManager = new JobACLsManager(conf);
tmpJobACLs = aclsManager.constructJobACLs(conf);
final Map<JobACL, AccessControlList> jobACLs = tmpJobACLs;
UserGroupInformation callerUGI = UserGroupInformation.createUserForTesting(
noAdminUser, new String[] {});
// random user should not have access
boolean val = aclsManager.checkAccess(callerUGI, JobACL.VIEW_JOB, jobOwner,
jobACLs.get(JobACL.VIEW_JOB));
assertFalse(val, "random user should not have view access");
val = aclsManager.checkAccess(callerUGI, JobACL.MODIFY_JOB, jobOwner,
jobACLs.get(JobACL.MODIFY_JOB));
assertFalse(val, "random user should not have modify access");
callerUGI = UserGroupInformation.createUserForTesting(jobOwner,
new String[] {});
// Owner should have access
val = aclsManager.checkAccess(callerUGI, JobACL.VIEW_JOB, jobOwner,
jobACLs.get(JobACL.VIEW_JOB));
assertTrue(val, "owner should have view access");
val = aclsManager.checkAccess(callerUGI, JobACL.MODIFY_JOB, jobOwner,
jobACLs.get(JobACL.MODIFY_JOB));
assertTrue(val, "owner should have modify access");
}
@Test
public void testAclsOff() {
Map<JobACL, AccessControlList> tmpJobACLs = new HashMap<JobACL, AccessControlList>();
Configuration conf = new Configuration();
String jobOwner = "testuser";
conf.set(JobACL.VIEW_JOB.getAclName(), jobOwner);
conf.setBoolean(MRConfig.MR_ACLS_ENABLED, false);
String noAdminUser = "testuser2";
JobACLsManager aclsManager = new JobACLsManager(conf);
tmpJobACLs = aclsManager.constructJobACLs(conf);
final Map<JobACL, AccessControlList> jobACLs = tmpJobACLs;
UserGroupInformation callerUGI = UserGroupInformation.createUserForTesting(
noAdminUser, new String[] {});
// acls off so anyone should have access
boolean val = aclsManager.checkAccess(callerUGI, JobACL.VIEW_JOB, jobOwner,
jobACLs.get(JobACL.VIEW_JOB));
assertTrue(val, "acls off so anyone should have access");
}
@Test
public void testGroups() {
Map<JobACL, AccessControlList> tmpJobACLs = new HashMap<JobACL, AccessControlList>();
Configuration conf = new Configuration();
String jobOwner = "testuser";
conf.set(JobACL.VIEW_JOB.getAclName(), jobOwner);
conf.setBoolean(MRConfig.MR_ACLS_ENABLED, true);
String user = "testuser2";
String adminGroup = "adminGroup";
conf.set(MRConfig.MR_ADMINS, " " + adminGroup);
JobACLsManager aclsManager = new JobACLsManager(conf);
tmpJobACLs = aclsManager.constructJobACLs(conf);
final Map<JobACL, AccessControlList> jobACLs = tmpJobACLs;
UserGroupInformation callerUGI = UserGroupInformation.createUserForTesting(
user, new String[] {adminGroup});
// acls off so anyone should have access
boolean val = aclsManager.checkAccess(callerUGI, JobACL.VIEW_JOB, jobOwner,
jobACLs.get(JobACL.VIEW_JOB));
assertTrue(val, "user in admin group should have access");
}
}
| TestJobAclsManager |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/index/codec/vectors/es816/OffHeapBinarizedVectorValues.java | {
"start": 10430,
"end": 13437
} | class ____ extends OffHeapBinarizedVectorValues {
private final DirectMonotonicReader ordToDoc;
private final IndexedDISI disi;
// dataIn was used to init a new IndexedDIS for #randomAccess()
private final IndexInput dataIn;
private final OrdToDocDISIReaderConfiguration configuration;
SparseOffHeapVectorValues(
OrdToDocDISIReaderConfiguration configuration,
int dimension,
int size,
float[] centroid,
float centroidDp,
BinaryQuantizer binaryQuantizer,
IndexInput dataIn,
VectorSimilarityFunction similarityFunction,
FlatVectorsScorer vectorsScorer,
IndexInput slice
) throws IOException {
super(dimension, size, centroid, centroidDp, binaryQuantizer, similarityFunction, vectorsScorer, slice);
this.configuration = configuration;
this.dataIn = dataIn;
this.ordToDoc = configuration.getDirectMonotonicReader(dataIn);
this.disi = configuration.getIndexedDISI(dataIn);
}
@Override
public SparseOffHeapVectorValues copy() throws IOException {
return new SparseOffHeapVectorValues(
configuration,
dimension,
size,
centroid,
centroidDp,
binaryQuantizer,
dataIn,
similarityFunction,
vectorsScorer,
slice.clone()
);
}
@Override
public int ordToDoc(int ord) {
return (int) ordToDoc.get(ord);
}
@Override
public Bits getAcceptOrds(Bits acceptDocs) {
if (acceptDocs == null) {
return null;
}
return new Bits() {
@Override
public boolean get(int index) {
return acceptDocs.get(ordToDoc(index));
}
@Override
public int length() {
return size;
}
};
}
@Override
public DocIndexIterator iterator() {
return IndexedDISI.asDocIndexIterator(disi);
}
@Override
public VectorScorer scorer(float[] target) throws IOException {
SparseOffHeapVectorValues copy = copy();
DocIndexIterator iterator = copy.iterator();
RandomVectorScorer scorer = vectorsScorer.getRandomVectorScorer(similarityFunction, copy, target);
return new VectorScorer() {
@Override
public float score() throws IOException {
return scorer.score(iterator.index());
}
@Override
public DocIdSetIterator iterator() {
return iterator;
}
};
}
}
private static | SparseOffHeapVectorValues |
java | spring-projects__spring-data-jpa | spring-data-jpa/src/main/java/org/springframework/data/jpa/repository/query/QueryParameterSetter.java | {
"start": 2390,
"end": 4088
} | class ____ implements QueryParameterSetter {
private final Function<JpaParametersParameterAccessor, Object> valueExtractor;
private final Parameter<?> parameter;
/**
* @param valueExtractor must not be {@literal null}.
* @param parameter must not be {@literal null}.
*/
private NamedOrIndexedQueryParameterSetter(Function<JpaParametersParameterAccessor, Object> valueExtractor,
Parameter<?> parameter) {
Assert.notNull(valueExtractor, "ValueExtractor must not be null");
this.valueExtractor = valueExtractor;
this.parameter = parameter;
}
@Override
public void setParameter(BindableQuery query, JpaParametersParameterAccessor accessor, ErrorHandler errorHandler) {
Object value = valueExtractor.apply(accessor);
try {
setParameter(query, value, errorHandler);
} catch (RuntimeException e) {
errorHandler.handleError(e);
}
}
@SuppressWarnings("unchecked")
private void setParameter(BindableQuery query, Object value, ErrorHandler errorHandler) {
if (parameter instanceof ParameterExpression) {
query.setParameter((Parameter<Object>) parameter, value);
} else if (query.hasNamedParameters() && parameter.getName() != null) {
query.setParameter(parameter.getName(), value);
} else {
Integer position = parameter.getPosition();
if (position != null //
&& (query.getParameters().size() >= position //
|| errorHandler == LENIENT //
|| query.registerExcessParameters())) {
query.setParameter(position, value);
}
}
}
}
/**
* {@link QueryParameterSetter} for named or indexed parameters that have a {@link TemporalType} specified.
*/
| NamedOrIndexedQueryParameterSetter |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/FilterAggregationBuilder.java | {
"start": 2034,
"end": 5687
} | class ____ extends AbstractAggregationBuilder<FilterAggregationBuilder> {
public static final String NAME = "filter";
private final QueryBuilder filter;
/**
* @param name
* the name of this aggregation
* @param filter
* Set the filter to use, only documents that match this
* filter will fall into the bucket defined by this
* {@link SingleBucketAggregation} aggregation.
*/
public FilterAggregationBuilder(String name, QueryBuilder filter) {
super(name);
if (filter == null) {
throw new IllegalArgumentException("[filter] must not be null: [" + name + "]");
}
this.filter = filter;
}
protected FilterAggregationBuilder(
FilterAggregationBuilder clone,
AggregatorFactories.Builder factoriesBuilder,
Map<String, Object> metadata
) {
super(clone, factoriesBuilder, metadata);
this.filter = clone.filter;
}
@Override
protected AggregationBuilder shallowCopy(AggregatorFactories.Builder factoriesBuilder, Map<String, Object> metadata) {
return new FilterAggregationBuilder(this, factoriesBuilder, metadata);
}
/**
* Read from a stream.
*/
public FilterAggregationBuilder(StreamInput in) throws IOException {
super(in);
filter = in.readNamedWriteable(QueryBuilder.class);
}
@Override
public boolean supportsSampling() {
return true;
}
@Override
protected void doWriteTo(StreamOutput out) throws IOException {
out.writeNamedWriteable(filter);
}
@Override
public BucketCardinality bucketCardinality() {
return BucketCardinality.ONE;
}
@Override
protected AggregationBuilder doRewrite(QueryRewriteContext queryRewriteContext) throws IOException {
QueryBuilder result = Rewriteable.rewrite(filter, queryRewriteContext);
if (result != filter) {
return new FilterAggregationBuilder(getName(), result);
}
return this;
}
@Override
protected AggregatorFactory doBuild(
AggregationContext context,
AggregatorFactory parent,
AggregatorFactories.Builder subFactoriesBuilder
) throws IOException {
return new FilterAggregatorFactory(filter, name, context, parent, subFactoriesBuilder, metadata);
}
@Override
protected XContentBuilder internalXContent(XContentBuilder builder, Params params) throws IOException {
if (filter != null) {
filter.toXContent(builder, params);
}
return builder;
}
public static FilterAggregationBuilder parse(XContentParser parser, String aggregationName) throws IOException {
QueryBuilder filter = parseTopLevelQuery(parser);
return new FilterAggregationBuilder(aggregationName, filter);
}
@Override
public int hashCode() {
return Objects.hash(super.hashCode(), filter);
}
@Override
public boolean equals(Object obj) {
if (this == obj) return true;
if (obj == null || getClass() != obj.getClass()) return false;
if (super.equals(obj) == false) return false;
FilterAggregationBuilder other = (FilterAggregationBuilder) obj;
return Objects.equals(filter, other.filter);
}
@Override
public String getType() {
return NAME;
}
public QueryBuilder getFilter() {
return filter;
}
@Override
public TransportVersion getMinimalSupportedVersion() {
return TransportVersion.zero();
}
public static | FilterAggregationBuilder |
java | apache__flink | flink-runtime/src/test/java/org/apache/flink/runtime/operators/chaining/ChainTaskTest.java | {
"start": 10374,
"end": 11241
} | class ____
implements GroupReduceFunction<Record, Record>, GroupCombineFunction<Record, Record> {
private static final long serialVersionUID = 1L;
private int cnt = 0;
@Override
public void reduce(Iterable<Record> records, Collector<Record> out) throws Exception {
if (++this.cnt >= 5) {
throw new RuntimeException("Expected Test Exception");
}
for (Record r : records) {
out.collect(r);
}
}
@Override
public void combine(Iterable<Record> values, Collector<Record> out) throws Exception {
reduce(values, out);
}
}
/**
* FlatMap function that outputs the last emitted element when closing.
*
* @param <T> Input and output type.
*/
public static | MockFailingCombineStub |
java | apache__commons-lang | src/main/java/org/apache/commons/lang3/ThreadUtils.java | {
"start": 1355,
"end": 1545
} | class ____ {
/**
* A predicate implementation which always returns true.
*
* @deprecated Use a {@link Predicate}.
*/
@Deprecated
private static final | ThreadUtils |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/webapp/view/TestHtmlPage.java | {
"start": 1448,
"end": 2324
} | class ____ extends HtmlPage {
@Override
public void render(Page.HTML<__> html) {
html.
title("short test").
p().__("should throw");
}
}
@Test
void testUsual() {
Injector injector = WebAppTests.testPage(TestView.class);
PrintWriter out = injector.getInstance(PrintWriter.class);
// Verify the HTML page has correct meta tags in the header
verify(out).print(" http-equiv=\"X-UA-Compatible\"");
verify(out).print(" content=\"IE=8\"");
verify(out).print(" http-equiv=\"Content-type\"");
verify(out).print(String.format(" content=\"%s\"", MimeType.HTML));
verify(out).print("test");
verify(out).print(" id=\"testid\"");
verify(out).print("test note");
}
@Test
void testShort() {
assertThrows(WebAppException.class, () -> {
WebAppTests.testPage(ShortView.class);
});
}
}
| ShortView |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/rest/RestController.java | {
"start": 38692,
"end": 39780
} | class ____ extends DelegatingRestChannel {
private final LongCounter requestsCounter;
private final RestHandler restHandler;
private MeteringRestChannelDecorator(RestChannel delegate, LongCounter requestCounter, RestHandler restHandler) {
super(delegate);
this.requestsCounter = requestCounter;
this.restHandler = restHandler;
}
@Override
public void sendResponse(RestResponse response) {
super.sendResponse(response);
recordRequestMetric(response.status(), restHandler.getName(), request().method().name(), requestsCounter);
}
}
// exposed for tests; marked as UpdateForV10 because this assertion should have flushed out all double-close bugs by the time v10 is
// released so we should be able to drop the tests that check we behave reasonably in production on this impossible path
@UpdateForV10(owner = UpdateForV10.Owner.DISTRIBUTED_COORDINATION)
static boolean PERMIT_DOUBLE_RESPONSE = false;
private static final | MeteringRestChannelDecorator |
java | quarkusio__quarkus | independent-projects/arc/tests/src/test/java/io/quarkus/arc/test/cdi/bcextensions/SyntheticObserverOfParameterizedTypeTest.java | {
"start": 2687,
"end": 2962
} | class ____ {
@Inject
Event<List<MyData>> event;
void fireEvent() {
event.fire(List.of(new MyData("Hello"), new MyData("World")));
event.fire(List.of(new MyData("Hello"), new MyData("again")));
}
}
static | MyService |
java | google__error-prone | check_api/src/main/java/com/google/errorprone/matchers/method/MethodInvocationMatcher.java | {
"start": 9480,
"end": 10052
} | class ____ {
private final Set<Node> states;
final @Nullable Set<Node> def;
final SetMultimap<Token, Node> mapping;
NodeWithDefault(Set<Node> states, Set<Node> def, SetMultimap<Token, Node> mapping) {
this.states = states;
this.def = def;
this.mapping = mapping;
}
}
/** Shared by all compiled graphs, because it has no varying properties. */
private static final Node ACCEPT = new Node();
/** Converts a DFA produced by {@link #compile(Iterable)} into a Matcher based on map lookups. */
private static final | NodeWithDefault |
java | netty__netty | transport-classes-io_uring/src/main/java/io/netty/channel/uring/IoUringDatagramChannel.java | {
"start": 2074,
"end": 13891
} | class ____ extends AbstractIoUringChannel implements DatagramChannel {
private static final InternalLogger logger = InternalLoggerFactory.getInstance(IoUringDatagramChannel.class);
private static final boolean IP_MULTICAST_ALL =
SystemPropertyUtil.getBoolean("io.netty.channel.iouring.ipMulticastAll", false);
private static final ChannelMetadata METADATA = new ChannelMetadata(true, 16);
private static final String EXPECTED_TYPES =
" (expected: " + StringUtil.simpleClassName(DatagramPacket.class) + ", " +
StringUtil.simpleClassName(AddressedEnvelope.class) + '<' +
StringUtil.simpleClassName(ByteBuf.class) + ", " +
StringUtil.simpleClassName(InetSocketAddress.class) + ">, " +
StringUtil.simpleClassName(ByteBuf.class) + ')';
private final IoUringDatagramChannelConfig config;
private volatile boolean connected;
static {
if (logger.isDebugEnabled()) {
logger.debug("-Dio.netty.channel.iouring.ipMulticastAll: {}", IP_MULTICAST_ALL);
}
}
// These buffers are used for msghdr, iov, sockaddr_in / sockaddr_in6 when doing recvmsg / sendmsg
//
// TODO: Alternative we could also allocate these everytime from the ByteBufAllocator or we could use
// some sort of other pool. Let's keep it simple for now.
//
// Consider exposing some configuration for that.
private final MsgHdrMemoryArray recvmsgHdrs = new MsgHdrMemoryArray((short) 256);
private final MsgHdrMemoryArray sendmsgHdrs = new MsgHdrMemoryArray((short) 256);
private final int[] sendmsgResArray = new int[sendmsgHdrs.capacity()];
/**
* Create a new instance which selects the {@link SocketProtocolFamily} to use depending
* on the Operation Systems default which will be chosen.
*/
public IoUringDatagramChannel() {
this(null);
}
/**
* Create a new instance using the given {@link SocketProtocolFamily}. If {@code null} is used it will depend
* on the Operation Systems default which will be chosen.
*/
public IoUringDatagramChannel(SocketProtocolFamily family) {
this(LinuxSocket.newSocketDgram(useIpv6(family)), false);
}
private static boolean useIpv6(SocketProtocolFamily family) {
if (family == null) {
return Socket.isIPv6Preferred();
}
return family == SocketProtocolFamily.INET6;
}
/**
* Create a new instance which selects the {@link SocketProtocolFamily} to use depending
* on the Operation Systems default which will be chosen.
*/
public IoUringDatagramChannel(int fd) {
this(new LinuxSocket(fd), true);
}
private IoUringDatagramChannel(LinuxSocket fd, boolean active) {
// Always use a blocking fd and so make use of fast-poll.
super(null, fd, active);
// Configure IP_MULTICAST_ALL - disable by default to match the behaviour of NIO.
try {
fd.setIpMulticastAll(IP_MULTICAST_ALL);
} catch (IOException e) {
logger.debug("Failed to set IP_MULTICAST_ALL to {}", IP_MULTICAST_ALL, e);
}
config = new IoUringDatagramChannelConfig(this);
}
@Override
public InetSocketAddress remoteAddress() {
return (InetSocketAddress) super.remoteAddress();
}
@Override
public InetSocketAddress localAddress() {
return (InetSocketAddress) super.localAddress();
}
@Override
public ChannelMetadata metadata() {
return METADATA;
}
@Override
public boolean isActive() {
return socket.isOpen() && (config.getActiveOnOpen() && isRegistered() || super.isActive());
}
@Override
public boolean isConnected() {
return connected;
}
@Override
public ChannelFuture joinGroup(InetAddress multicastAddress) {
return joinGroup(multicastAddress, newPromise());
}
@Override
public ChannelFuture joinGroup(InetAddress multicastAddress, ChannelPromise promise) {
try {
return joinGroup(
multicastAddress,
NetworkInterface.getByInetAddress(localAddress().getAddress()), null, promise);
} catch (IOException e) {
promise.setFailure(e);
}
return promise;
}
@Override
public ChannelFuture joinGroup(
InetSocketAddress multicastAddress, NetworkInterface networkInterface) {
return joinGroup(multicastAddress, networkInterface, newPromise());
}
@Override
public ChannelFuture joinGroup(
InetSocketAddress multicastAddress, NetworkInterface networkInterface,
ChannelPromise promise) {
return joinGroup(multicastAddress.getAddress(), networkInterface, null, promise);
}
@Override
public ChannelFuture joinGroup(
InetAddress multicastAddress, NetworkInterface networkInterface, InetAddress source) {
return joinGroup(multicastAddress, networkInterface, source, newPromise());
}
@Override
public ChannelFuture joinGroup(
final InetAddress multicastAddress, final NetworkInterface networkInterface,
final InetAddress source, final ChannelPromise promise) {
ObjectUtil.checkNotNull(multicastAddress, "multicastAddress");
ObjectUtil.checkNotNull(networkInterface, "networkInterface");
try {
socket.joinGroup(multicastAddress, networkInterface, source);
promise.setSuccess();
} catch (IOException e) {
promise.setFailure(e);
}
return promise;
}
@Override
public ChannelFuture leaveGroup(InetAddress multicastAddress) {
return leaveGroup(multicastAddress, newPromise());
}
@Override
public ChannelFuture leaveGroup(InetAddress multicastAddress, ChannelPromise promise) {
try {
return leaveGroup(
multicastAddress, NetworkInterface.getByInetAddress(localAddress().getAddress()), null, promise);
} catch (IOException e) {
promise.setFailure(e);
}
return promise;
}
@Override
public ChannelFuture leaveGroup(
InetSocketAddress multicastAddress, NetworkInterface networkInterface) {
return leaveGroup(multicastAddress, networkInterface, newPromise());
}
@Override
public ChannelFuture leaveGroup(
InetSocketAddress multicastAddress,
NetworkInterface networkInterface, ChannelPromise promise) {
return leaveGroup(multicastAddress.getAddress(), networkInterface, null, promise);
}
@Override
public ChannelFuture leaveGroup(
InetAddress multicastAddress, NetworkInterface networkInterface, InetAddress source) {
return leaveGroup(multicastAddress, networkInterface, source, newPromise());
}
@Override
public ChannelFuture leaveGroup(
final InetAddress multicastAddress, final NetworkInterface networkInterface, final InetAddress source,
final ChannelPromise promise) {
ObjectUtil.checkNotNull(multicastAddress, "multicastAddress");
ObjectUtil.checkNotNull(networkInterface, "networkInterface");
try {
socket.leaveGroup(multicastAddress, networkInterface, source);
promise.setSuccess();
} catch (IOException e) {
promise.setFailure(e);
}
return promise;
}
@Override
public ChannelFuture block(
InetAddress multicastAddress, NetworkInterface networkInterface,
InetAddress sourceToBlock) {
return block(multicastAddress, networkInterface, sourceToBlock, newPromise());
}
@Override
public ChannelFuture block(
final InetAddress multicastAddress, final NetworkInterface networkInterface,
final InetAddress sourceToBlock, final ChannelPromise promise) {
ObjectUtil.checkNotNull(multicastAddress, "multicastAddress");
ObjectUtil.checkNotNull(sourceToBlock, "sourceToBlock");
ObjectUtil.checkNotNull(networkInterface, "networkInterface");
promise.setFailure(new UnsupportedOperationException("Multicast not supported"));
return promise;
}
@Override
public ChannelFuture block(InetAddress multicastAddress, InetAddress sourceToBlock) {
return block(multicastAddress, sourceToBlock, newPromise());
}
@Override
public ChannelFuture block(
InetAddress multicastAddress, InetAddress sourceToBlock, ChannelPromise promise) {
try {
return block(
multicastAddress,
NetworkInterface.getByInetAddress(localAddress().getAddress()),
sourceToBlock, promise);
} catch (Throwable e) {
promise.setFailure(e);
}
return promise;
}
@Override
protected AbstractUnsafe newUnsafe() {
return new IoUringDatagramChannelUnsafe();
}
@Override
protected void doBind(SocketAddress localAddress) throws Exception {
if (localAddress instanceof InetSocketAddress) {
InetSocketAddress socketAddress = (InetSocketAddress) localAddress;
if (socketAddress.getAddress().isAnyLocalAddress() &&
socketAddress.getAddress() instanceof Inet4Address) {
if (socket.family() == SocketProtocolFamily.INET6) {
localAddress = new InetSocketAddress(LinuxSocket.INET6_ANY, socketAddress.getPort());
}
}
}
super.doBind(localAddress);
active = true;
}
private static void checkUnresolved(AddressedEnvelope<?, ?> envelope) {
if (envelope.recipient() instanceof InetSocketAddress
&& (((InetSocketAddress) envelope.recipient()).isUnresolved())) {
throw new UnresolvedAddressException();
}
}
@Override
protected Object filterOutboundMessage(Object msg) {
if (msg instanceof DatagramPacket) {
DatagramPacket packet = (DatagramPacket) msg;
checkUnresolved(packet);
ByteBuf content = packet.content();
return !content.hasMemoryAddress() ?
packet.replace(newDirectBuffer(packet, content)) : msg;
}
if (msg instanceof ByteBuf) {
ByteBuf buf = (ByteBuf) msg;
return !buf.hasMemoryAddress()? newDirectBuffer(buf) : buf;
}
if (msg instanceof AddressedEnvelope) {
@SuppressWarnings("unchecked")
AddressedEnvelope<Object, SocketAddress> e = (AddressedEnvelope<Object, SocketAddress>) msg;
checkUnresolved(e);
if (e.content() instanceof ByteBuf &&
(e.recipient() == null || e.recipient() instanceof InetSocketAddress)) {
ByteBuf content = (ByteBuf) e.content();
return !content.hasMemoryAddress()?
new DefaultAddressedEnvelope<>(
newDirectBuffer(e, content), (InetSocketAddress) e.recipient()) : e;
}
}
throw new UnsupportedOperationException(
"unsupported message type: " + StringUtil.simpleClassName(msg) + EXPECTED_TYPES);
}
@Override
public DatagramChannelConfig config() {
return config;
}
@Override
protected void doDisconnect() throws Exception {
// TODO: use io_uring for this too...
socket.disconnect();
connected = active = false;
resetCachedAddresses();
}
@Override
protected void doClose() throws Exception {
super.doClose();
connected = false;
}
private final | IoUringDatagramChannel |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/impl/pb/GetPluginInfoResponsePBImpl.java | {
"start": 1099,
"end": 2516
} | class ____ extends GetPluginInfoResponse {
private CsiAdaptorProtos.GetPluginInfoResponse.Builder builder;
public GetPluginInfoResponsePBImpl(
CsiAdaptorProtos.GetPluginInfoResponse responseProto) {
this.builder = responseProto.toBuilder();
}
public GetPluginInfoResponsePBImpl() {
this.builder = CsiAdaptorProtos.GetPluginInfoResponse.newBuilder();
}
@Override
public void setDriverName(String driverName) {
Preconditions.checkNotNull(builder);
builder.setName(driverName);
}
@Override
public String getDriverName() {
Preconditions.checkNotNull(builder);
return builder.getName();
}
@Override
public void setVersion(String version) {
Preconditions.checkNotNull(builder);
builder.setVendorVersion(version);
}
@Override
public String getVersion() {
Preconditions.checkNotNull(builder);
return builder.getVendorVersion();
}
public CsiAdaptorProtos.GetPluginInfoResponse getProto() {
Preconditions.checkNotNull(builder);
return builder.build();
}
@Override
public int hashCode() {
return getProto().hashCode();
}
@Override
public boolean equals(Object other) {
if (other == null) {
return false;
}
if (other.getClass().isAssignableFrom(this.getClass())) {
return this.getProto().equals(this.getClass().cast(other).getProto());
}
return false;
}
}
| GetPluginInfoResponsePBImpl |
java | micronaut-projects__micronaut-core | http-client/src/main/java/io/micronaut/http/client/netty/ConnectionManager.java | {
"start": 52511,
"end": 52574
} | class ____ implements
* the HTTP parts.
*/
final | just |
java | apache__camel | components/camel-jgroups-raft/src/main/java/org/apache/camel/component/jgroups/raft/cluster/JGroupsRaftClusterView.java | {
"start": 1380,
"end": 5694
} | class ____ extends AbstractCamelClusterView {
private static final transient Logger LOG = LoggerFactory.getLogger(JGroupsRaftClusterView.class);
private final CamelClusterMember localMember = new JGropusraftLocalMember();
private String jgroupsConfig;
private String jgroupsClusterName;
private RaftHandle raftHandle;
private String raftId;
private volatile boolean isMaster;
protected JGroupsRaftClusterView(CamelClusterService cluster, String namespace, String jgroupsConfig,
String jgroupsClusterName, RaftHandle raftHandle, String raftId) {
super(cluster, namespace);
this.jgroupsConfig = jgroupsConfig;
this.jgroupsClusterName = jgroupsClusterName;
this.raftHandle = raftHandle;
this.raftId = raftId;
}
@Override
public Optional<CamelClusterMember> getLeader() {
if (isMaster) {
return Optional.of(localMember);
} else {
return Optional.empty();
}
}
@Override
public CamelClusterMember getLocalMember() {
return localMember;
}
@Override
public List<CamelClusterMember> getMembers() {
return new ArrayList<CamelClusterMember>() {
{
add(localMember);
}
};
}
@Override
protected void doStart() throws Exception {
if (raftHandle == null && jgroupsConfig != null && !jgroupsConfig.isEmpty()) {
raftHandle = new RaftHandle(new JChannel(jgroupsConfig), new NopStateMachine()).raftId(raftId);
} else if (raftHandle == null) {
raftHandle = new RaftHandle(new JChannel(JGroupsRaftConstants.DEFAULT_JGROUPSRAFT_CONFIG), new NopStateMachine())
.raftId(raftId);
}
fireLeadershipChangedEvent((CamelClusterMember) null);
// it may take a while for event to trigger and allow us to join so retry a while
Exception cause = null;
for (int i = 1; i < 11; i++) {
LOG.debug("Attempt #{} for raft {} to join {}", i, raftId, jgroupsClusterName);
try {
raftHandle.addRoleListener(new ClusterRoleChangeListener(this));
raftHandle.channel().connect(jgroupsClusterName);
LOG.debug("Joined and connected to {} with raft id: {}", jgroupsClusterName, raftId);
cause = null;
break;
} catch (Exception e) {
cause = e;
}
// wait for next attempt
Thread.sleep(5000);
}
if (cause != null) {
throw cause;
}
}
@Override
protected void doStop() throws Exception {
isMaster = false;
fireLeadershipChangedEvent((CamelClusterMember) null);
LOG.info("Disconnecting JGroupsraft Channel for JGroupsRaftClusterView with Id {}", raftId);
raftHandle.channel().disconnect();
if (raftHandle != null && raftHandle.log() != null) {
raftHandle.log().close();
LOG.info("Closed Log for JGroupsRaftClusterView with Id {}", raftId);
}
}
@Override
protected void doShutdown() throws Exception {
isMaster = false;
fireLeadershipChangedEvent((CamelClusterMember) null);
if (raftHandle != null) {
if (raftHandle.channel() != null) {
LOG.info("Closing JGroupsraft Channel for JGroupsRaftClusterView with Id {}", raftId);
raftHandle.channel().close();
LOG.info("Closed JGroupsraft Channel Channel for JGroupsRaftClusterView with Id {}", raftId);
}
if (raftHandle.log() != null) {
LOG.info("Closing Log for JGroupsRaftClusterView with Id {}", raftId);
raftHandle.log().close();
LOG.info("Closed Log for JGroupsRaftClusterView with Id {}", raftId);
}
raftHandle = null;
}
}
public boolean isMaster() {
return isMaster;
}
public void setMaster(boolean master) {
isMaster = master;
}
@Override
protected void fireLeadershipChangedEvent(CamelClusterMember leader) {
super.fireLeadershipChangedEvent(leader);
}
private final | JGroupsRaftClusterView |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/index/mapper/SourceLoader.java | {
"start": 21766,
"end": 22056
} | interface ____ {
/**
* Returns a leaf loader if the provided context contains patches for the specified field;
* returns null otherwise.
*/
SyntheticVectorsLoader.Leaf leaf(LeafReaderContext context) throws IOException;
| SyntheticVectorsLoader |
java | elastic__elasticsearch | x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractNanosEvaluator.java | {
"start": 5868,
"end": 6801
} | class ____ implements EvalOperator.ExpressionEvaluator.Factory {
private final Source source;
private final EvalOperator.ExpressionEvaluator.Factory value;
private final EvalOperator.ExpressionEvaluator.Factory chronoField;
private final ZoneId zone;
public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory value,
EvalOperator.ExpressionEvaluator.Factory chronoField, ZoneId zone) {
this.source = source;
this.value = value;
this.chronoField = chronoField;
this.zone = zone;
}
@Override
public DateExtractNanosEvaluator get(DriverContext context) {
return new DateExtractNanosEvaluator(source, value.get(context), chronoField.get(context), zone, context);
}
@Override
public String toString() {
return "DateExtractNanosEvaluator[" + "value=" + value + ", chronoField=" + chronoField + ", zone=" + zone + "]";
}
}
}
| Factory |
java | spring-cloud__spring-cloud-gateway | spring-cloud-gateway-server-webflux/src/main/java/org/springframework/cloud/gateway/filter/factory/RetryGatewayFilterFactory.java | {
"start": 2119,
"end": 11111
} | class ____ extends AbstractGatewayFilterFactory<RetryGatewayFilterFactory.RetryConfig> {
/**
* Retry iteration key.
*/
public static final String RETRY_ITERATION_KEY = "retry_iteration";
private static final Log log = LogFactory.getLog(RetryGatewayFilterFactory.class);
public RetryGatewayFilterFactory() {
super(RetryConfig.class);
}
private static <T> List<T> toList(T... items) {
return new ArrayList<>(Arrays.asList(items));
}
@Override
public List<String> shortcutFieldOrder() {
return Arrays.asList("retries", "statuses", "methods", "backoff.firstBackoff", "backoff.maxBackoff",
"backoff.factor", "backoff.basedOnPreviousValue", "jitter.randomFactor", "timeout");
}
@Override
public GatewayFilter apply(RetryConfig retryConfig) {
retryConfig.validate();
Repeat<ServerWebExchange> statusCodeRepeat = null;
if (!retryConfig.getStatuses().isEmpty() || !retryConfig.getSeries().isEmpty()) {
Predicate<RepeatContext<ServerWebExchange>> repeatPredicate = context -> {
ServerWebExchange exchange = context.applicationContext();
if (exceedsMaxIterations(exchange, retryConfig)) {
return false;
}
HttpStatusCode statusCode = exchange.getResponse().getStatusCode();
boolean retryableStatusCode = retryConfig.getStatuses().contains(statusCode);
// null status code might mean a network exception?
if (!retryableStatusCode && statusCode != null) {
// try the series
retryableStatusCode = false;
for (int i = 0; i < retryConfig.getSeries().size(); i++) {
if (statusCode instanceof HttpStatus) {
HttpStatus httpStatus = (HttpStatus) statusCode;
if (httpStatus.series().equals(retryConfig.getSeries().get(i))) {
retryableStatusCode = true;
break;
}
}
}
}
final boolean finalRetryableStatusCode = retryableStatusCode;
trace("retryableStatusCode: %b, statusCode %s, configured statuses %s, configured series %s",
() -> finalRetryableStatusCode, () -> statusCode, retryConfig::getStatuses,
retryConfig::getSeries);
HttpMethod httpMethod = exchange.getRequest().getMethod();
boolean retryableMethod = retryConfig.getMethods().contains(httpMethod);
trace("retryableMethod: %b, httpMethod %s, configured methods %s", () -> retryableMethod,
() -> httpMethod, retryConfig::getMethods);
return retryableMethod && finalRetryableStatusCode;
};
statusCodeRepeat = Repeat.onlyIf(repeatPredicate)
.doOnRepeat(context -> reset(context.applicationContext()));
BackoffConfig backoff = retryConfig.getBackoff();
if (backoff != null) {
statusCodeRepeat = statusCodeRepeat.backoff(getBackoff(backoff));
}
JitterConfig jitter = retryConfig.getJitter();
if (jitter != null) {
statusCodeRepeat = statusCodeRepeat.jitter(getJitter(jitter));
}
Duration timeout = retryConfig.getTimeout();
if (timeout != null) {
statusCodeRepeat = statusCodeRepeat.timeout(timeout);
}
}
Retry<ServerWebExchange> exceptionRetry = null;
if (!retryConfig.getExceptions().isEmpty()) {
Predicate<RetryContext<ServerWebExchange>> retryContextPredicate = context -> {
ServerWebExchange exchange = context.applicationContext();
if (exceedsMaxIterations(exchange, retryConfig)) {
return false;
}
Throwable exception = context.exception();
for (Class<? extends Throwable> retryableClass : retryConfig.getExceptions()) {
if (retryableClass.isInstance(exception)
|| (exception != null && retryableClass.isInstance(exception.getCause()))) {
trace("exception or its cause is retryable %s, configured exceptions %s",
() -> getExceptionNameWithCause(exception), retryConfig::getExceptions);
HttpMethod httpMethod = exchange.getRequest().getMethod();
boolean retryableMethod = retryConfig.getMethods().contains(httpMethod);
trace("retryableMethod: %b, httpMethod %s, configured methods %s", () -> retryableMethod,
() -> httpMethod, retryConfig::getMethods);
return retryableMethod;
}
}
trace("exception or its cause is not retryable %s, configured exceptions %s",
() -> getExceptionNameWithCause(exception), retryConfig::getExceptions);
return false;
};
exceptionRetry = Retry.onlyIf(retryContextPredicate)
.doOnRetry(context -> reset(context.applicationContext()))
.retryMax(retryConfig.getRetries());
BackoffConfig backoff = retryConfig.getBackoff();
if (backoff != null) {
exceptionRetry = exceptionRetry.backoff(getBackoff(backoff));
}
JitterConfig jitter = retryConfig.getJitter();
if (jitter != null) {
exceptionRetry = exceptionRetry.jitter(getJitter(jitter));
}
Duration timeout = retryConfig.getTimeout();
if (timeout != null) {
exceptionRetry = exceptionRetry.timeout(timeout);
}
}
GatewayFilter gatewayFilter = apply(retryConfig.getRouteId(), statusCodeRepeat, exceptionRetry);
return new GatewayFilter() {
@Override
public Mono<Void> filter(ServerWebExchange exchange, GatewayFilterChain chain) {
return gatewayFilter.filter(exchange, chain);
}
@Override
public String toString() {
return filterToStringCreator(RetryGatewayFilterFactory.this).append("routeId", retryConfig.getRouteId())
.append("retries", retryConfig.getRetries())
.append("series", retryConfig.getSeries())
.append("statuses", retryConfig.getStatuses())
.append("methods", retryConfig.getMethods())
.append("exceptions", retryConfig.getExceptions())
.append("backoff", retryConfig.getBackoff())
.append("jitter", retryConfig.getJitter())
.append("timeout", retryConfig.getTimeout())
.toString();
}
};
}
private String getExceptionNameWithCause(Throwable exception) {
if (exception != null) {
StringBuilder builder = new StringBuilder(exception.getClass().getName());
Throwable cause = exception.getCause();
if (cause != null) {
builder.append("{cause=").append(cause.getClass().getName()).append("}");
}
return builder.toString();
}
else {
return "null";
}
}
private Backoff getBackoff(BackoffConfig backoff) {
return Backoff.exponential(backoff.firstBackoff, backoff.maxBackoff, backoff.factor,
backoff.basedOnPreviousValue);
}
private Jitter getJitter(JitterConfig jitter) {
return Jitter.random(jitter.randomFactor);
}
public boolean exceedsMaxIterations(ServerWebExchange exchange, RetryConfig retryConfig) {
Integer iteration = exchange.getAttribute(RETRY_ITERATION_KEY);
// TODO: deal with null iteration
boolean exceeds = iteration != null && iteration >= retryConfig.getRetries();
trace("exceedsMaxIterations %b, iteration %d, configured retries %d", () -> exceeds, () -> iteration,
retryConfig::getRetries);
return exceeds;
}
@Deprecated
/**
* Use {@link ServerWebExchangeUtils#reset(ServerWebExchange)}
*/
public void reset(ServerWebExchange exchange) {
Connection conn = exchange.getAttribute(ServerWebExchangeUtils.CLIENT_RESPONSE_CONN_ATTR);
if (conn != null) {
trace("disposing response connection before next iteration");
conn.dispose();
exchange.getAttributes().remove(ServerWebExchangeUtils.CLIENT_RESPONSE_CONN_ATTR);
}
ServerWebExchangeUtils.reset(exchange);
}
public GatewayFilter apply(@Nullable String routeId, @Nullable Repeat<ServerWebExchange> repeat,
@Nullable Retry<ServerWebExchange> retry) {
enableBodyCaching(routeId);
return (exchange, chain) -> {
trace("Entering retry-filter");
// chain.filter returns a Mono<Void>
Publisher<Void> publisher = chain.filter(exchange)
// .log("retry-filter", Level.INFO)
.doOnSuccess(aVoid -> updateIteration(exchange))
.doOnError(throwable -> updateIteration(exchange));
if (retry != null) {
// retryWhen returns a Mono<Void>
// retry needs to go before repeat
publisher = ((Mono<Void>) publisher)
.retryWhen(reactor.util.retry.Retry.withThrowable(retry.withApplicationContext(exchange)));
}
if (repeat != null) {
// repeatWhen returns a Flux<Void>
// so this needs to be last and the variable a Publisher<Void>
publisher = ((Mono<Void>) publisher).repeatWhen(repeat.withApplicationContext(exchange));
}
return Mono.fromDirect(publisher);
};
}
private void updateIteration(ServerWebExchange exchange) {
int iteration = exchange.getAttributeOrDefault(RETRY_ITERATION_KEY, -1);
int newIteration = iteration + 1;
trace("setting new iteration in attr %d", () -> newIteration);
exchange.getAttributes().put(RETRY_ITERATION_KEY, newIteration);
}
@SafeVarargs
private final void trace(String message, Supplier<Object>... argSuppliers) {
if (log.isTraceEnabled()) {
Object[] args = new Object[argSuppliers.length];
int i = 0;
for (Supplier<Object> a : argSuppliers) {
args[i] = a.get();
++i;
}
log.trace(String.format(message, args));
}
}
@SuppressWarnings("unchecked")
public static | RetryGatewayFilterFactory |
java | bumptech__glide | samples/flickr/src/main/java/com/bumptech/glide/samples/flickr/FlickrPhotoList.java | {
"start": 4305,
"end": 6195
} | class ____ extends RecyclerView.Adapter<PhotoTitleViewHolder>
implements ListPreloader.PreloadModelProvider<Photo> {
private final LayoutInflater inflater;
private List<Photo> photos = Collections.emptyList();
FlickrPhotoListAdapter() {
this.inflater = LayoutInflater.from(getActivity());
}
void setPhotos(List<Photo> photos) {
this.photos = photos;
notifyDataSetChanged();
}
@Override
public PhotoTitleViewHolder onCreateViewHolder(ViewGroup parent, int viewType) {
View view = inflater.inflate(R.layout.flickr_photo_list_item, parent, false);
PhotoTitleViewHolder vh = new PhotoTitleViewHolder(view);
preloadSizeProvider.setView(vh.imageView);
return vh;
}
@Override
public void onBindViewHolder(PhotoTitleViewHolder holder, int position) {
final Photo current = photos.get(position);
fullRequest.load(current).thumbnail(thumbRequest.load(current)).into(holder.imageView);
holder.imageView.setOnClickListener(
new View.OnClickListener() {
@Override
public void onClick(View view) {
Intent intent = FullscreenActivity.getIntent(getActivity(), current);
startActivity(intent);
}
});
holder.titleView.setText(current.getTitle());
}
@Override
public long getItemId(int i) {
return RecyclerView.NO_ID;
}
@Override
public int getItemCount() {
return photos.size();
}
@NonNull
@Override
public List<Photo> getPreloadItems(int position) {
return photos.subList(position, position + 1);
}
@Nullable
@Override
public RequestBuilder<Drawable> getPreloadRequestBuilder(@NonNull Photo item) {
return fullRequest.thumbnail(thumbRequest.load(item)).load(item);
}
}
private static final | FlickrPhotoListAdapter |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/NullNeedsCastForVarargsTest.java | {
"start": 5869,
"end": 6290
} | class ____ {
void test() {
List<String> list = Arrays.asList((String) null);
}
}
""")
.doTest();
}
@Test
public void arraysAsList_integerTargetType_withArrayCast() {
refactoringHelper
.addInputLines(
"Test.java",
"""
import java.util.Arrays;
import java.util.List;
| Test |
java | apache__logging-log4j2 | log4j-core/src/main/java/org/apache/logging/log4j/core/jackson/StackTraceElementMixIn.java | {
"start": 1368,
"end": 2656
} | class ____ {
@JsonCreator
StackTraceElementMixIn(
// @formatter:off
@JsonProperty(StackTraceElementConstants.ATTR_CLASS) final String declaringClass,
@JsonProperty(StackTraceElementConstants.ATTR_METHOD) final String methodName,
@JsonProperty(StackTraceElementConstants.ATTR_FILE) final String fileName,
@JsonProperty(StackTraceElementConstants.ATTR_LINE) final int lineNumber)
// @formatter:on
{
// empty
}
@JsonProperty(StackTraceElementConstants.ATTR_CLASS)
@JacksonXmlProperty(localName = StackTraceElementConstants.ATTR_CLASS, isAttribute = true)
abstract String getClassName();
@JsonProperty(StackTraceElementConstants.ATTR_FILE)
@JacksonXmlProperty(localName = StackTraceElementConstants.ATTR_FILE, isAttribute = true)
abstract String getFileName();
@JsonProperty(StackTraceElementConstants.ATTR_LINE)
@JacksonXmlProperty(localName = StackTraceElementConstants.ATTR_LINE, isAttribute = true)
abstract int getLineNumber();
@JsonProperty(StackTraceElementConstants.ATTR_METHOD)
@JacksonXmlProperty(localName = StackTraceElementConstants.ATTR_METHOD, isAttribute = true)
abstract String getMethodName();
}
| StackTraceElementMixIn |
java | FasterXML__jackson-databind | src/main/java/tools/jackson/databind/deser/bean/PropertyValue.java | {
"start": 4771,
"end": 5538
} | class ____
extends PropertyValue
{
final SettableBeanProperty _property;
public Merging(PropertyValue next, TokenBuffer buffered,
SettableBeanProperty prop)
{
super(next, buffered);
_property = prop;
}
@Override
public void assign(DeserializationContext ctxt, Object bean)
{
TokenBuffer buffered = (TokenBuffer) value;
try (JsonParser p = buffered.asParser()) {
p.nextToken();
// !!! 12-Aug-2025, tatu: We need DeserializationContext...
// but for testing just pass null for now.
_property.deserializeAndSet(p, ctxt, bean);
}
}
}
}
| Merging |
java | spring-projects__spring-boot | core/spring-boot-autoconfigure/src/main/java/org/springframework/boot/autoconfigure/template/TemplateAvailabilityProviders.java | {
"start": 4704,
"end": 6208
} | class ____
* @param resourceLoader the resource loader
* @return a {@link TemplateAvailabilityProvider} or null
*/
public @Nullable TemplateAvailabilityProvider getProvider(String view, Environment environment,
ClassLoader classLoader, ResourceLoader resourceLoader) {
Assert.notNull(view, "'view' must not be null");
Assert.notNull(environment, "'environment' must not be null");
Assert.notNull(classLoader, "'classLoader' must not be null");
Assert.notNull(resourceLoader, "'resourceLoader' must not be null");
Boolean useCache = environment.getProperty("spring.template.provider.cache", Boolean.class, true);
if (!useCache) {
return findProvider(view, environment, classLoader, resourceLoader);
}
TemplateAvailabilityProvider provider = this.resolved.get(view);
if (provider == null) {
synchronized (this.cache) {
provider = findProvider(view, environment, classLoader, resourceLoader);
provider = (provider != null) ? provider : NONE;
this.resolved.put(view, provider);
this.cache.put(view, provider);
}
}
return (provider != NONE) ? provider : null;
}
private @Nullable TemplateAvailabilityProvider findProvider(String view, Environment environment,
ClassLoader classLoader, ResourceLoader resourceLoader) {
for (TemplateAvailabilityProvider candidate : this.providers) {
if (candidate.isTemplateAvailable(view, environment, classLoader, resourceLoader)) {
return candidate;
}
}
return null;
}
private static final | loader |
java | spring-projects__spring-framework | spring-test/src/test/java/org/springframework/test/context/support/BootstrapTestUtilsMergedConfigTests.java | {
"start": 22213,
"end": 22297
} | class ____ {
@ContextConfiguration(classes = AppleConfig.class)
| EmptyConfigTestCase |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/inject/OverlappingQualifierAndScopeAnnotationTest.java | {
"start": 3057,
"end": 3137
} | interface ____ {}
@javax.inject.Qualifier
@ | MyGuiceScope |
java | spring-projects__spring-framework | spring-r2dbc/src/main/java/org/springframework/r2dbc/core/binding/MutableBindings.java | {
"start": 967,
"end": 3350
} | class ____ extends Bindings {
private final BindMarkers markers;
/**
* Create new {@link MutableBindings}.
* @param markers the {@link BindMarkers} to wrap
*/
public MutableBindings(BindMarkers markers) {
super(new LinkedHashMap<>());
Assert.notNull(markers, "BindMarkers must not be null");
this.markers = markers;
}
/**
* Obtain the next {@link BindMarker}.
* Increments {@link BindMarkers} state
* @return the next {@link BindMarker}
*/
public BindMarker nextMarker() {
return this.markers.next();
}
/**
* Obtain the next {@link BindMarker} with a name {@code hint}.
* Increments {@link BindMarkers} state.
* @param hint name hint
* @return the next {@link BindMarker}
*/
public BindMarker nextMarker(String hint) {
return this.markers.next(hint);
}
/**
* Bind a value to {@link BindMarker}.
* @param marker the {@link BindMarker} to bind to
* @param value the value to bind
*/
public MutableBindings bind(BindMarker marker, Object value) {
Assert.notNull(marker, "BindMarker must not be null");
Assert.notNull(value, "Value must not be null");
getBindings().put(marker, new ValueBinding(marker, value));
return this;
}
/**
* Bind a value and return the related {@link BindMarker}.
* Increments {@link BindMarkers} state.
* @param value the value to bind
*/
public BindMarker bind(Object value) {
Assert.notNull(value, "Value must not be null");
BindMarker marker = nextMarker();
getBindings().put(marker, new ValueBinding(marker, value));
return marker;
}
/**
* Bind a {@code NULL} value to {@link BindMarker}.
* @param marker the {@link BindMarker} to bind to
* @param valueType the value type
*/
public MutableBindings bindNull(BindMarker marker, Class<?> valueType) {
Assert.notNull(marker, "BindMarker must not be null");
Assert.notNull(valueType, "Value type must not be null");
getBindings().put(marker, new NullBinding(marker, valueType));
return this;
}
/**
* Bind a {@code NULL} value and return the related {@link BindMarker}.
* Increments {@link BindMarkers} state.
* @param valueType the value type
*/
public BindMarker bindNull(Class<?> valueType) {
Assert.notNull(valueType, "Value type must not be null");
BindMarker marker = nextMarker();
getBindings().put(marker, new NullBinding(marker, valueType));
return marker;
}
}
| MutableBindings |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/boot/model/source/internal/hbm/RelationalValueSourceHelper.java | {
"start": 2820,
"end": 15867
} | class ____ implements ColumnsAndFormulasSource {
@Override
public String getFormulaAttribute() {
return null;
}
@Override
public String getColumnAttribute() {
return null;
}
@Override
public List getColumnOrFormulaElements() {
return Collections.emptyList();
}
@Override
public SizeSource getSizeSource() {
return null;
}
@Override
public Boolean isNullable() {
return null;
}
@Override
public Set<String> getIndexConstraintNames() {
return Collections.emptySet();
}
@Override
public boolean isUnique() {
return false;
}
@Override
public Set<String> getUniqueKeyConstraintNames() {
return Collections.emptySet();
}
}
/**
* Given a {@link ColumnsAndFormulasSource}, build a single {@link RelationalValueSource}.
* More than one {@link RelationalValueSource} will result in an exception.
*
* @param mappingDocument the mapping document
* @param containingTableName The logical name of the table containing the relational values
* @param columnsAndFormulasSource the adapter describing the value sources.
*
* @return The single RelationalValueSource.
*/
public static RelationalValueSource buildValueSource(
MappingDocument mappingDocument,
String containingTableName,
ColumnsAndFormulasSource columnsAndFormulasSource) {
final List<RelationalValueSource> sources = buildValueSources(
mappingDocument,
containingTableName,
columnsAndFormulasSource
);
if ( sources.size() > 1 ) {
throw new MappingException( multipleError( columnsAndFormulasSource ), mappingDocument.getOrigin() );
}
return sources.get( 0 );
}
/**
* Given a {@link ColumnsAndFormulasSource}, build a single {@link RelationalValueSource}
* which is required to be a column. More than one {@link RelationalValueSource} will result
* in an exception. A formula, rather than a column, will result in an exception.
*
* @param mappingDocument the mapping document
* @param containingTableName The logical name of the table containing the relational values
* @param columnsAndFormulasSource the adapter describing the value sources.
*
* @return The single ColumnSource.
*/
public static ColumnSource buildColumnSource(
MappingDocument mappingDocument,
String containingTableName,
ColumnsAndFormulasSource columnsAndFormulasSource) {
final List<RelationalValueSource> sources = buildValueSources(
mappingDocument,
containingTableName,
columnsAndFormulasSource
);
if ( sources.size() > 1 ) {
throw new MappingException( multipleError( columnsAndFormulasSource ), mappingDocument.getOrigin() );
}
final RelationalValueSource result = sources.get( 0 );
if ( result instanceof ColumnSource columnSource ) {
return columnSource;
}
else {
throw new MappingException( formulaError( columnsAndFormulasSource, (DerivedValueSource) result ),
mappingDocument.getOrigin() );
}
}
/**
* Given a {@link ColumnsAndFormulasSource}, build the corresponding list of
* {@link ColumnSource}. Any formula, rather than a column, will result in an exception.
*
* @param mappingDocument the mapping document
* @param containingTableName The logical name of the table containing the relational values
* @param columnsAndFormulasSource the adapter describing the value sources.
*
* @return The corresponding list.
*/
public static List<ColumnSource> buildColumnSources(
MappingDocument mappingDocument,
String containingTableName,
ColumnsAndFormulasSource columnsAndFormulasSource) {
final List<RelationalValueSource> sources = buildValueSources(
mappingDocument,
containingTableName,
columnsAndFormulasSource
);
final List<ColumnSource> columnSources = CollectionHelper.arrayList( sources.size() );
for ( RelationalValueSource source : sources ) {
if ( source instanceof ColumnSource columnSource ) {
columnSources.add( columnSource );
}
else {
throw new MappingException( formulaError( columnsAndFormulasSource, (DerivedValueSource) source ),
mappingDocument.getOrigin() );
}
}
return columnSources;
}
private static String multipleError(ColumnsAndFormulasSource columnsAndFormulasSource) {
final String errorMessage;
if ( columnsAndFormulasSource.getSourceType().canBeNamed()
&& isNotEmpty( columnsAndFormulasSource.getSourceName() ) ) {
errorMessage = String.format(
Locale.ENGLISH,
"Expecting just a single formula/column in context of <%s name=\"%s\"/>",
columnsAndFormulasSource.getSourceType().getElementName(),
columnsAndFormulasSource.getSourceName()
);
}
else {
errorMessage = String.format(
Locale.ENGLISH,
"Expecting just a single formula/column in context of <%s/>",
columnsAndFormulasSource.getSourceType().getElementName()
);
}
return errorMessage;
}
private static String formulaError(ColumnsAndFormulasSource columnsAndFormulasSource, DerivedValueSource formulaSource) {
if ( columnsAndFormulasSource.getSourceType().canBeNamed()
&& isNotEmpty( columnsAndFormulasSource.getSourceName() ) ) {
return String.format(
Locale.ENGLISH,
"Expecting single column in context of <%s name=\"%s\"/>, but found formula [%s]",
columnsAndFormulasSource.getSourceType().getElementName(),
columnsAndFormulasSource.getSourceName(),
formulaSource.getExpression()
);
}
else {
return String.format(
Locale.ENGLISH,
"Expecting single column in context of <%s/>, but found formula [%s]",
columnsAndFormulasSource.getSourceType().getElementName(),
formulaSource.getExpression()
);
}
}
/**
* Given a {@link ColumnsAndFormulasSource}, build the corresponding list of
* {@link RelationalValueSource}
*
* @param mappingDocument the mapping document
* @param containingTableName The logical name of the table containing the relational values
* @param columnsAndFormulasSource the adapter describing the value sources.
*
* @return The corresponding list.
*/
public static List<RelationalValueSource> buildValueSources(
MappingDocument mappingDocument,
String containingTableName,
ColumnsAndFormulasSource columnsAndFormulasSource) {
List<RelationalValueSource> result = new ArrayList<>();
if ( isNotEmpty( columnsAndFormulasSource.getFormulaAttribute() ) ) {
// we have an explicit formula attribute (i.e., <SOMETHING formula="abc"/>)
validateUseOfFormulaAttribute( mappingDocument, columnsAndFormulasSource );
result.add(
new FormulaImpl(
mappingDocument,
containingTableName,
columnsAndFormulasSource.getFormulaAttribute()
)
);
}
else if ( isNotEmpty( columnsAndFormulasSource.getColumnOrFormulaElements() ) ) {
validateUseOfColumnOrFormulaNestedElements( mappingDocument, columnsAndFormulasSource );
for ( Object selectable : columnsAndFormulasSource.getColumnOrFormulaElements() ) {
if ( selectable instanceof JaxbHbmColumnType columnElement ) {
result.add(
new ColumnSourceImpl(
mappingDocument,
containingTableName,
columnElement,
columnsAndFormulasSource.getIndexConstraintNames(),
columnsAndFormulasSource.getUniqueKeyConstraintNames()
)
);
}
else if ( selectable instanceof String string ) {
result.add( new FormulaImpl( mappingDocument, containingTableName, string ) );
}
else {
throw new MappingException(
"Unexpected column/formula JAXB type : " + selectable.getClass().getName(),
mappingDocument.getOrigin()
);
}
}
}
else {
// we have either an explicitly named column via the column attribute, or an implicit
// column reference. Aside from applying an implicit naming strategy (or not), these 2
// case are handled the exact same way
result.add(
new ColumnAttributeSourceImpl(
mappingDocument,
containingTableName,
columnsAndFormulasSource.getColumnAttribute(),
columnsAndFormulasSource.getSizeSource(),
columnsAndFormulasSource.isNullable(),
columnsAndFormulasSource.isUnique(),
columnsAndFormulasSource.getIndexConstraintNames(),
columnsAndFormulasSource.getUniqueKeyConstraintNames()
)
);
}
return result;
}
private static void validateUseOfFormulaAttribute(
MappingDocument sourceDocument,
ColumnsAndFormulasSource columnsAndFormulasSource) {
// 1) make sure there is no column attribute
if ( isNotEmpty( columnsAndFormulasSource.getColumnAttribute() ) ) {
final String errorMessage;
if ( columnsAndFormulasSource.getSourceType().canBeNamed()
&& isNotEmpty( columnsAndFormulasSource.getSourceName() ) ) {
errorMessage = String.format(
Locale.ENGLISH,
"column attribute and formula attribute may not be specified together near <%s name=\"%s\" column=\"%s\" formula=\"%s\" />",
columnsAndFormulasSource.getSourceType().getElementName(),
columnsAndFormulasSource.getSourceName(),
columnsAndFormulasSource.getColumnAttribute(),
columnsAndFormulasSource.getFormulaAttribute()
);
}
else {
errorMessage = String.format(
Locale.ENGLISH,
"column attribute and formula attribute may not be specified together near <%s column=\"%s\" formula=\"%s\" />",
columnsAndFormulasSource.getSourceType().getElementName(),
columnsAndFormulasSource.getColumnAttribute(),
columnsAndFormulasSource.getFormulaAttribute()
);
}
throw new MappingException( errorMessage, sourceDocument.getOrigin() );
}
// 2) and no column/formula nested elements
if ( isNotEmpty( columnsAndFormulasSource.getColumnOrFormulaElements() ) ) {
final String errorMessage;
if ( columnsAndFormulasSource.getSourceType().canBeNamed()
&& isNotEmpty( columnsAndFormulasSource.getSourceName() ) ) {
errorMessage = String.format(
Locale.ENGLISH,
"formula attribute may not be specified along with <column/> or <formula/> subelement(s) near <%s name=\"%s\" formula=\"%s\" />",
columnsAndFormulasSource.getSourceType().getElementName(),
columnsAndFormulasSource.getSourceName(),
columnsAndFormulasSource.getFormulaAttribute()
);
}
else {
errorMessage = String.format(
Locale.ENGLISH,
"formula attribute may not be specified along with <column/> or <formula/> subelement(s) near <%s formula=\"%s\" />",
columnsAndFormulasSource.getSourceType().getElementName(),
columnsAndFormulasSource.getFormulaAttribute()
);
}
throw new MappingException( errorMessage, sourceDocument.getOrigin() );
}
}
private static void validateUseOfColumnOrFormulaNestedElements(
MappingDocument sourceDocument,
ColumnsAndFormulasSource columnsAndFormulasSource) {
if ( isNotEmpty( columnsAndFormulasSource.getColumnAttribute() ) ) {
final String errorMessage;
if ( columnsAndFormulasSource.getSourceType().canBeNamed()
&& isNotEmpty( columnsAndFormulasSource.getSourceName() ) ) {
errorMessage = String.format(
Locale.ENGLISH,
"column attribute may not be specified along with <column/> or <formula/> subelement(s) near <%s name=\"%s\" column=\"%s\" />",
columnsAndFormulasSource.getSourceType().getElementName(),
columnsAndFormulasSource.getSourceName(),
columnsAndFormulasSource.getColumnAttribute()
);
}
else {
errorMessage = String.format(
Locale.ENGLISH,
"column attribute may not be specified along with <column/> or <formula/> subelement(s) near <%s column=\"%s\" />",
columnsAndFormulasSource.getSourceType().getElementName(),
columnsAndFormulasSource.getColumnAttribute()
);
}
throw new MappingException( errorMessage, sourceDocument.getOrigin() );
}
}
private static void validateCustomWriteFragment(
MappingDocument sourceDocument,
ColumnsAndFormulasSource columnsAndFormulasSource,
JaxbHbmColumnType columnMapping,
String customWrite) {
if ( customWrite != null && !customWrite.matches("[^?]*\\?[^?]*") ) {
final String errorMessage;
if ( columnsAndFormulasSource.getSourceType().canBeNamed()
&& isNotEmpty( columnsAndFormulasSource.getSourceName() ) ) {
errorMessage = String.format(
Locale.ENGLISH,
"write expression must contain exactly one value placeholder ('?') character near <column name=\"%s\" ... write=\"%s\" /> for <%s name=\"%s\" />",
columnMapping.getName(),
customWrite,
columnsAndFormulasSource.getSourceType().getElementName(),
columnsAndFormulasSource.getSourceName()
);
}
else {
errorMessage = String.format(
Locale.ENGLISH,
"write expression must contain exactly one value placeholder ('?') character near <column name=\"%s\" ... write=\"%s\" /> for <%s />",
columnMapping.getName(),
customWrite,
columnsAndFormulasSource.getSourceType().getElementName()
);
}
throw new MappingException( errorMessage, sourceDocument.getOrigin() );
}
}
}
| AbstractColumnsAndFormulasSource |
java | alibaba__fastjson | src/test/java/com/alibaba/json/bvt/issue_1600/Issue1647.java | {
"start": 819,
"end": 1405
} | class ____ {
private boolean withFields;
private List<String> verificationIds;
public boolean isWithFields() {
return withFields;
}
public Params setWithFields(boolean withFields) {
this.withFields = withFields;
return this;
}
public List<String> getVerificationIds() {
return verificationIds;
}
public Params setVerificationIds(List<String> verificationIds) {
this.verificationIds = verificationIds;
return this;
}
}
}
| Params |
java | quarkusio__quarkus | extensions/vertx-http/deployment/src/test/java/io/quarkus/vertx/http/tls/TlsServerWithP12WithAliasTest.java | {
"start": 970,
"end": 2008
} | class ____ {
@TestHTTPResource(value = "/tls", tls = true)
URL url;
@RegisterExtension
static final QuarkusUnitTest config = new QuarkusUnitTest()
.withApplicationRoot((jar) -> jar
.addClasses(MyBean.class)
.addAsResource(new File("target/certs/ssl-test-alias-keystore.p12"), "server-keystore.pkcs12"))
.overrideConfigKey("quarkus.tls.key-store.p12.path", "server-keystore.pkcs12")
.overrideConfigKey("quarkus.tls.key-store.p12.alias", "alias")
.overrideConfigKey("quarkus.tls.key-store.p12.password", "secret")
.overrideConfigKey("quarkus.tls.key-store.p12.alias-password", "alias-password");
@Test
public void testSslServerWithPkcs12() {
RestAssured
.given()
.trustStore(new File("target/certs/ssl-test-alias-truststore.jks"), "secret")
.get(url).then().statusCode(200).body(is("ssl"));
}
@ApplicationScoped
static | TlsServerWithP12WithAliasTest |
java | apache__kafka | server-common/src/main/java/org/apache/kafka/server/share/persister/PartitionData.java | {
"start": 3741,
"end": 5954
} | class ____ {
private int partition;
private int stateEpoch;
private long startOffset;
private int deliveryCompleteCount;
private short errorCode;
private String errorMessage;
private int leaderEpoch;
private List<PersisterStateBatch> stateBatches;
public Builder setPartition(int partition) {
this.partition = partition;
return this;
}
public Builder setStateEpoch(int stateEpoch) {
this.stateEpoch = stateEpoch;
return this;
}
public Builder setStartOffset(long startOffset) {
this.startOffset = startOffset;
return this;
}
public Builder setDeliveryCompleteCount(int deliveryCompleteCount) {
this.deliveryCompleteCount = deliveryCompleteCount;
return this;
}
public Builder setErrorCode(short errorCode) {
this.errorCode = errorCode;
return this;
}
public Builder setErrorMessage(String errorMessage) {
this.errorMessage = errorMessage;
return this;
}
public Builder setLeaderEpoch(int leaderEpoch) {
this.leaderEpoch = leaderEpoch;
return this;
}
public Builder setStateBatches(List<PersisterStateBatch> stateBatches) {
this.stateBatches = stateBatches;
return this;
}
public PartitionData build() {
return new PartitionData(partition, stateEpoch, startOffset, deliveryCompleteCount, errorCode, errorMessage, leaderEpoch, stateBatches);
}
}
@Override
public String toString() {
return "PartitionData(" +
"partition=" + partition + "," +
"stateEpoch=" + stateEpoch + "," +
"startOffset=" + startOffset + "," +
"deliveryCompleteCount=" + deliveryCompleteCount + "," +
"errorCode=" + errorCode + "," +
"errorMessage=" + errorMessage + "," +
"leaderEpoch=" + leaderEpoch + "," +
"stateBatches=" + stateBatches +
")";
}
}
| Builder |
java | assertj__assertj-core | assertj-core/src/main/java/org/assertj/core/util/Sets.java | {
"start": 902,
"end": 5080
} | class ____ {
/**
* Creates a <em>mutable</em> {@link LinkedHashSet} containing the given elements.
*
* @param <T> the generic type of the {@code HashSet} to create.
* @param elements the elements to store in the {@code HashSet}.
* @return the created {@code HashSet}, or {@code null} if the given array of elements is {@code null}.
*/
@SafeVarargs
public static <T> Set<T> set(T... elements) {
return newLinkedHashSet(elements);
}
/**
* Creates a <em>mutable</em> {@code HashSet}.
*
* @param <T> the generic type of the {@code HashSet} to create.
* @return the created {@code HashSet}.
*/
public static <T> HashSet<T> newHashSet() {
return new HashSet<>();
}
/**
* Creates a <em>mutable</em> {@code HashSet} containing the given elements.
*
* @param <T> the generic type of the {@code HashSet} to create.
* @param elements the elements to store in the {@code HashSet}.
* @return the created {@code HashSet}, or {@code null} if the given array of elements is {@code null}.
*/
public static <T> HashSet<T> newHashSet(Iterable<? extends T> elements) {
if (elements == null) {
return null;
}
return Streams.stream(elements).collect(toCollection(HashSet::new));
}
/**
* Creates a <em>mutable</em> {@code LinkedHashSet} containing the given elements.
*
* @param <T> the generic type of the {@code LinkedHashSet} to create.
* @param elements the elements to store in the {@code LinkedHashSet}.
* @return the created {@code LinkedHashSet}, or {@code null} if the given array of elements is {@code null}.
*/
public static <T> LinkedHashSet<T> newLinkedHashSet(Iterable<? extends T> elements) {
if (elements == null) {
return null;
}
return Streams.stream(elements).collect(toCollection(LinkedHashSet::new));
}
/**
* Creates a <em>mutable</em> {@code LinkedHashSet}.
*
* @param <T> the generic type of the {@code LinkedHashSet} to create.
* @return the created {@code LinkedHashSet}.
*/
public static <T> LinkedHashSet<T> newLinkedHashSet() {
return new LinkedHashSet<>();
}
/**
* Creates a <em>mutable</em> {@link LinkedHashSet} containing the given elements.
*
* @param <T> the generic type of the {@code LinkedHashSet} to create.
* @param elements the elements to store in the {@code LinkedHashSet}.
* @return the created {@code LinkedHashSet}, or {@code null} if the given array of elements is {@code null}.
*/
@SafeVarargs
public static <T> LinkedHashSet<T> newLinkedHashSet(T... elements) {
if (elements == null) {
return null;
}
LinkedHashSet<T> set = newLinkedHashSet();
java.util.Collections.addAll(set, elements);
return set;
}
/**
* Creates a <em>mutable</em> {@link TreeSet}.
*
* @param <T> the generic type of the {@link TreeSet} to create.
* @return the created {@link TreeSet}.
*/
public static <T> TreeSet<T> newTreeSet() {
return new TreeSet<>();
}
/**
* Creates a <em>mutable</em> {@link TreeSet} containing the given elements.
*
* @param <T> the generic type of the {@link TreeSet} to create.
* @param elements the elements to store in the {@link TreeSet}.
* @return the created {@link TreeSet}, or {@code null} if the given array of elements is {@code null}.
*/
@SafeVarargs
public static <T> TreeSet<T> newTreeSet(T... elements) {
if (elements == null) {
return null;
}
TreeSet<T> set = newTreeSet();
java.util.Collections.addAll(set, elements);
return set;
}
/**
* Creates a <em>mutable</em> {@code HashSet} containing the reference elements not in the toRemove set.
*
* @param <T> the generic type of the {@code HashSet} to create.
* @param reference the reference elements
* @param toRemove the elements to remove from the reference sets
* @return the created {@code HashSet} containing the reference elements not in the toRemove set.
*/
public static <T> Set<T> removeAll(Set<T> reference, Set<T> toRemove) {
Set<T> result = newLinkedHashSet(reference);
result.removeAll(toRemove);
return result;
}
private Sets() {}
}
| Sets |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.