language stringclasses 1 value | repo stringclasses 60 values | path stringlengths 22 294 | class_span dict | source stringlengths 13 1.16M | target stringlengths 1 113 |
|---|---|---|---|---|---|
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/volume/csi/VolumeId.java | {
"start": 1123,
"end": 1725
} | class ____ {
private final String volumeId;
public VolumeId(String volumeId) {
this.volumeId = volumeId;
}
public String getId() {
return this.volumeId;
}
@Override
public String toString() {
return this.volumeId;
}
@Override
public boolean equals(Object obj) {
if (!(obj instanceof VolumeId)) {
return false;
}
return StringUtils.equalsIgnoreCase(volumeId,
((VolumeId) obj).getId());
}
@Override
public int hashCode() {
HashCodeBuilder hc = new HashCodeBuilder();
hc.append(volumeId);
return hc.toHashCode();
}
}
| VolumeId |
java | elastic__elasticsearch | x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/AllLastSerializationTests.java | {
"start": 509,
"end": 1239
} | class ____ extends AbstractExpressionSerializationTests<AllLast> {
@Override
protected AllLast createTestInstance() {
return new AllLast(randomSource(), randomChild(), randomChild());
}
@Override
protected AllLast mutateInstance(AllLast instance) throws IOException {
Expression field = instance.field();
Expression sort = instance.sort();
if (randomBoolean()) {
field = randomValueOtherThan(field, AbstractExpressionSerializationTests::randomChild);
} else {
sort = randomValueOtherThan(sort, AbstractExpressionSerializationTests::randomChild);
}
return new AllLast(instance.source(), field, sort);
}
}
| AllLastSerializationTests |
java | apache__spark | sql/catalyst/src/main/java/org/apache/spark/sql/connector/catalog/ProcedureCatalog.java | {
"start": 1062,
"end": 1462
} | interface ____ extends CatalogPlugin {
/**
* Load a procedure by {@link Identifier identifier} from the catalog.
*
* @param ident a procedure identifier
* @return the loaded unbound procedure
*/
UnboundProcedure loadProcedure(Identifier ident);
/**
* List all procedures in the specified namespace.
*
*/
Identifier[] listProcedures(String[] namespace);
}
| ProcedureCatalog |
java | elastic__elasticsearch | server/src/test/java/org/elasticsearch/index/mapper/AbstractShapeGeometryFieldMapperTests.java | {
"start": 7611,
"end": 8174
} | class
____ top = encoder.decodeY(ints[0]);
double bottom = encoder.decodeY(ints[1]);
double negLeft = encoder.decodeX(ints[2]);
double negRight = encoder.decodeX(ints[3]);
double posLeft = encoder.decodeX(ints[4]);
double posRight = encoder.decodeX(ints[5]);
return new Rectangle(Math.min(negLeft, posLeft), Math.max(negRight, posRight), top, bottom);
} else if (list.size() == 4) {
// Data in order defined by Rectangle | double |
java | apache__kafka | connect/runtime/src/main/java/org/apache/kafka/connect/runtime/HerderRequest.java | {
"start": 849,
"end": 896
} | interface ____ {
void cancel();
}
| HerderRequest |
java | alibaba__nacos | config/src/main/java/com/alibaba/nacos/config/server/model/gray/AbstractGrayRule.java | {
"start": 852,
"end": 2379
} | class ____ implements GrayRule {
protected String rawGrayRuleExp;
protected int priority;
protected volatile boolean valid = true;
public AbstractGrayRule() {
}
public AbstractGrayRule(String rawGrayRuleExp, int priority) {
try {
parse(rawGrayRuleExp);
this.priority = priority;
} catch (NacosException e) {
valid = false;
}
this.rawGrayRuleExp = rawGrayRuleExp;
}
/**
* parse gray rule.
*
* @param rawGrayRule raw gray rule.
* @throws NacosException if parse failed.
* @date 2024/3/14
*/
protected abstract void parse(String rawGrayRule) throws NacosException;
/**
* match gray rule.
*
* @param labels conn labels.
* @return true if match.
* @date 2024/3/14
*/
public abstract boolean match(Map<String, String> labels);
public boolean isValid() {
return valid;
}
/**
* get type.
*
* @return gray rule type.
* @date 2024/3/14
*/
public abstract String getType();
/**
* get version.
*
* @return gray rule version.
* @date 2024/3/14
*/
public abstract String getVersion();
public String getRawGrayRuleExp() {
return rawGrayRuleExp;
}
public int getPriority() {
return priority;
}
public void setPriority(int priority) {
this.priority = priority;
}
}
| AbstractGrayRule |
java | quarkusio__quarkus | extensions/qute/deployment/src/main/java/io/quarkus/qute/deployment/devui/QuteDevUIProcessor.java | {
"start": 1331,
"end": 11787
} | class ____ {
@BuildStep(onlyIf = IsDevelopment.class)
public void pages(
EffectiveTemplatePathsBuildItem effectiveTemplatePaths,
List<CheckedTemplateBuildItem> checkedTemplates,
TemplateVariantsBuildItem variants,
TemplatesAnalysisBuildItem templatesAnalysis,
List<TemplateExtensionMethodBuildItem> templateExtensionMethods,
List<TemplateDataBuildItem> templateDatas,
List<ImplicitValueResolverBuildItem> implicitTemplateDatas,
List<TemplateGlobalBuildItem> templateGlobals,
BuildProducer<CardPageBuildItem> cardPages) {
CardPageBuildItem pageBuildItem = new CardPageBuildItem();
List<TemplatePathBuildItem> sortedTemplatePaths = effectiveTemplatePaths.getTemplatePaths().stream()
.sorted(Comparator.comparing(tp -> tp.getPath().toLowerCase())).collect(Collectors.toList());
pageBuildItem.addBuildTimeData("templates",
createTemplatesJson(sortedTemplatePaths, checkedTemplates, templatesAnalysis, variants));
List<TemplateExtensionMethodBuildItem> sortedExtensionMethods = templateExtensionMethods.stream()
.sorted(new Comparator<TemplateExtensionMethodBuildItem>() {
@Override
public int compare(TemplateExtensionMethodBuildItem m1, TemplateExtensionMethodBuildItem m2) {
DotName m1Class = m1.getMethod().declaringClass().name();
DotName m2Class = m2.getMethod().declaringClass().name();
int ret = m1Class.compareTo(m2Class);
return ret == 0 ? m1.getMethod().name().compareTo(m2.getMethod().name()) : ret;
}
}).collect(Collectors.toList());
pageBuildItem.addBuildTimeData("extensionMethods", createExtensionMethodsJson(sortedExtensionMethods));
List<TemplateDataBuildItem> sortedTemplateData = new ArrayList<>(templateDatas);
Set<DotName> explicitTargets = new HashSet<>();
for (TemplateDataBuildItem td : templateDatas) {
explicitTargets.add(td.getTargetClass().name());
}
for (ImplicitValueResolverBuildItem itd : implicitTemplateDatas) {
if (!explicitTargets.contains(itd.getClazz().name())) {
sortedTemplateData.add(new TemplateDataBuildItem(itd.getTemplateData(), itd.getClazz()));
}
}
sortedTemplateData = sortedTemplateData.stream()
.sorted(Comparator.comparing(td -> td.getTargetClass().name())).collect(Collectors.toList());
if (!sortedTemplateData.isEmpty()) {
pageBuildItem.addBuildTimeData("templateData", createTemplateDataJson(sortedTemplateData));
}
List<TemplateGlobalBuildItem> sortedTemplateGlobals = templateGlobals.stream()
.sorted(Comparator.comparing(tg -> tg.getName().toLowerCase())).collect(Collectors.toList());
if (!sortedTemplateGlobals.isEmpty()) {
pageBuildItem.addBuildTimeData("templateGlobals", createTemplateGlobalsJson(sortedTemplateGlobals));
}
pageBuildItem.addPage(Page.webComponentPageBuilder()
.title("Templates")
.icon("font-awesome-solid:file-code")
.componentLink("qwc-qute-templates.js")
.staticLabel(String.valueOf(sortedTemplatePaths.size())));
pageBuildItem.addPage(Page.webComponentPageBuilder()
.title("Extension Methods")
.icon("font-awesome-solid:puzzle-piece")
.componentLink("qwc-qute-extension-methods.js")
.staticLabel(String.valueOf(sortedExtensionMethods.size())));
if (!sortedTemplateData.isEmpty()) {
pageBuildItem.addPage(Page.webComponentPageBuilder()
.title("Template Data")
.icon("font-awesome-solid:database")
.componentLink("qwc-qute-template-data.js")
.staticLabel(String.valueOf(sortedTemplateData.size())));
}
if (!sortedTemplateGlobals.isEmpty()) {
pageBuildItem.addPage(Page.webComponentPageBuilder()
.title("Global Variables")
.icon("font-awesome-solid:globe")
.componentLink("qwc-qute-template-globals.js")
.staticLabel(String.valueOf(sortedTemplateGlobals.size())));
}
cardPages.produce(pageBuildItem);
}
private List<Map<String, String>> createTemplateGlobalsJson(List<TemplateGlobalBuildItem> sortedTemplateGlobals) {
List<Map<String, String>> globals = new ArrayList<>();
for (TemplateGlobalBuildItem global : sortedTemplateGlobals) {
Map<String, String> map = new HashMap<>();
map.put("name", global.getName());
map.put("target", global.getDeclaringClass() + "#"
+ (global.isField() ? global.getTarget().asField().name() : global.getTarget().asMethod().name() + "()"));
globals.add(map);
}
return globals;
}
private List<Map<String, Object>> createTemplateDataJson(List<TemplateDataBuildItem> sortedTemplateData) {
List<Map<String, Object>> data = new ArrayList<>();
for (TemplateDataBuildItem templateData : sortedTemplateData) {
Map<String, Object> map = new HashMap<>();
map.put("target", templateData.getTargetClass().name().toString());
if (templateData.hasNamespace()) {
map.put("namespace", templateData.getNamespace());
}
if (templateData.getIgnore() != null && templateData.getIgnore().length > 0) {
map.put("ignores", Arrays.toString(templateData.getIgnore()));
}
if (templateData.isProperties()) {
map.put("properties", true);
}
data.add(map);
}
return data;
}
@SuppressForbidden(reason = "Type#toString() is what we want to use here")
private List<Map<String, String>> createExtensionMethodsJson(
List<TemplateExtensionMethodBuildItem> sortedExtensionMethods) {
List<Map<String, String>> extensionMethods = new ArrayList<>();
for (TemplateExtensionMethodBuildItem templateExtensionMethod : sortedExtensionMethods) {
Map<String, String> extensionMethod = new HashMap<>();
extensionMethod.put("name", templateExtensionMethod.getMethod().declaringClass().name() + "#"
+ templateExtensionMethod.getMethod().name() + "()");
if (templateExtensionMethod.getMatchRegex() != null && !templateExtensionMethod.getMatchRegex().isEmpty()) {
extensionMethod.put("matchRegex", templateExtensionMethod.getMatchRegex());
} else if (!templateExtensionMethod.getMatchNames().isEmpty()) {
extensionMethod.put("matchNames", templateExtensionMethod.getMatchNames().toString());
} else {
extensionMethod.put("matchName", templateExtensionMethod.getMatchName());
}
if (templateExtensionMethod.hasNamespace()) {
extensionMethod.put("namespace", templateExtensionMethod.getNamespace());
} else {
extensionMethod.put("matchType", templateExtensionMethod.getMatchType().toString());
}
extensionMethods.add(extensionMethod);
}
return extensionMethods;
}
private List<Map<String, Object>> createTemplatesJson(List<TemplatePathBuildItem> sortedTemplatePaths,
List<CheckedTemplateBuildItem> checkedTemplates, TemplatesAnalysisBuildItem templatesAnalysis,
TemplateVariantsBuildItem variants) {
List<Map<String, Object>> templates = new ArrayList<>();
for (TemplatePathBuildItem templatePath : sortedTemplatePaths) {
Map<String, Object> template = new HashMap<>();
template.put("path", templatePath.getPath());
CheckedTemplateBuildItem checkedTemplate = findCheckedTemplate(getBasePath(templatePath.getPath(), variants),
checkedTemplates);
if (checkedTemplate != null) {
template.put("checkedTemplate",
checkedTemplate.getDescription());
}
TemplateAnalysis analysis = templatesAnalysis.getAnalysis().stream()
.filter(ta -> ta.path.equals(templatePath.getPath())).findFirst().orElse(null);
if (analysis != null) {
if (!analysis.fragmentIds.isEmpty()) {
List<String> fragmentIds = new ArrayList<>();
analysis.fragmentIds.forEach(fragmentIds::add);
template.put("fragmentIds", fragmentIds);
}
if (!analysis.parameterDeclarations.isEmpty()) {
List<String> paramDeclarations = new ArrayList<>();
for (ParameterDeclaration pd : analysis.parameterDeclarations) {
paramDeclarations.add(String.format("{@%s %s%s}",
pd.getTypeInfo().substring(1, pd.getTypeInfo().length() - 1), pd.getKey(),
pd.getDefaultValue() != null ? "=" + pd.getDefaultValue().toOriginalString() : ""));
}
template.put("paramDeclarations", paramDeclarations);
}
}
templates.add(template);
}
return templates;
}
private String getBasePath(String path, TemplateVariantsBuildItem variants) {
for (Entry<String, List<String>> e : variants.getVariants().entrySet()) {
if (e.getValue().contains(path)) {
return e.getKey();
}
}
return null;
}
private CheckedTemplateBuildItem findCheckedTemplate(String basePath, List<CheckedTemplateBuildItem> checkedTemplates) {
if (basePath != null) {
for (CheckedTemplateBuildItem checkedTemplate : checkedTemplates) {
if (checkedTemplate.isFragment()) {
continue;
}
if (checkedTemplate.templateId.equals(basePath)) {
return checkedTemplate;
}
}
}
return null;
}
}
| QuteDevUIProcessor |
java | grpc__grpc-java | xds/src/main/java/io/grpc/xds/SharedCallCounterMap.java | {
"start": 3383,
"end": 3509
} | class ____ {
private static final SharedCallCounterMap instance = new SharedCallCounterMap();
}
}
| SharedCallCounterMapHolder |
java | apache__avro | lang/java/avro/src/main/java/org/apache/avro/io/parsing/Symbol.java | {
"start": 9606,
"end": 10095
} | class ____ extends Symbol {
/**
* Set to <tt>true</tt> if and only if this implicit action is a trailing
* action. That is, it is an action that follows real symbol. E.g
* {@link Symbol#DEFAULT_END_ACTION}.
*/
public final boolean isTrailing;
private ImplicitAction() {
this(false);
}
private ImplicitAction(boolean isTrailing) {
super(Kind.IMPLICIT_ACTION);
this.isTrailing = isTrailing;
}
}
protected static | ImplicitAction |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/metrics/NodeManagerMetrics.java | {
"start": 1648,
"end": 6163
} | class ____ {
// CHECKSTYLE:OFF:VisibilityModifier
@Metric MutableCounterInt containersLaunched;
@Metric MutableCounterInt containersCompleted;
@Metric MutableCounterInt containersFailed;
@Metric MutableCounterInt containersKilled;
@Metric MutableCounterInt containersRolledBackOnFailure;
@Metric("# of reInitializing containers")
MutableGaugeInt containersReIniting;
@Metric("# of initializing containers")
MutableGaugeInt containersIniting;
@Metric MutableGaugeInt containersRunning;
@Metric("# of paused containers") MutableGaugeInt containersPaused;
@Metric("Current allocated memory in GB")
MutableGaugeInt allocatedGB;
@Metric("Current # of allocated containers")
MutableGaugeInt allocatedContainers;
@Metric MutableGaugeInt availableGB;
@Metric("Current allocated Virtual Cores")
MutableGaugeInt allocatedVCores;
@Metric MutableGaugeInt availableVCores;
@Metric("Container launch duration")
MutableRate containerLaunchDuration;
@Metric("Containers queued (Guaranteed)")
MutableGaugeInt containersGuaranteedQueued;
@Metric("Containers queued (Opportunistic)")
MutableGaugeInt containersOpportunisticQueued;
@Metric("# of bad local dirs")
MutableGaugeInt badLocalDirs;
@Metric("# of bad log dirs")
MutableGaugeInt badLogDirs;
@Metric("Disk utilization % on good local dirs")
MutableGaugeInt goodLocalDirsDiskUtilizationPerc;
@Metric("Disk utilization % on good log dirs")
MutableGaugeInt goodLogDirsDiskUtilizationPerc;
@Metric("Current allocated memory by opportunistic containers in GB")
MutableGaugeLong allocatedOpportunisticGB;
@Metric("Current allocated Virtual Cores by opportunistic containers")
MutableGaugeInt allocatedOpportunisticVCores;
@Metric("# of running opportunistic containers")
MutableGaugeInt runningOpportunisticContainers;
@Metric("Local cache size (public and private) before clean (Bytes)")
MutableGaugeLong cacheSizeBeforeClean;
@Metric("# of total bytes deleted from the public and private local cache")
MutableGaugeLong totalBytesDeleted;
@Metric("# of bytes deleted from the public local cache")
MutableGaugeLong publicBytesDeleted;
@Metric("# of bytes deleted from the private local cache")
MutableGaugeLong privateBytesDeleted;
@Metric("Current used physical memory by all containers in GB")
MutableGaugeInt containerUsedMemGB;
@Metric("Current used virtual memory by all containers in GB")
MutableGaugeInt containerUsedVMemGB;
@Metric("Aggregated CPU utilization of all containers")
MutableGaugeFloat containerCpuUtilization;
@Metric("Current used memory by this node in GB")
MutableGaugeInt nodeUsedMemGB;
@Metric("Current used virtual memory by this node in GB")
MutableGaugeInt nodeUsedVMemGB;
@Metric("Current CPU utilization")
MutableGaugeFloat nodeCpuUtilization;
@Metric("Current GPU utilization")
MutableGaugeFloat nodeGpuUtilization;
@Metric("Current running apps")
MutableGaugeInt applicationsRunning;
@Metric("Missed localization requests in bytes")
MutableCounterLong localizedCacheMissBytes;
@Metric("Cached localization requests in bytes")
MutableCounterLong localizedCacheHitBytes;
@Metric("Localization cache hit ratio (bytes)")
MutableGaugeInt localizedCacheHitBytesRatio;
@Metric("Missed localization requests (files)")
MutableCounterLong localizedCacheMissFiles;
@Metric("Cached localization requests (files)")
MutableCounterLong localizedCacheHitFiles;
@Metric("Localization cache hit ratio (files)")
MutableGaugeInt localizedCacheHitFilesRatio;
@Metric("Container localization time in milliseconds")
MutableRate localizationDurationMillis;
@Metric("ContainerMonitor time cost in milliseconds")
MutableGaugeLong containersMonitorCostTime;
// CHECKSTYLE:ON:VisibilityModifier
private JvmMetrics jvmMetrics = null;
private long allocatedMB;
private long availableMB;
private long allocatedOpportunisticMB;
private NodeManagerMetrics(JvmMetrics jvmMetrics) {
this.jvmMetrics = jvmMetrics;
}
public static NodeManagerMetrics create() {
return create(DefaultMetricsSystem.instance());
}
private static NodeManagerMetrics create(MetricsSystem ms) {
JvmMetrics jm = JvmMetrics.initSingleton("NodeManager", null);
return ms.register(new NodeManagerMetrics(jm));
}
public JvmMetrics getJvmMetrics() {
return jvmMetrics;
}
// Potential instrumentation | NodeManagerMetrics |
java | apache__rocketmq | controller/src/main/java/org/apache/rocketmq/controller/elect/ElectPolicy.java | {
"start": 879,
"end": 1516
} | interface ____ {
/**
* elect a master
*
* @param clusterName the broker group belongs to
* @param brokerName the broker group name
* @param syncStateBrokers all broker replicas in syncStateSet
* @param allReplicaBrokers all broker replicas
* @param oldMaster old master
* @param brokerId broker id(can be used as prefer or assigned in some elect policy)
* @return new master's broker id
*/
Long elect(String clusterName, String brokerName, Set<Long> syncStateBrokers, Set<Long> allReplicaBrokers,
Long oldMaster, Long brokerId);
}
| ElectPolicy |
java | elastic__elasticsearch | modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4Transport.java | {
"start": 3181,
"end": 15231
} | class ____ extends TcpTransport {
private static final Logger logger = LogManager.getLogger(Netty4Transport.class);
public static final ChannelOption<Integer> OPTION_TCP_KEEP_IDLE = NioChannelOption.of(NetUtils.getTcpKeepIdleSocketOption());
public static final ChannelOption<Integer> OPTION_TCP_KEEP_INTERVAL = NioChannelOption.of(NetUtils.getTcpKeepIntervalSocketOption());
public static final ChannelOption<Integer> OPTION_TCP_KEEP_COUNT = NioChannelOption.of(NetUtils.getTcpKeepCountSocketOption());
private final SharedGroupFactory sharedGroupFactory;
private final RecvByteBufAllocator recvByteBufAllocator;
private final ByteSizeValue receivePredictorMin;
private final ByteSizeValue receivePredictorMax;
private final Map<String, ServerBootstrap> serverBootstraps = newConcurrentMap();
private volatile Bootstrap clientBootstrap;
private volatile SharedGroupFactory.SharedGroup sharedGroup;
protected final boolean remoteClusterPortEnabled;
private final ThreadWatchdog threadWatchdog;
public Netty4Transport(
Settings settings,
TransportVersion version,
ThreadPool threadPool,
NetworkService networkService,
PageCacheRecycler pageCacheRecycler,
NamedWriteableRegistry namedWriteableRegistry,
CircuitBreakerService circuitBreakerService,
SharedGroupFactory sharedGroupFactory
) {
super(settings, version, threadPool, pageCacheRecycler, circuitBreakerService, namedWriteableRegistry, networkService);
Netty4Utils.setAvailableProcessors(EsExecutors.allocatedProcessors(settings));
NettyAllocator.logAllocatorDescriptionIfNeeded();
this.sharedGroupFactory = sharedGroupFactory;
this.threadWatchdog = networkService.getThreadWatchdog();
// See AdaptiveReceiveBufferSizePredictor#DEFAULT_XXX for default values in netty..., we can use higher ones for us, even fixed one
this.receivePredictorMin = Netty4Plugin.NETTY_RECEIVE_PREDICTOR_MIN.get(settings);
this.receivePredictorMax = Netty4Plugin.NETTY_RECEIVE_PREDICTOR_MAX.get(settings);
if (receivePredictorMax.getBytes() == receivePredictorMin.getBytes()) {
recvByteBufAllocator = new FixedRecvByteBufAllocator((int) receivePredictorMax.getBytes());
} else {
recvByteBufAllocator = new AdaptiveRecvByteBufAllocator(
(int) receivePredictorMin.getBytes(),
(int) receivePredictorMin.getBytes(),
(int) receivePredictorMax.getBytes()
);
}
this.remoteClusterPortEnabled = REMOTE_CLUSTER_SERVER_ENABLED.get(settings);
}
@Override
protected Recycler<BytesRef> createRecycler(Settings settings, PageCacheRecycler pageCacheRecycler) {
return Netty4Utils.createRecycler(settings);
}
@Override
protected void doStart() {
boolean success = false;
try {
sharedGroup = sharedGroupFactory.getTransportGroup();
clientBootstrap = createClientBootstrap(sharedGroup);
if (NetworkService.NETWORK_SERVER.get(settings)) {
for (ProfileSettings profileSettings : profileSettingsSet) {
createServerBootstrap(profileSettings, sharedGroup);
bindServer(profileSettings);
}
}
threadWatchdog.run(settings, threadPool, lifecycle);
success = true;
} finally {
if (success == false) {
doStop();
}
}
}
private Bootstrap createClientBootstrap(SharedGroupFactory.SharedGroup sharedGroupForBootstrap) {
final Bootstrap bootstrap = new Bootstrap();
bootstrap.group(sharedGroupForBootstrap.getLowLevelGroup());
// NettyAllocator will return the channel type designed to work with the configured allocator
assert Netty4NioSocketChannel.class.isAssignableFrom(NettyAllocator.getChannelType());
bootstrap.channel(NettyAllocator.getChannelType());
bootstrap.option(ChannelOption.ALLOCATOR, NettyAllocator.getAllocator());
// The TCP options are re-configured for client connections to RCS remote clusters
// If how options are configured is changed here, please also update RemoteClusterClientBootstrapOptions#configure
// which is used inside SecurityNetty4Transport#getClientBootstrap
bootstrap.option(ChannelOption.TCP_NODELAY, TransportSettings.TCP_NO_DELAY.get(settings));
bootstrap.option(ChannelOption.SO_KEEPALIVE, TransportSettings.TCP_KEEP_ALIVE.get(settings));
if (TransportSettings.TCP_KEEP_ALIVE.get(settings)) {
// Note that Netty logs a warning if it can't set the option
if (TransportSettings.TCP_KEEP_IDLE.get(settings) >= 0) {
bootstrap.option(OPTION_TCP_KEEP_IDLE, TransportSettings.TCP_KEEP_IDLE.get(settings));
}
if (TransportSettings.TCP_KEEP_INTERVAL.get(settings) >= 0) {
bootstrap.option(OPTION_TCP_KEEP_INTERVAL, TransportSettings.TCP_KEEP_INTERVAL.get(settings));
}
if (TransportSettings.TCP_KEEP_COUNT.get(settings) >= 0) {
bootstrap.option(OPTION_TCP_KEEP_COUNT, TransportSettings.TCP_KEEP_COUNT.get(settings));
}
}
final ByteSizeValue tcpSendBufferSize = TransportSettings.TCP_SEND_BUFFER_SIZE.get(settings);
if (tcpSendBufferSize.getBytes() > 0) {
bootstrap.option(ChannelOption.SO_SNDBUF, Math.toIntExact(tcpSendBufferSize.getBytes()));
}
final ByteSizeValue tcpReceiveBufferSize = TransportSettings.TCP_RECEIVE_BUFFER_SIZE.get(settings);
if (tcpReceiveBufferSize.getBytes() > 0) {
bootstrap.option(ChannelOption.SO_RCVBUF, Math.toIntExact(tcpReceiveBufferSize.getBytes()));
}
bootstrap.option(ChannelOption.RCVBUF_ALLOCATOR, recvByteBufAllocator);
final boolean reuseAddress = TransportSettings.TCP_REUSE_ADDRESS.get(settings);
bootstrap.option(ChannelOption.SO_REUSEADDR, reuseAddress);
return bootstrap;
}
private void createServerBootstrap(ProfileSettings profileSettings, SharedGroupFactory.SharedGroup sharedGroupForServerBootstrap) {
String name = profileSettings.profileName;
if (logger.isDebugEnabled()) {
logger.debug(
"using profile[{}], worker_count[{}], port[{}], bind_host[{}], publish_host[{}], receive_predictor[{}->{}]",
name,
sharedGroupFactory.getTransportWorkerCount(),
profileSettings.portOrRange,
profileSettings.bindHosts,
profileSettings.publishHosts,
receivePredictorMin,
receivePredictorMax
);
}
final ServerBootstrap serverBootstrap = new ServerBootstrap();
serverBootstrap.group(sharedGroupForServerBootstrap.getLowLevelGroup());
// NettyAllocator will return the channel type designed to work with the configuredAllocator
serverBootstrap.channel(NettyAllocator.getServerChannelType());
// Set the allocators for both the server channel and the child channels created
serverBootstrap.option(ChannelOption.ALLOCATOR, NettyAllocator.getAllocator());
serverBootstrap.childOption(ChannelOption.ALLOCATOR, NettyAllocator.getAllocator());
serverBootstrap.childHandler(getServerChannelInitializer(name));
serverBootstrap.handler(new ServerChannelExceptionHandler());
serverBootstrap.childOption(ChannelOption.TCP_NODELAY, profileSettings.tcpNoDelay);
serverBootstrap.childOption(ChannelOption.SO_KEEPALIVE, profileSettings.tcpKeepAlive);
if (profileSettings.tcpKeepAlive) {
// Note that Netty logs a warning if it can't set the option
if (profileSettings.tcpKeepIdle >= 0) {
serverBootstrap.childOption(NioChannelOption.of(NetUtils.getTcpKeepIdleSocketOption()), profileSettings.tcpKeepIdle);
}
if (profileSettings.tcpKeepInterval >= 0) {
serverBootstrap.childOption(
NioChannelOption.of(NetUtils.getTcpKeepIntervalSocketOption()),
profileSettings.tcpKeepInterval
);
}
if (profileSettings.tcpKeepCount >= 0) {
serverBootstrap.childOption(NioChannelOption.of(NetUtils.getTcpKeepCountSocketOption()), profileSettings.tcpKeepCount);
}
}
if (profileSettings.sendBufferSize.getBytes() != -1) {
serverBootstrap.childOption(ChannelOption.SO_SNDBUF, Math.toIntExact(profileSettings.sendBufferSize.getBytes()));
}
if (profileSettings.receiveBufferSize.getBytes() != -1) {
serverBootstrap.childOption(ChannelOption.SO_RCVBUF, Math.toIntExact(profileSettings.receiveBufferSize.bytesAsInt()));
}
serverBootstrap.option(ChannelOption.RCVBUF_ALLOCATOR, recvByteBufAllocator);
serverBootstrap.childOption(ChannelOption.RCVBUF_ALLOCATOR, recvByteBufAllocator);
serverBootstrap.option(ChannelOption.SO_REUSEADDR, profileSettings.reuseAddress);
serverBootstrap.childOption(ChannelOption.SO_REUSEADDR, profileSettings.reuseAddress);
serverBootstrap.validate();
serverBootstraps.put(name, serverBootstrap);
}
protected ChannelHandler getServerChannelInitializer(String name) {
return new ServerChannelInitializer(name);
}
protected ChannelHandler getClientChannelInitializer(DiscoveryNode node, ConnectionProfile connectionProfile) {
return new ClientChannelInitializer();
}
static final AttributeKey<Netty4TcpChannel> CHANNEL_KEY = AttributeKey.newInstance("es-channel");
static final AttributeKey<Netty4TcpServerChannel> SERVER_CHANNEL_KEY = AttributeKey.newInstance("es-server-channel");
@Override
protected Netty4TcpChannel initiateChannel(DiscoveryNode node, ConnectionProfile connectionProfile) throws IOException {
InetSocketAddress address = node.getAddress().address();
Bootstrap bootstrapWithHandler = getClientBootstrap(connectionProfile);
bootstrapWithHandler.handler(getClientChannelInitializer(node, connectionProfile));
bootstrapWithHandler.remoteAddress(address);
ChannelFuture connectFuture = bootstrapWithHandler.connect();
Channel channel = connectFuture.channel();
if (channel == null) {
ExceptionsHelper.maybeDieOnAnotherThread(connectFuture.cause());
throw new IOException(connectFuture.cause());
}
Netty4TcpChannel nettyChannel = new Netty4TcpChannel(
channel,
false,
connectionProfile.getTransportProfile(),
rstOnClose,
connectFuture
);
channel.attr(CHANNEL_KEY).set(nettyChannel);
return nettyChannel;
}
protected Bootstrap getClientBootstrap(ConnectionProfile connectionProfile) {
return clientBootstrap.clone();
}
@Override
protected Netty4TcpServerChannel bind(String name, InetSocketAddress address) {
Channel channel = serverBootstraps.get(name).bind(address).syncUninterruptibly().channel();
Netty4TcpServerChannel esChannel = new Netty4TcpServerChannel(channel);
channel.attr(SERVER_CHANNEL_KEY).set(esChannel);
return esChannel;
}
@Override
@SuppressForbidden(reason = "debug")
protected void stopInternal() {
Releasables.close(() -> {
if (sharedGroup != null) {
sharedGroup.shutdown();
}
}, serverBootstraps::clear, () -> clientBootstrap = null);
}
static Exception exceptionFromThrowable(Throwable cause) {
if (cause instanceof Error) {
return new Exception(cause);
} else {
return (Exception) cause;
}
}
protected | Netty4Transport |
java | quarkusio__quarkus | independent-projects/tools/codestarts/src/main/java/io/quarkus/devtools/codestarts/core/CodestartSpec.java | {
"start": 519,
"end": 2803
} | class ____ {
private final String name;
private final boolean isPreselected;
private final String ref;
private final CodestartType type;
private final boolean isFallback;
private final Set<String> tags;
private final Map<String, String> metadata;
private final Map<String, String> outputStrategy;
private final Map<String, LanguageSpec> languagesSpec;
@JsonCreator
public CodestartSpec(@JsonProperty(value = "name", required = true) String name,
@JsonProperty(value = "ref") String ref,
@JsonProperty(value = "type") CodestartType type,
@JsonProperty("fallback") boolean isFallback,
@JsonProperty("preselected") boolean isPreselected,
@JsonProperty("tags") Set<String> tags,
@JsonProperty("metadata") Map<String, String> metadata,
@JsonProperty("output-strategy") Map<String, String> outputStrategy,
@JsonProperty("language") Map<String, LanguageSpec> languagesSpec) {
this.name = requireNonNull(name, "name is required");
this.tags = tags != null ? tags : Collections.emptySet();
this.ref = ref != null ? ref : name;
this.type = type != null ? type : CodestartType.CODE;
this.isFallback = isFallback;
this.isPreselected = isPreselected;
this.outputStrategy = outputStrategy != null ? outputStrategy : Collections.emptyMap();
this.metadata = metadata != null ? metadata : Collections.emptyMap();
this.languagesSpec = languagesSpec != null ? languagesSpec : Collections.emptyMap();
}
public String getName() {
return name;
}
public String getRef() {
return ref;
}
public Set<String> getTags() {
return tags;
}
public CodestartType getType() {
return type;
}
public boolean isFallback() {
return isFallback;
}
public boolean isPreselected() {
return isPreselected;
}
public Map<String, String> getMetadata() {
return metadata;
}
public Map<String, String> getOutputStrategy() {
return outputStrategy;
}
public Map<String, LanguageSpec> getLanguagesSpec() {
return languagesSpec;
}
public static final | CodestartSpec |
java | apache__maven | impl/maven-core/src/test/java/org/apache/maven/configuration/internal/CompositeBeanHelperPerformanceTest.java | {
"start": 3306,
"end": 10834
} | class ____ {
private ConverterLookup converterLookup;
private ExpressionEvaluator evaluator;
private ConfigurationListener listener;
private CompositeBeanHelper originalHelper;
private EnhancedCompositeBeanHelper optimizedHelper;
@Setup(Level.Trial)
@BeforeEach
public void setUp() throws ExpressionEvaluationException {
converterLookup = new DefaultConverterLookup();
evaluator = mock(ExpressionEvaluator.class);
listener = mock(ConfigurationListener.class);
when(evaluator.evaluate(anyString())).thenReturn("testValue");
for (int i = 0; i < 10; i++) {
when(evaluator.evaluate(Integer.toString(i))).thenReturn(i);
}
when(evaluator.evaluate("123")).thenReturn(123);
when(evaluator.evaluate("456")).thenReturn(456);
when(evaluator.evaluate("true")).thenReturn(true);
originalHelper = new CompositeBeanHelper(converterLookup, getClass().getClassLoader(), evaluator, listener);
optimizedHelper =
new EnhancedCompositeBeanHelper(converterLookup, getClass().getClassLoader(), evaluator, listener);
}
@TearDown(Level.Trial)
@AfterEach
public void tearDown() {
// Clear caches between trials (10-second periods) to allow cache benefits within each trial
EnhancedCompositeBeanHelper.clearCaches();
}
@Benchmark
public void benchmarkOriginalHelper() throws Exception {
RealisticTestBean bean = new RealisticTestBean();
// Set multiple properties to simulate real mojo configuration
// Use direct method calls instead of reflection for fair comparison
PlexusConfiguration nameConfig = new XmlPlexusConfiguration("name");
nameConfig.setValue("testValue");
originalHelper.setProperty(bean, "name", String.class, nameConfig);
PlexusConfiguration countConfig = new XmlPlexusConfiguration("count");
countConfig.setValue("123");
originalHelper.setProperty(bean, "count", Integer.class, countConfig);
PlexusConfiguration enabledConfig = new XmlPlexusConfiguration("enabled");
enabledConfig.setValue("true");
originalHelper.setProperty(bean, "enabled", Boolean.class, enabledConfig);
PlexusConfiguration descConfig = new XmlPlexusConfiguration("description");
descConfig.setValue("testValue");
originalHelper.setProperty(bean, "description", String.class, descConfig);
PlexusConfiguration timeoutConfig = new XmlPlexusConfiguration("timeout");
timeoutConfig.setValue("123");
originalHelper.setProperty(bean, "timeout", Long.class, timeoutConfig);
}
@Benchmark
public void benchmarkOptimizedHelper() throws Exception {
RealisticTestBean bean = new RealisticTestBean();
// Set multiple properties to simulate real mojo configuration
PlexusConfiguration nameConfig = new XmlPlexusConfiguration("name");
nameConfig.setValue("testValue");
optimizedHelper.setProperty(bean, "name", String.class, nameConfig);
PlexusConfiguration countConfig = new XmlPlexusConfiguration("count");
countConfig.setValue("123");
optimizedHelper.setProperty(bean, "count", Integer.class, countConfig);
PlexusConfiguration enabledConfig = new XmlPlexusConfiguration("enabled");
enabledConfig.setValue("true");
optimizedHelper.setProperty(bean, "enabled", Boolean.class, enabledConfig);
PlexusConfiguration descConfig = new XmlPlexusConfiguration("description");
descConfig.setValue("testValue");
optimizedHelper.setProperty(bean, "description", String.class, descConfig);
PlexusConfiguration timeoutConfig = new XmlPlexusConfiguration("timeout");
timeoutConfig.setValue("123");
optimizedHelper.setProperty(bean, "timeout", Long.class, timeoutConfig);
}
/**
* Benchmark that tests multiple property configurations in a single operation.
* This simulates a more realistic scenario where multiple properties are set on a bean.
*/
@Benchmark
public void benchmarkOriginalHelperMultipleProperties() throws Exception {
RealisticTestBean bean = new RealisticTestBean();
// Set multiple properties in one benchmark iteration
PlexusConfiguration config6 = new XmlPlexusConfiguration("name");
config6.setValue("testValue");
originalHelper.setProperty(bean, "name", String.class, config6);
PlexusConfiguration config5 = new XmlPlexusConfiguration("count");
config5.setValue("123");
originalHelper.setProperty(bean, "count", Integer.class, config5);
PlexusConfiguration config4 = new XmlPlexusConfiguration("enabled");
config4.setValue("true");
originalHelper.setProperty(bean, "enabled", Boolean.class, config4);
PlexusConfiguration config3 = new XmlPlexusConfiguration("description");
config3.setValue("testValue");
originalHelper.setProperty(bean, "description", String.class, config3);
PlexusConfiguration config2 = new XmlPlexusConfiguration("timeout");
config2.setValue("123");
originalHelper.setProperty(bean, "timeout", Long.class, config2);
// Repeat to test caching
PlexusConfiguration config1 = new XmlPlexusConfiguration("name");
config1.setValue("testValue2");
originalHelper.setProperty(bean, "name", String.class, config1);
PlexusConfiguration config = new XmlPlexusConfiguration("count");
config.setValue("456");
originalHelper.setProperty(bean, "count", Integer.class, config);
}
@Benchmark
public void benchmarkOptimizedHelperMultipleProperties() throws Exception {
RealisticTestBean bean = new RealisticTestBean();
// Set multiple properties in one benchmark iteration
PlexusConfiguration nameConfig = new XmlPlexusConfiguration("name");
nameConfig.setValue("testValue");
optimizedHelper.setProperty(bean, "name", String.class, nameConfig);
PlexusConfiguration countConfig = new XmlPlexusConfiguration("count");
countConfig.setValue("123");
optimizedHelper.setProperty(bean, "count", Integer.class, countConfig);
PlexusConfiguration enabledConfig = new XmlPlexusConfiguration("enabled");
enabledConfig.setValue("true");
optimizedHelper.setProperty(bean, "enabled", Boolean.class, enabledConfig);
PlexusConfiguration descConfig = new XmlPlexusConfiguration("description");
descConfig.setValue("testValue");
optimizedHelper.setProperty(bean, "description", String.class, descConfig);
PlexusConfiguration timeoutConfig = new XmlPlexusConfiguration("timeout");
timeoutConfig.setValue("123");
optimizedHelper.setProperty(bean, "timeout", Long.class, timeoutConfig);
// Repeat to test caching benefits
nameConfig.setValue("testValue2");
optimizedHelper.setProperty(bean, "name", String.class, nameConfig);
countConfig.setValue("456");
optimizedHelper.setProperty(bean, "count", Integer.class, countConfig);
}
/**
* Benchmark that tests cache benefits by repeatedly setting properties on the same class.
* This better demonstrates the caching improvements.
*/
@Benchmark
public void benchmarkOriginalHelperRepeatedOperations() throws Exception {
// Test cache benefits by using same | CompositeBeanHelperPerformanceTest |
java | apache__camel | dsl/camel-componentdsl/src/generated/java/org/apache/camel/builder/component/dsl/HazelcastSetComponentBuilderFactory.java | {
"start": 6792,
"end": 8016
} | class ____
extends AbstractComponentBuilder<HazelcastSetComponent>
implements HazelcastSetComponentBuilder {
@Override
protected HazelcastSetComponent buildConcreteComponent() {
return new HazelcastSetComponent();
}
@Override
protected boolean setPropertyOnComponent(
Component component,
String name,
Object value) {
switch (name) {
case "bridgeErrorHandler": ((HazelcastSetComponent) component).setBridgeErrorHandler((boolean) value); return true;
case "lazyStartProducer": ((HazelcastSetComponent) component).setLazyStartProducer((boolean) value); return true;
case "autowiredEnabled": ((HazelcastSetComponent) component).setAutowiredEnabled((boolean) value); return true;
case "hazelcastInstance": ((HazelcastSetComponent) component).setHazelcastInstance((com.hazelcast.core.HazelcastInstance) value); return true;
case "hazelcastMode": ((HazelcastSetComponent) component).setHazelcastMode((java.lang.String) value); return true;
default: return false;
}
}
}
} | HazelcastSetComponentBuilderImpl |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/runtime/executiongraph/JobStatusListener.java | {
"start": 1018,
"end": 1793
} | interface ____ {
/**
* This method is called whenever the status of the job changes.
*
* @param jobId The ID of the job.
* @param newJobStatus The status the job switched to.
* @param timestamp The timestamp when the status transition occurred.
*/
void jobStatusChanges(JobID jobId, JobStatus newJobStatus, long timestamp);
static JobStatusListener combine(JobStatusListener... jobStatusListeners) {
return (jobId, newJobStatus, timestamp) -> {
for (JobStatusListener jobStatusListener : jobStatusListeners) {
if (jobStatusListener != null) {
jobStatusListener.jobStatusChanges(jobId, newJobStatus, timestamp);
}
}
};
}
}
| JobStatusListener |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/boot/jaxb/mapping/internal/FetchTypeMarshalling.java | {
"start": 270,
"end": 519
} | class ____ {
public static FetchType fromXml(String name) {
return name == null ? null : FetchType.valueOf( name );
}
public static String toXml(FetchType fetchType) {
return fetchType == null ? null : fetchType.name();
}
}
| FetchTypeMarshalling |
java | quarkusio__quarkus | extensions/vertx-http/deployment/src/test/java/io/quarkus/vertx/http/security/FluentApiMTLSAuthenticationOnRequestTest.java | {
"start": 855,
"end": 3004
} | class ____ {
@RegisterExtension
static QuarkusUnitTest test = new QuarkusUnitTest().setArchiveProducer(() -> ShrinkWrap
.create(JavaArchive.class)
.addClasses(AuthMechanismConfig.class, PathHandler.class)
.addAsResource(new StringAsset("""
quarkus.tls.cert-1.key-store.p12.path=server-keystore.p12
quarkus.tls.cert-1.key-store.p12.password=secret
quarkus.tls.cert-1.trust-store.p12.path=server-truststore.p12
quarkus.tls.cert-1.trust-store.p12.password=secret
quarkus.http.tls-configuration-name=cert-1
"""), "application.properties")
.addAsResource(new File("target/certs/mtls-test-keystore.p12"), "server-keystore.p12")
.addAsResource(new File("target/certs/mtls-test-server-truststore.p12"), "server-truststore.p12"));
@TestHTTPResource(value = "/mtls", tls = true)
URL url;
@TestHTTPResource(value = "/public", tls = true)
URL publicTlsPath;
@TestHTTPResource(value = "/public")
URL publicPath;
@Test
public void testClientAuthEnforcedWhenAuthRequired() {
RestAssured.given()
.keyStore("target/certs/mtls-test-client-keystore.p12", "secret")
.trustStore("target/certs/mtls-test-client-truststore.p12", "secret")
.get(url).then().statusCode(200).body(is("CN=localhost:/mtls"));
RestAssured.given()
.trustStore("target/certs/mtls-test-client-truststore.p12", "secret")
.get(url).then().statusCode(401);
}
@Test
void testPublicPathDoesNotRequireClientAuth() {
RestAssured.given()
.trustStore("target/certs/mtls-test-client-truststore.p12", "secret")
.get(publicTlsPath)
.then().statusCode(200).body(is(":/public"));
}
@Test
void testInsecureRequestsAllowed() {
RestAssured.given()
.get(publicPath)
.then().statusCode(200).body(is(":/public"));
}
public static | FluentApiMTLSAuthenticationOnRequestTest |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/annotations/EntityTest.java | {
"start": 1898,
"end": 12262
} | class ____ {
private DateFormat df = SimpleDateFormat.getDateTimeInstance( DateFormat.LONG, DateFormat.LONG );
@Test
public void testLoad(DomainModelScope domainModelScope, SessionFactoryScope sessionFactoryScope) throws Exception {
//put an object in DB
assertEquals( "Flight", domainModelScope.getDomainModel().getEntityBinding( Flight.class.getName() ).getTable().getName() );
sessionFactoryScope.inTransaction(
session -> {
Flight firstOne = new Flight();
firstOne.setId( Long.valueOf( 1 ) );
firstOne.setName( "AF3202" );
firstOne.setDuration( new Long( 1000000 ) );
firstOne.setDurationInSec( 2000 );
session.persist( firstOne );
session.flush();
}
);
//read it
sessionFactoryScope.inTransaction(
session -> {
Flight firstOne = session.get( Flight.class, Long.valueOf( 1 ) );
assertNotNull( firstOne );
assertEquals( Long.valueOf( 1 ), firstOne.getId() );
assertEquals( "AF3202", firstOne.getName() );
assertEquals( Long.valueOf( 1000000 ), firstOne.getDuration() );
assertFalse( 2000l == firstOne.getDurationInSec(), "Transient is not working" );
}
);
}
@Test
public void testColumn(SessionFactoryScope scope) {
//put an object in DB
scope.inTransaction(
session -> {
Flight firstOne = new Flight();
firstOne.setId( Long.valueOf( 1 ) );
firstOne.setName( "AF3202" );
firstOne.setDuration( Long.valueOf( 1000000 ) );
firstOne.setDurationInSec( 2000 );
session.persist( firstOne );
session.flush();
}
);
scope.inSession(
session -> {
Transaction tx = session.beginTransaction();
Flight firstOne = new Flight();
firstOne.setId( Long.valueOf( 1 ) );
firstOne.setName( null );
try {
session.persist( firstOne );
tx.commit();
fail( "Name column should be not null" );
}
catch (HibernateException e) {
//fine
}
}
);
//insert an object and check that name is not updatable
scope.inTransaction(
session -> {
Flight firstOne = new Flight();
firstOne.setId( Long.valueOf( 1 ) );
firstOne.setName( "AF3202" );
firstOne.setTriggeredData( "should not be insertable" );
}
);
scope.inTransaction(
session -> {
Flight firstOne = session.get( Flight.class, Long.valueOf( 1 ) );
assertNotNull( firstOne );
assertEquals( Long.valueOf( 1 ), firstOne.getId() );
assertEquals( "AF3202", firstOne.getName() );
assertFalse( "should not be insertable".equals( firstOne.getTriggeredData() ) );
firstOne.setName( "BA1234" );
firstOne.setTriggeredData( "should not be updatable" );
}
);
scope.inTransaction(
session -> {
Flight firstOne = session.get( Flight.class, Long.valueOf( 1 ) );
assertNotNull( firstOne );
assertEquals( Long.valueOf( 1 ), firstOne.getId() );
assertEquals( "AF3202", firstOne.getName() );
assertFalse( "should not be updatable".equals( firstOne.getTriggeredData() ) );
}
);
}
@Test
public void testColumnUnique(SessionFactoryScope scope) {
scope.inSession(
session -> {
Transaction tx = session.beginTransaction();
Sky sky = new Sky();
sky.id = Long.valueOf( 2 );
sky.color = "blue";
sky.day = "monday";
sky.month = "January";
Sky sameSky = new Sky();
sameSky.id = Long.valueOf( 3 );
sameSky.color = "blue";
sky.day = "tuesday";
sky.month = "January";
try {
session.persist( sky );
session.flush();
session.persist( sameSky );
tx.commit();
fail( "unique constraints not respected" );
}
catch (HibernateException e) {
//success
}
finally {
if ( tx != null ) {
tx.rollback();
}
}
}
);
}
@Test
public void testUniqueConstraint(SessionFactoryScope scope) {
int id = 5;
Sky sky = new Sky();
sky.id = Long.valueOf( id++ );
sky.color = "green";
sky.day = "monday";
sky.month = "March";
Sky otherSky = new Sky();
otherSky.id = Long.valueOf( id++ );
otherSky.color = "red";
otherSky.day = "friday";
otherSky.month = "March";
Sky sameSky = new Sky();
sameSky.id = Long.valueOf( id++ );
sameSky.color = "green";
sameSky.day = "monday";
sameSky.month = "March";
scope.inTransaction(
session -> {
session.persist( sky );
session.flush();
session.persist( otherSky );
}
);
scope.inSession(
session -> {
Transaction tx = session.beginTransaction();
try {
session.persist( sameSky );
tx.commit();
fail( "unique constraints not respected" );
}
catch (PersistenceException e) {
//success
if ( tx != null ) {
tx.rollback();
}
}
}
);
}
@Test
public void testVersion(SessionFactoryScope scope) {
// put an object in DB
scope.inTransaction(
session -> {
Flight firstOne = new Flight();
firstOne.setId( Long.valueOf( 2 ) );
firstOne.setName( "AF3202" );
firstOne.setDuration( Long.valueOf( 500 ) );
session.persist( firstOne );
session.flush();
}
);
//read it
Flight firstOne = scope.fromTransaction(
session -> session.get( Flight.class, Long.valueOf( 2 ) )
);
//read it again
Flight concurrentOne = scope.fromTransaction(
session -> {
Flight _concurrentOne = session.get( Flight.class, Long.valueOf( 2 ) );
_concurrentOne.setDuration( Long.valueOf( 1000 ) );
return session.merge( _concurrentOne );
}
);
assertFalse( firstOne == concurrentOne );
assertFalse( firstOne.getVersion().equals( concurrentOne.getVersion() ) );
//reattach the first one
scope.inSession(
session -> {
Transaction tx = session.beginTransaction();
firstOne.setName( "Second access" );
try {
session.merge( firstOne );
tx.commit();
fail( "Optimistic locking should work" );
}
catch (OptimisticLockException expected) {
if ( expected.getCause() instanceof StaleStateException ) {
//expected
}
else {
fail( "StaleStateException expected but is " + expected.getCause() );
}
}
finally {
if ( tx != null ) {
tx.rollback();
}
}
}
);
}
@Test
public void testFieldAccess(SessionFactoryScope scope) {
final Sky sky = new Sky();
sky.id = Long.valueOf( 1 );
sky.color = "black";
sky.area = "Paris";
sky.day = "23";
sky.month = "1";
scope.inTransaction(
session -> session.persist( sky )
);
sky.area = "London";
scope.inTransaction(
session -> {
Sky _sky = session.get( Sky.class, sky.id );
assertNotNull( _sky );
assertEquals( "black", _sky.color );
assertFalse( "Paris".equals( _sky.area ) );
}
);
}
@Test
public void testEntityName(DomainModelScope domainModelScope, SessionFactoryScope sessionFactoryScope) {
assertEquals( "Corporation", domainModelScope.getDomainModel().getEntityBinding( Company.class.getName() ).getTable().getName() );
sessionFactoryScope.inTransaction(
session -> {
Company comp = new Company();
session.persist( comp );
comp.setName( "JBoss Inc" );
}
);
sessionFactoryScope.inTransaction(
session -> {
List result = session.createQuery( "from Corporation" ).list();
assertNotNull( result );
assertEquals( 1, result.size() );
}
);
}
@Test
public void testNonGetter(SessionFactoryScope scope) {
Flight airFrance = new Flight();
airFrance.setId( Long.valueOf( 747 ) );
airFrance.setName( "Paris-Amsterdam" );
airFrance.setDuration( Long.valueOf( 10 ) );
airFrance.setFactor( 25 );
scope.inTransaction(
session -> session.persist( airFrance )
);
scope.inTransaction(
session -> {
Flight _airFrance = session.get( Flight.class, airFrance.getId() );
assertNotNull( _airFrance );
assertEquals( Long.valueOf( 10 ), _airFrance.getDuration() );
assertFalse( 25 == _airFrance.getFactor( false ) );
session.remove( _airFrance );
}
);
}
@Test
@SkipForDialect(dialectClass = OracleDialect.class, majorVersion = 10, reason = "oracle12c returns time in getDate. For now, skip.")
public void testTemporalType(SessionFactoryScope scope) {
final ZoneId zoneId = ( scope.getSessionFactory().getJdbcServices().getDialect() instanceof MySQLDialect ) ? ZoneId.of( "UTC")
: ZoneId.systemDefault();
Flight airFrance = new Flight();
airFrance.setId( Long.valueOf( 747 ) );
airFrance.setName( "Paris-Amsterdam" );
airFrance.setDuration( Long.valueOf( 10 ) );
airFrance.setDepartureDate( Date.from(LocalDate.of( 2005, 06, 21 ).atStartOfDay(zoneId).toInstant()) );
airFrance.setAlternativeDepartureDate( new GregorianCalendar( 2006, 02, 03, 10, 00 ) );
airFrance.getAlternativeDepartureDate().setTimeZone( TimeZone.getTimeZone( "GMT" ) );
airFrance.setBuyDate( new java.sql.Timestamp( 122367443 ) );
airFrance.setFactor( 25 );
scope.inTransaction(
session -> session.persist( airFrance )
);
scope.inTransaction(
session -> {
Query q = session.createQuery( "from Flight f where f.departureDate = :departureDate" );
q.setParameter( "departureDate", airFrance.getDepartureDate(), StandardBasicTypes.DATE );
Flight copyAirFrance = (Flight) q.uniqueResult();
assertNotNull( copyAirFrance );
assertEquals(
Date.from(LocalDate.of( 2005, 06, 21 ).atStartOfDay(zoneId).toInstant()),
copyAirFrance.getDepartureDate()
);
assertEquals( df.format( airFrance.getBuyDate() ), df.format( copyAirFrance.getBuyDate() ) );
session.remove( copyAirFrance );
}
);
}
@Test
public void testBasic(SessionFactoryScope scope) throws Exception {
scope.inSession(
session -> {
Transaction tx = session.beginTransaction();
Flight airFrance = new Flight();
airFrance.setId( Long.valueOf( 747 ) );
airFrance.setName( "Paris-Amsterdam" );
airFrance.setDuration( null );
try {
session.persist( airFrance );
tx.commit();
fail( "Basic(optional=false) fails" );
}
catch (Exception e) {
//success
if ( tx != null ) {
tx.rollback();
}
}
}
);
}
@AfterEach
public void runDropSchema(SessionFactoryScope scope) {
scope.getSessionFactory().getSchemaManager().truncate();
}
}
| EntityTest |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestReplaceDatanodeFailureReplication.java | {
"start": 8059,
"end": 9326
} | class ____ extends SubjectInheritingThread {
private final Path filepath;
private final HdfsDataOutputStream out;
private final long sleepms;
private volatile boolean running = true;
SlowWriter(DistributedFileSystem fs, Path filepath, final long sleepms)
throws IOException {
super(SlowWriter.class.getSimpleName() + ":" + filepath);
this.filepath = filepath;
this.out = (HdfsDataOutputStream) fs.create(filepath, REPLICATION);
this.sleepms = sleepms;
}
@Override public void work() {
int i = 0;
try {
sleep(sleepms);
for (; running; i++) {
LOG.info(getName() + " writes " + i);
out.write(i);
out.hflush();
sleep(sleepms);
}
} catch (InterruptedException e) {
LOG.info(getName() + " interrupted:" + e);
} catch (IOException e) {
throw new RuntimeException(getName(), e);
} finally {
LOG.info(getName() + " terminated: i=" + i);
}
}
void interruptRunning() {
running = false;
interrupt();
}
void joinAndClose() throws InterruptedException {
LOG.info(getName() + " join and close");
join();
IOUtils.closeStream(out);
}
}
}
| SlowWriter |
java | google__guava | android/guava/src/com/google/common/base/FinalizableReferenceQueue.java | {
"start": 12694,
"end": 12736
} | class ____
*
* System | loader |
java | spring-projects__spring-security | oauth2/oauth2-client/src/main/java/org/springframework/security/oauth2/client/web/server/WebSessionServerOAuth2AuthorizedClientRepository.java | {
"start": 1418,
"end": 4101
} | class ____ implements ServerOAuth2AuthorizedClientRepository {
private static final String DEFAULT_AUTHORIZED_CLIENTS_ATTR_NAME = WebSessionServerOAuth2AuthorizedClientRepository.class
.getName() + ".AUTHORIZED_CLIENTS";
private final String sessionAttributeName = DEFAULT_AUTHORIZED_CLIENTS_ATTR_NAME;
@Override
@SuppressWarnings("unchecked")
public <T extends OAuth2AuthorizedClient> Mono<T> loadAuthorizedClient(String clientRegistrationId,
Authentication principal, ServerWebExchange exchange) {
Assert.hasText(clientRegistrationId, "clientRegistrationId cannot be empty");
Assert.notNull(exchange, "exchange cannot be null");
// @formatter:off
return exchange.getSession()
.map(this::getAuthorizedClients)
.flatMap((clients) -> Mono.justOrEmpty((T) clients.get(clientRegistrationId)));
// @formatter:on
}
@Override
public Mono<Void> saveAuthorizedClient(OAuth2AuthorizedClient authorizedClient, Authentication principal,
ServerWebExchange exchange) {
Assert.notNull(authorizedClient, "authorizedClient cannot be null");
Assert.notNull(exchange, "exchange cannot be null");
// @formatter:off
return exchange.getSession()
.doOnSuccess((session) -> {
Map<String, OAuth2AuthorizedClient> authorizedClients = getAuthorizedClients(session);
authorizedClients.put(authorizedClient.getClientRegistration().getRegistrationId(), authorizedClient);
session.getAttributes().put(this.sessionAttributeName, authorizedClients);
})
.then(Mono.empty());
// @formatter:on
}
@Override
public Mono<Void> removeAuthorizedClient(String clientRegistrationId, Authentication principal,
ServerWebExchange exchange) {
Assert.hasText(clientRegistrationId, "clientRegistrationId cannot be empty");
Assert.notNull(exchange, "exchange cannot be null");
// @formatter:off
return exchange.getSession()
.doOnSuccess((session) -> {
Map<String, OAuth2AuthorizedClient> authorizedClients = getAuthorizedClients(session);
authorizedClients.remove(clientRegistrationId);
if (authorizedClients.isEmpty()) {
session.getAttributes().remove(this.sessionAttributeName);
}
else {
session.getAttributes().put(this.sessionAttributeName, authorizedClients);
}
})
.then(Mono.empty());
// @formatter:on
}
private Map<String, OAuth2AuthorizedClient> getAuthorizedClients(WebSession session) {
Assert.notNull(session, "session cannot be null");
Map<String, OAuth2AuthorizedClient> authorizedClients = session.getAttribute(this.sessionAttributeName);
return (authorizedClients != null) ? authorizedClients : new HashMap<>();
}
}
| WebSessionServerOAuth2AuthorizedClientRepository |
java | alibaba__fastjson | src/test/java/com/alibaba/json/bvt/feature/FeaturesTest.java | {
"start": 304,
"end": 904
} | class ____ extends TestCase {
public void test_0() throws Exception {
SerializeConfig config = new SerializeConfig();
config.setAsmEnable(false);
String text = JSON.toJSONString(new Entity(), config);
Assert.assertEquals("{\"value\":null}", text);
}
public void test_1() throws Exception {
SerializeConfig config = new SerializeConfig();
config.setAsmEnable(true);
String text = JSON.toJSONString(new Entity(), config);
Assert.assertEquals("{\"value\":null}", text);
}
public static | FeaturesTest |
java | google__guava | android/guava-tests/test/com/google/common/io/MultiReaderTest.java | {
"start": 918,
"end": 3784
} | class ____ extends TestCase {
public void testOnlyOneOpen() throws Exception {
String testString = "abcdefgh";
CharSource source = newCharSource(testString);
int[] counter = new int[1];
CharSource reader =
new CharSource() {
@Override
public Reader openStream() throws IOException {
if (counter[0]++ != 0) {
throw new IllegalStateException("More than one source open");
}
return new FilterReader(source.openStream()) {
@Override
public void close() throws IOException {
super.close();
counter[0]--;
}
};
}
};
Reader joinedReader = CharSource.concat(reader, reader, reader).openStream();
String result = CharStreams.toString(joinedReader);
assertEquals(testString.length() * 3, result.length());
}
public void testReady() throws Exception {
CharSource source = newCharSource("a");
Iterable<? extends CharSource> list = ImmutableList.of(source, source);
Reader joinedReader = CharSource.concat(list).openStream();
assertTrue(joinedReader.ready());
assertEquals('a', joinedReader.read());
assertEquals('a', joinedReader.read());
assertEquals(-1, joinedReader.read());
assertFalse(joinedReader.ready());
}
public void testSimple() throws Exception {
String testString = "abcdefgh";
CharSource source = newCharSource(testString);
Reader joinedReader = CharSource.concat(source, source).openStream();
String expectedString = testString + testString;
assertEquals(expectedString, CharStreams.toString(joinedReader));
}
private static CharSource newCharSource(String text) {
return new CharSource() {
@Override
public Reader openStream() {
return new StringReader(text);
}
};
}
public void testSkip() throws Exception {
String begin = "abcde";
String end = "fghij";
Reader joinedReader = CharSource.concat(newCharSource(begin), newCharSource(end)).openStream();
String expected = begin + end;
assertEquals(expected.charAt(0), joinedReader.read());
CharStreams.skipFully(joinedReader, 1);
assertEquals(expected.charAt(2), joinedReader.read());
CharStreams.skipFully(joinedReader, 4);
assertEquals(expected.charAt(7), joinedReader.read());
CharStreams.skipFully(joinedReader, 1);
assertEquals(expected.charAt(9), joinedReader.read());
assertEquals(-1, joinedReader.read());
}
public void testSkipZero() throws Exception {
CharSource source = newCharSource("a");
Iterable<CharSource> list = ImmutableList.of(source, source);
Reader joinedReader = CharSource.concat(list).openStream();
assertEquals(0, joinedReader.skip(0));
assertEquals('a', joinedReader.read());
}
}
| MultiReaderTest |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/LocalizedResource.java | {
"start": 9892,
"end": 10589
} | class ____ extends ResourceTransition {
@Override
public void transition(LocalizedResource rsrc, ResourceEvent event) {
ResourceFailedLocalizationEvent failedEvent =
(ResourceFailedLocalizationEvent) event;
Queue<ContainerId> containers = rsrc.ref;
for (ContainerId container : containers) {
rsrc.dispatcher.getEventHandler().handle(
new ContainerResourceFailedEvent(container, failedEvent
.getLocalResourceRequest(), failedEvent.getDiagnosticMessage()));
}
}
}
/**
* Resource already localized, notify immediately.
*/
@SuppressWarnings("unchecked") // dispatcher not typed
private static | FetchFailedTransition |
java | micronaut-projects__micronaut-core | core-processor/src/main/java/io/micronaut/inject/processing/DeclaredBeanElementCreator.java | {
"start": 7866,
"end": 18479
} | class ____ be processed as a properties bean
*/
protected boolean processAsProperties() {
return false;
}
private void build(BeanDefinitionVisitor visitor) {
Set<FieldElement> processedFields = new HashSet<>();
ElementQuery<MemberElement> memberQuery = ElementQuery.ALL_FIELD_AND_METHODS.includeHiddenElements();
if (processAsProperties()) {
memberQuery = memberQuery.excludePropertyElements();
for (PropertyElement propertyElement : classElement.getBeanProperties()) {
if (visitPropertyInternal(visitor, propertyElement)) {
propertyElement.getField().ifPresent(processedFields::add);
}
}
} else {
for (PropertyElement propertyElement : classElement.getSyntheticBeanProperties()) {
if (visitPropertyInternal(visitor, propertyElement)) {
propertyElement.getField().ifPresent(processedFields::add);
}
}
}
List<MemberElement> memberElements = new ArrayList<>(classElement.getEnclosedElements(memberQuery));
memberElements.removeAll(processedFields);
for (MemberElement memberElement : memberElements) {
if (memberElement instanceof FieldElement fieldElement) {
visitFieldInternal(visitor, fieldElement);
} else if (memberElement instanceof MethodElement methodElement) {
visitMethodInternal(visitor, methodElement);
} else if (!(memberElement instanceof PropertyElement)) {
throw new IllegalStateException("Unknown element");
}
}
}
private void visitFieldInternal(BeanDefinitionVisitor visitor, FieldElement fieldElement) {
boolean claimed = visitField(visitor, fieldElement);
if (claimed) {
addOriginatingElementIfNecessary(visitor, fieldElement);
}
}
private void visitMethodInternal(BeanDefinitionVisitor visitor, MethodElement methodElement) {
makeInterceptedForValidationIfNeeded(methodElement);
boolean claimed = visitMethod(visitor, methodElement);
if (claimed) {
addOriginatingElementIfNecessary(visitor, methodElement);
}
}
private boolean visitPropertyInternal(BeanDefinitionVisitor visitor, PropertyElement propertyElement) {
boolean claimed = visitProperty(visitor, propertyElement);
if (claimed) {
propertyElement.getReadMethod().ifPresent(element -> addOriginatingElementIfNecessary(visitor, element));
propertyElement.getWriteMethod().ifPresent(element -> addOriginatingElementIfNecessary(visitor, element));
propertyElement.getField().ifPresent(element -> addOriginatingElementIfNecessary(visitor, element));
}
return claimed;
}
/**
* Visit a property.
*
* @param visitor The visitor
* @param propertyElement The property
* @return true if processed
*/
protected boolean visitProperty(BeanDefinitionVisitor visitor, PropertyElement propertyElement) {
boolean claimed = false;
Optional<? extends MemberElement> writeMember = propertyElement.getWriteMember();
if (writeMember.isPresent()) {
claimed |= visitPropertyWriteElement(visitor, propertyElement, writeMember.get());
}
Optional<? extends MemberElement> readMember = propertyElement.getReadMember();
if (readMember.isPresent()) {
boolean readElementClaimed = visitPropertyReadElement(visitor, propertyElement, readMember.get());
claimed |= readElementClaimed;
}
// Process property's field if no methods were processed
Optional<FieldElement> field = propertyElement.getField();
if (!claimed && field.isPresent()) {
FieldElement writeElement = field.get();
claimed = visitField(visitor, writeElement);
}
return claimed;
}
/**
* Makes the method intercepted by the validation advice.
* @param element The method element
*/
protected void makeInterceptedForValidationIfNeeded(MethodElement element) {
// The method with constrains should be intercepted with the validation interceptor
if (element.hasDeclaredAnnotation(ANN_REQUIRES_VALIDATION)) {
element.annotate(ANN_VALIDATED);
}
}
/**
* Visit a property read element.
*
* @param visitor The visitor
* @param propertyElement The property
* @param readElement The read element
* @return true if processed
*/
protected boolean visitPropertyReadElement(BeanDefinitionVisitor visitor,
PropertyElement propertyElement,
MemberElement readElement) {
if (readElement instanceof MethodElement methodReadElement) {
return visitPropertyReadElement(visitor, propertyElement, methodReadElement);
}
return false;
}
/**
* Visit a property method read element.
*
* @param visitor The visitor
* @param propertyElement The property
* @param readElement The read element
* @return true if processed
*/
protected boolean visitPropertyReadElement(BeanDefinitionVisitor visitor,
PropertyElement propertyElement,
MethodElement readElement) {
return visitAopAndExecutableMethod(visitor, readElement);
}
/**
* Visit a property write element.
*
* @param visitor The visitor
* @param propertyElement The property
* @param writeElement The write element
* @return true if processed
*/
protected boolean visitPropertyWriteElement(BeanDefinitionVisitor visitor,
                                            PropertyElement propertyElement,
                                            MemberElement writeElement) {
    // Only method-based writers are processed here; any other member kind is not claimed.
    if (!(writeElement instanceof MethodElement methodWriteElement)) {
        return false;
    }
    return visitPropertyWriteElement(visitor, propertyElement, methodWriteElement);
}
/**
* Visit a property write element.
*
* @param visitor The visitor
* @param propertyElement The property
* @param writeElement The write element
* @return true if processed
*/
@NextMajorVersion("Require @ReflectiveAccess for private methods in Micronaut 4")
protected boolean visitPropertyWriteElement(BeanDefinitionVisitor visitor,
                                            PropertyElement propertyElement,
                                            MethodElement writeElement) {
    // Mark the writer for validation interception once, up front. The original code
    // invoked makeInterceptedForValidationIfNeeded a second time inside the lifecycle
    // branch below; that call was redundant (it re-applies the same annotation) and
    // has been removed.
    makeInterceptedForValidationIfNeeded(writeElement);
    if (visitInjectAndLifecycleMethod(visitor, writeElement)) {
        return true;
    } else if (!writeElement.isStatic() && writeElement.getMethodAnnotationMetadata().hasStereotype(AnnotationUtil.QUALIFIER)) {
        // A qualified, non-static setter is treated as a method injection point.
        if (propertyElement.getReadMethod().isPresent() && writeElement.hasStereotype(ANN_REQUIRES_VALIDATION)) {
            visitor.setValidated(true);
        }
        staticMethodCheck(writeElement);
        // TODO: Require @ReflectiveAccess for private methods in Micronaut 4
        visitMethodInjectionPoint(visitor, writeElement);
        return true;
    }
    // Otherwise fall back to the shared AOP/executable-method handling.
    return visitAopAndExecutableMethod(visitor, writeElement);
}
/**
* Visit a method.
*
* @param visitor The visitor
* @param methodElement The method
* @return true if processed
*/
protected boolean visitMethod(BeanDefinitionVisitor visitor, MethodElement methodElement) {
    // Lifecycle/injection handling takes precedence; short-circuit to the
    // AOP/executable-method handling only when the method was not claimed.
    return visitInjectAndLifecycleMethod(visitor, methodElement)
            || visitAopAndExecutableMethod(visitor, methodElement);
}
@NextMajorVersion("Require @ReflectiveAccess for private methods in Micronaut 4")
private boolean visitInjectAndLifecycleMethod(BeanDefinitionVisitor visitor, MethodElement methodElement) {
    // A method may be annotated with BOTH @PostConstruct and @PreDestroy, so both
    // checks run before deciding whether the method was claimed.
    boolean lifecycleHandled = false;
    if (methodElement.hasDeclaredAnnotation(AnnotationUtil.POST_CONSTRUCT)) {
        staticMethodCheck(methodElement);
        // TODO: Require @ReflectiveAccess for private methods in Micronaut 4
        visitor.visitPostConstructMethod(
                methodElement.getDeclaringType(),
                methodElement,
                methodElement.isReflectionRequired(classElement),
                visitorContext);
        lifecycleHandled = true;
    }
    if (methodElement.hasDeclaredAnnotation(AnnotationUtil.PRE_DESTROY)) {
        staticMethodCheck(methodElement);
        // TODO: Require @ReflectiveAccess for private methods in Micronaut 4
        visitor.visitPreDestroyMethod(
                methodElement.getDeclaringType(),
                methodElement,
                methodElement.isReflectionRequired(classElement),
                visitorContext);
        lifecycleHandled = true;
    }
    if (lifecycleHandled) {
        return true;
    }
    // Non-lifecycle case: a non-static inject-point method is claimed as an
    // injection point; anything else is left for other visitors.
    if (methodElement.isStatic() || !isInjectPointMethod(methodElement)) {
        return false;
    }
    staticMethodCheck(methodElement);
    // TODO: Require @ReflectiveAccess for private methods in Micronaut 4
    visitMethodInjectionPoint(visitor, methodElement);
    return true;
}
/**
* Visit a method injection point.
* @param visitor The visitor
* @param methodElement The method element
*/
protected void visitMethodInjectionPoint(BeanDefinitionVisitor visitor, MethodElement methodElement) {
    // Configuration-style injection may rewrite the method's metadata first.
    applyConfigurationInjectionIfNecessary(visitor, methodElement);
    final boolean requiresReflection = methodElement.isReflectionRequired(classElement);
    visitor.visitMethodInjectionPoint(
            methodElement.getDeclaringType(),
            methodElement,
            requiresReflection,
            visitorContext);
}
private boolean visitAopAndExecutableMethod(BeanDefinitionVisitor visitor, MethodElement methodElement) {
if (methodElement.isStatic() && isExplicitlyAnnotatedAsExecutable(methodElement)) {
// Only allow static executable methods when it's explicitly annotated with Executable. | should |
java | grpc__grpc-java | alts/src/main/java/io/grpc/alts/internal/ChannelCrypterNetty.java | {
"start": 1010,
"end": 1159
} | interface ____ provide integrity only and
* implementations that provide privacy and integrity. All methods should be thread-compatible.
*/
public | that |
java | apache__camel | dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/JGroupsEndpointBuilderFactory.java | {
"start": 1567,
"end": 3825
} | interface ____
extends
EndpointConsumerBuilder {
default AdvancedJGroupsEndpointConsumerBuilder advanced() {
return (AdvancedJGroupsEndpointConsumerBuilder) this;
}
/**
* Specifies configuration properties of the JChannel used by the
* endpoint.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: common
*
* @param channelProperties the value to set
* @return the dsl builder
*/
default JGroupsEndpointConsumerBuilder channelProperties(String channelProperties) {
doSetProperty("channelProperties", channelProperties);
return this;
}
/**
* If set to true, the consumer endpoint will receive org.jgroups.View
* messages as well (not only org.jgroups.Message instances). By default
* only regular messages are consumed by the endpoint.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: consumer
*
* @param enableViewMessages the value to set
* @return the dsl builder
*/
default JGroupsEndpointConsumerBuilder enableViewMessages(boolean enableViewMessages) {
doSetProperty("enableViewMessages", enableViewMessages);
return this;
}
/**
* If set to true, the consumer endpoint will receive org.jgroups.View
* messages as well (not only org.jgroups.Message instances). By default
* only regular messages are consumed by the endpoint.
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: false
* Group: consumer
*
* @param enableViewMessages the value to set
* @return the dsl builder
*/
default JGroupsEndpointConsumerBuilder enableViewMessages(String enableViewMessages) {
doSetProperty("enableViewMessages", enableViewMessages);
return this;
}
}
/**
* Advanced builder for endpoint consumers for the JGroups component.
*/
public | JGroupsEndpointConsumerBuilder |
java | apache__maven | impl/maven-testing/src/main/java/org/apache/maven/api/di/testing/MavenDIExtension.java | {
"start": 3177,
"end": 3613
} | class ____ unavailable,
* the required test instance is unavailable, or if container setup fails
*/
protected void setupContainer() {
if (context == null) {
throw new IllegalStateException("ExtensionContext must not be null");
}
final Class<?> testClass = context.getRequiredTestClass();
if (testClass == null) {
throw new IllegalStateException("Required test | is |
java | hibernate__hibernate-orm | hibernate-envers/src/test/java/org/hibernate/orm/test/envers/integration/onetomany/detached/BasicDetachedList.java | {
"start": 995,
"end": 3905
} | class ____ {
private Integer str1_id;
private Integer str2_id;
private Integer coll1_id;
@BeforeClassTemplate
public void initData(EntityManagerFactoryScope scope) {
// Revision 1
scope.inTransaction( em -> {
StrTestEntity str1 = new StrTestEntity( "str1" );
StrTestEntity str2 = new StrTestEntity( "str2" );
em.persist( str1 );
em.persist( str2 );
ListRefCollEntity coll1 = new ListRefCollEntity( 3, "coll1" );
coll1.setCollection( new ArrayList<StrTestEntity>() );
coll1.getCollection().add( str1 );
em.persist( coll1 );
str1_id = str1.getId();
str2_id = str2.getId();
coll1_id = coll1.getId();
} );
// Revision 2
scope.inTransaction( em -> {
StrTestEntity str2 = em.find( StrTestEntity.class, str2_id );
ListRefCollEntity coll1 = em.find( ListRefCollEntity.class, coll1_id );
coll1.getCollection().add( str2 );
} );
// Revision 3
scope.inTransaction( em -> {
StrTestEntity str1 = em.find( StrTestEntity.class, str1_id );
ListRefCollEntity coll1 = em.find( ListRefCollEntity.class, coll1_id );
coll1.getCollection().remove( str1 );
} );
// Revision 4
scope.inTransaction( em -> {
ListRefCollEntity coll1 = em.find( ListRefCollEntity.class, coll1_id );
coll1.getCollection().clear();
} );
}
@Test
public void testRevisionsCounts(EntityManagerFactoryScope scope) {
scope.inEntityManager( em -> {
final var auditReader = AuditReaderFactory.get( em );
assertEquals( Arrays.asList( 1, 2, 3, 4 ), auditReader.getRevisions( ListRefCollEntity.class, coll1_id ) );
assertEquals( Arrays.asList( 1 ), auditReader.getRevisions( StrTestEntity.class, str1_id ) );
assertEquals( Arrays.asList( 1 ), auditReader.getRevisions( StrTestEntity.class, str2_id ) );
} );
}
@Test
public void testHistoryOfColl1(EntityManagerFactoryScope scope) {
scope.inEntityManager( em -> {
StrTestEntity str1 = em.find( StrTestEntity.class, str1_id );
StrTestEntity str2 = em.find( StrTestEntity.class, str2_id );
final var auditReader = AuditReaderFactory.get( em );
ListRefCollEntity rev1 = auditReader.find( ListRefCollEntity.class, coll1_id, 1 );
ListRefCollEntity rev2 = auditReader.find( ListRefCollEntity.class, coll1_id, 2 );
ListRefCollEntity rev3 = auditReader.find( ListRefCollEntity.class, coll1_id, 3 );
ListRefCollEntity rev4 = auditReader.find( ListRefCollEntity.class, coll1_id, 4 );
assertTrue( TestTools.checkCollection( rev1.getCollection(), str1 ) );
assertTrue( TestTools.checkCollection( rev2.getCollection(), str1, str2 ) );
assertTrue( TestTools.checkCollection( rev3.getCollection(), str2 ) );
assertTrue( TestTools.checkCollection( rev4.getCollection() ) );
assertEquals( "coll1", rev1.getData() );
assertEquals( "coll1", rev2.getData() );
assertEquals( "coll1", rev3.getData() );
assertEquals( "coll1", rev4.getData() );
} );
}
}
| BasicDetachedList |
java | apache__dubbo | dubbo-remoting/dubbo-remoting-netty/src/test/java/org/apache/dubbo/remoting/transport/netty/NettyClientTest.java | {
"start": 1559,
"end": 4519
} | class ____ {
static RemotingServer server;
static int port = NetUtils.getAvailablePort();
@BeforeAll
public static void setUp() throws Exception {
FrameworkModel.destroyAll();
URL url = URL.valueOf("exchange://localhost:" + port + "?server=netty3&codec=exchange");
ApplicationModel applicationModel = ApplicationModel.defaultModel();
ApplicationConfig applicationConfig = new ApplicationConfig("provider-app");
applicationConfig.setExecutorManagementMode(EXECUTOR_MANAGEMENT_MODE_DEFAULT);
applicationModel.getApplicationConfigManager().setApplication(applicationConfig);
url = url.setScopeModel(applicationModel);
server = Exchangers.bind(url, new TelnetServerHandler());
}
@AfterAll
public static void tearDown() {
try {
if (server != null) server.close();
} finally {
}
}
// public static void main(String[] args) throws RemotingException, InterruptedException {
// ExchangeChannel client =
// Exchangers.connect(URL.valueOf("exchange://10.20.153.10:20880?client=netty3&heartbeat=1000&codec=exchange"));
// Thread.sleep(60 * 1000 * 50);
// }
@Test
void testClientClose() throws Exception {
List<ExchangeChannel> clients = new ArrayList<ExchangeChannel>(100);
for (int i = 0; i < 100; i++) {
URL url = URL.valueOf("exchange://localhost:" + port + "?client=netty3&codec=exchange");
ApplicationModel applicationModel = ApplicationModel.defaultModel();
ApplicationConfig applicationConfig = new ApplicationConfig("provider-app");
applicationConfig.setExecutorManagementMode(EXECUTOR_MANAGEMENT_MODE_DEFAULT);
applicationModel.getApplicationConfigManager().setApplication(applicationConfig);
url = url.setScopeModel(applicationModel);
ExchangeChannel client = Exchangers.connect(url);
Thread.sleep(5);
clients.add(client);
}
for (ExchangeChannel client : clients) {
client.close();
}
Thread.sleep(1000);
}
@Test
void testServerClose() throws Exception {
for (int i = 0; i < 100; i++) {
URL url = URL.valueOf(
"exchange://localhost:" + NetUtils.getAvailablePort(6000) + "?server=netty3&codec=exchange");
ApplicationModel applicationModel = ApplicationModel.defaultModel();
ApplicationConfig applicationConfig = new ApplicationConfig("provider-app");
applicationConfig.setExecutorManagementMode(EXECUTOR_MANAGEMENT_MODE_DEFAULT);
applicationModel.getApplicationConfigManager().setApplication(applicationConfig);
url = url.setScopeModel(applicationModel);
RemotingServer aServer = Exchangers.bind(url, new TelnetServerHandler());
aServer.close();
}
}
}
| NettyClientTest |
java | apache__camel | core/camel-core/src/test/java/org/apache/camel/converter/myconverter/MyFallbackPromoteConverter.java | {
"start": 1037,
"end": 1393
} | class ____ {
@Converter(fallback = true, fallbackCanPromote = true)
public Object convertTo(Class<?> type, Exchange exchange, Object value, TypeConverterRegistry registry) {
if (MyCoolBean.class.isAssignableFrom(value.getClass())) {
return "This is cool: " + value;
}
return null;
}
}
| MyFallbackPromoteConverter |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/query/hql/instantiation/DynamicInstantiationWithJoinAndGroupAndOrderByByTest.java | {
"start": 1009,
"end": 2792
} | class ____ {
@BeforeAll
public void setUp(SessionFactoryScope scope) {
scope.inTransaction( session -> {
Item item1 = new Item();
item1.setName( "Item 1" );
session.persist( item1 );
Item item2 = new Item();
item2.setName( "Item 2" );
session.persist( item2 );
ItemSale itemSale11 = new ItemSale();
itemSale11.setItem( item1 );
itemSale11.setTotal( 1d );
session.persist( itemSale11 );
ItemSale itemSale12 = new ItemSale();
itemSale12.setItem( item1 );
itemSale12.setTotal( 2d );
session.persist( itemSale12 );
ItemSale itemSale21 = new ItemSale();
itemSale21.setItem( item2 );
itemSale21.setTotal( 5d );
session.persist( itemSale21 );
} );
}
@AfterAll
public void tearDown(SessionFactoryScope scope) {
scope.inTransaction( session -> {
session.createMutationQuery( "delete from ItemSale" ).executeUpdate();
session.createMutationQuery( "delete from Item" ).executeUpdate();
} );
}
@Test
public void testInstantiationGroupByAndOrderBy(SessionFactoryScope scope) {
scope.inTransaction( session -> {
TypedQuery<Summary> query = session.createQuery(
"select new " + getClass().getName() + "$Summary(i, sum(is.total))" +
" from ItemSale is" +
" join is.item i" +
" group by i" +
" order by i"
,
Summary.class
);
List<Summary> resultList = query.getResultList();
assertEquals( 2, resultList.size() );
assertEquals( "Item 1", resultList.get( 0 ).getItem().getName() );
assertEquals( 3d, resultList.get( 0 ).getTotal() );
assertEquals( "Item 2", resultList.get( 1 ).getItem().getName() );
assertEquals( 5d, resultList.get( 1 ).getTotal() );
} );
}
@Entity(name = "Item")
public static | DynamicInstantiationWithJoinAndGroupAndOrderByByTest |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/index/engine/Engine.java | {
"start": 76036,
"end": 77059
} | enum ____ {
INDEX,
DELETE,
NO_OP;
private final String lowercase;
TYPE() {
this.lowercase = this.toString().toLowerCase(Locale.ROOT);
}
public String getLowercase() {
return lowercase;
}
}
private final BytesRef uid;
private final long version;
private final long seqNo;
private final long primaryTerm;
private final VersionType versionType;
private final Origin origin;
private final long startTime;
public Operation(BytesRef uid, long seqNo, long primaryTerm, long version, VersionType versionType, Origin origin, long startTime) {
this.uid = uid;
this.seqNo = seqNo;
this.primaryTerm = primaryTerm;
this.version = version;
this.versionType = versionType;
this.origin = origin;
this.startTime = startTime;
}
public | TYPE |
java | apache__flink | flink-rpc/flink-rpc-core/src/main/java/org/apache/flink/runtime/rpc/messages/LocalFencedMessage.java | {
"start": 1218,
"end": 1831
} | class ____<F extends Serializable, P> implements FencedMessage<F, P> {
private final F fencingToken;
private final P payload;
public LocalFencedMessage(@Nullable F fencingToken, P payload) {
this.fencingToken = fencingToken;
this.payload = Preconditions.checkNotNull(payload);
}
@Override
public F getFencingToken() {
return fencingToken;
}
@Override
public P getPayload() {
return payload;
}
@Override
public String toString() {
return "LocalFencedMessage(" + fencingToken + ", " + payload + ')';
}
}
| LocalFencedMessage |
java | reactor__reactor-core | reactor-core/src/main/java/reactor/core/publisher/MonoFlatMapMany.java | {
"start": 2124,
"end": 5574
} | class ____<T, R> implements InnerOperator<T, R> {
final CoreSubscriber<? super R> actual;
final Function<? super T, ? extends Publisher<? extends R>> mapper;
@SuppressWarnings("NotNullFieldNotInitialized")
Subscription main;
volatile @Nullable Subscription inner;
// https://github.com/uber/NullAway/issues/1157
@SuppressWarnings({"rawtypes", "DataFlowIssue"})
static final AtomicReferenceFieldUpdater<FlatMapManyMain, @Nullable Subscription> INNER =
AtomicReferenceFieldUpdater.newUpdater(FlatMapManyMain.class,
Subscription.class,
"inner");
volatile long requested;
@SuppressWarnings("rawtypes")
static final AtomicLongFieldUpdater<FlatMapManyMain> REQUESTED =
AtomicLongFieldUpdater.newUpdater(FlatMapManyMain.class, "requested");
boolean hasValue;
FlatMapManyMain(CoreSubscriber<? super R> actual,
Function<? super T, ? extends Publisher<? extends R>> mapper) {
this.actual = actual;
this.mapper = mapper;
}
@Override
public @Nullable Object scanUnsafe(Attr key) {
if (key == Attr.PARENT) return main;
if (key == Attr.RUN_STYLE) return Attr.RunStyle.SYNC;
return InnerOperator.super.scanUnsafe(key);
}
@Override
public Stream<? extends Scannable> inners() {
return Stream.of(Scannable.from(inner));
}
@Override
public CoreSubscriber<? super R> actual() {
return actual;
}
@Override
public void request(long n) {
Subscription a = inner;
if (a != null) {
a.request(n);
}
else {
if (Operators.validate(n)) {
Operators.addCap(REQUESTED, this, n);
a = inner;
if (a != null) {
n = REQUESTED.getAndSet(this, 0L);
if (n != 0L) {
a.request(n);
}
}
}
}
}
@Override
public void cancel() {
main.cancel();
Operators.terminate(INNER, this);
}
@Override
public void onSubscribe(Subscription s) {
if (Operators.validate(this.main, s)) {
this.main = s;
actual.onSubscribe(this);
s.request(Long.MAX_VALUE);
}
}
void onSubscribeInner(Subscription s) {
if (Operators.setOnce(INNER, this, s)) {
long r = REQUESTED.getAndSet(this, 0L);
if (r != 0) {
s.request(r);
}
}
}
@SuppressWarnings("unchecked")
@Override
public void onNext(T t) {
hasValue = true;
Publisher<? extends R> p;
try {
p = Objects.requireNonNull(mapper.apply(t),
"The mapper returned a null Publisher.");
}
catch (Throwable ex) {
//if the mapping fails, then there is nothing to be continued, since the source is a Mono
actual.onError(Operators.onOperatorError(this, ex, t,
actual.currentContext()));
return;
}
if (p instanceof Callable) {
R v;
try {
v = ((Callable<@Nullable R>) p).call();
}
catch (Throwable ex) {
actual.onError(Operators.onOperatorError(this, ex, t,
actual.currentContext()));
return;
}
if (v == null) {
actual.onComplete();
}
else {
onSubscribeInner(Operators.scalarSubscription(actual, v));
}
return;
}
p = Operators.toFluxOrMono(p);
p.subscribe(new FlatMapManyInner<>(this, actual));
}
@Override
public void onError(Throwable t) {
if (hasValue) {
Operators.onErrorDropped(t, actual.currentContext());
return;
}
actual.onError(t);
}
@Override
public void onComplete() {
if (!hasValue) {
actual.onComplete();
}
}
}
static final | FlatMapManyMain |
java | quarkusio__quarkus | independent-projects/resteasy-reactive/common/runtime/src/main/java/org/jboss/resteasy/reactive/common/util/MediaTypeHelper.java | {
"start": 2831,
"end": 13626
} | class ____ implements Comparator<MediaType>, Serializable {
private static final long serialVersionUID = -5828700121582498092L;
private final String parameterName;
public MediaTypeComparator(String parameterName) {
this.parameterName = parameterName;
}
public int compare(MediaType mediaType2, MediaType mediaType) {
float q = getQTypeWithParamInfo(mediaType, parameterName);
boolean wasQ = q != 2.0f;
if (q == 2.0f)
q = 1.0f;
float q2 = getQTypeWithParamInfo(mediaType2, parameterName);
boolean wasQ2 = q2 != 2.0f;
if (q2 == 2.0f)
q2 = 1.0f;
if (q < q2)
return -1;
if (q > q2)
return 1;
if (mediaType.isWildcardType() && !mediaType2.isWildcardType())
return -1;
if (!mediaType.isWildcardType() && mediaType2.isWildcardType())
return 1;
if (mediaType.isWildcardSubtype() && !mediaType2.isWildcardSubtype())
return -1;
if (!mediaType.isWildcardSubtype() && mediaType2.isWildcardSubtype())
return 1;
if (isComposite(mediaType.getSubtype()) && !isComposite(mediaType2.getSubtype()))
return -1;
if (!isComposite(mediaType.getSubtype()) && isComposite(mediaType2.getSubtype()))
return 1;
if (isCompositeWildcardSubtype(mediaType.getSubtype()) && !isCompositeWildcardSubtype(mediaType2.getSubtype()))
return -1;
if (!isCompositeWildcardSubtype(mediaType.getSubtype()) && isCompositeWildcardSubtype(mediaType2.getSubtype()))
return 1;
if (isWildcardCompositeSubtype(mediaType.getSubtype()) && !isWildcardCompositeSubtype(mediaType2.getSubtype()))
return -1;
if (!isWildcardCompositeSubtype(mediaType.getSubtype()) && isWildcardCompositeSubtype(mediaType2.getSubtype()))
return 1;
return 0;
}
}
public static int compareWeight(MediaType one, MediaType two) {
return Q_COMPARATOR.compare(one, two);
}
public static int compareMatchingMediaTypes(List<MediaType> produces, List<MediaType> mediaTypes1,
List<MediaType> mediaTypes2) {
int countMediaTypes1 = countMatchingMediaTypes(produces, mediaTypes1);
int countMediaTypes2 = countMatchingMediaTypes(produces, mediaTypes2);
return (countMediaTypes1 < countMediaTypes2) ? 1 : ((countMediaTypes1 == countMediaTypes2) ? 0 : -1);
}
public static void sortByWeight(List<MediaType> types) {
if (hasAtMostOneItem(types)) {
return;
}
// Stable sort: preserve original order when comparator returns 0
List<IndexedMediaType> indexed = new ArrayList<>(types.size());
for (int i = 0; i < types.size(); i++) {
indexed.add(new IndexedMediaType(types.get(i), i));
}
indexed.sort(new Comparator<IndexedMediaType>() {
@Override
public int compare(IndexedMediaType a, IndexedMediaType b) {
int cmp = Q_COMPARATOR.compare(a.mediaType, b.mediaType);
return cmp != 0 ? cmp : Integer.compare(a.originalIndex, b.originalIndex);
}
});
types.clear();
for (IndexedMediaType imt : indexed) {
types.add(imt.mediaType);
}
}
public static void sortByQSWeight(List<MediaType> types) {
if (hasAtMostOneItem(types)) {
return;
}
types.sort(QS_COMPARATOR);
}
private static boolean hasAtMostOneItem(List<MediaType> types) {
return types == null || types.size() <= 1;
}
/**
* Finds the best match according to the weight of the media types
* The parameters needs to be sorted, so a copy of these is made if necessary
* in order to avoid altering the input
*/
public static MediaType getBestMatch(List<MediaType> desired, List<MediaType> provided) {
if (!hasAtMostOneItem(desired)) {
desired = new ArrayList<>(desired);
sortByWeight(desired);
}
if (!hasAtMostOneItem(provided)) {
provided = new ArrayList<>(provided);
sortByWeight(provided);
}
return getFirstMatch(desired, provided);
}
public static MediaType getFirstMatch(List<MediaType> desired, List<MediaType> provided) {
boolean emptyDesired = desired == null || desired.size() == 0;
boolean emptyProvided = provided == null || provided.size() == 0;
if (emptyDesired && emptyProvided)
return null;
if (emptyDesired)
return provided.get(0);
if (emptyProvided)
return desired.get(0);
for (int i = 0; i < desired.size(); i++) {
for (int j = 0; j < provided.size(); j++) {
MediaType provide = provided.get(j);
if (provide.isCompatible(desired.get(i))) {
return provide;
}
}
}
return null;
}
public static List<MediaType> parseHeader(String header) {
ArrayList<MediaType> types = new ArrayList<>();
String[] medias = header.split(",");
for (String media : medias) {
types.add(valueOf(media.trim()));
}
return types;
}
public static boolean equivalent(MediaType m1, MediaType m2) {
if (m1 == m2)
return true;
if (!m1.getType().equals(m2.getType()))
return false;
if (!m1.getSubtype().equals(m2.getSubtype()))
return false;
return equivalentParams(m1, m2);
}
public static boolean equivalentParams(MediaType m1, MediaType m2) {
Map<String, String> params1 = m1.getParameters();
Map<String, String> params2 = m2.getParameters();
if (params1 == params2)
return true;
if (params1 == null || params2 == null)
return false;
if (params1.size() == 0 && params2.size() == 0)
return true;
int numParams1 = params1.size();
if (params1.containsKey("q"))
numParams1--;
int numParams2 = params2.size();
if (params2.containsKey("q"))
numParams2--;
if (numParams1 != numParams2)
return false;
if (numParams1 == 0)
return true;
for (Map.Entry<String, String> entry : params1.entrySet()) {
String key = entry.getKey();
if (key.equals("q"))
continue;
String value = entry.getValue();
String value2 = params2.get(key);
if (value == value2)
continue; // both null
if (value == null || value2 == null)
return false;
if (value.equals(value2) == false)
return false;
}
return true;
}
// TODO: does this need to be more complex?
public static boolean isTextLike(MediaType mediaType) {
String type = mediaType.getType();
String subtype = mediaType.getSubtype();
return (type.equals("application") && (subtype.contains("json") || subtype.contains("xml") || subtype.contains("yaml")))
|| type.equals("text");
}
public static boolean isUnsupportedWildcardSubtype(MediaType mediaType) {
if (mediaType.isWildcardSubtype()) {
return !mediaType.isWildcardType() && !"application".equals(mediaType.getType());
}
return false;
}
public static List<MediaType> toListOfMediaType(String[] mediaTypes) {
if (mediaTypes == null || mediaTypes.length == 0) {
return Collections.emptyList();
}
List<MediaType> list = new ArrayList<>(mediaTypes.length);
for (String mediaType : mediaTypes) {
list.add(valueOf(mediaType));
}
return Collections.unmodifiableList(list);
}
/**
* This method ungroups the media types with suffix in separated media types. For example, having the media type
* "application/one+two" will return a list containing ["application/one+two", "application/one", "application/two"].
* The Media Types without suffix remain as one media type.
*
* @param mediaTypes the list of media types to separate.
* @return the list of ungrouped media types.
*/
public static List<MediaType> getUngroupedMediaTypes(List<MediaType> mediaTypes) {
List<MediaType> effectiveMediaTypes = new ArrayList<>();
for (MediaType mediaType : mediaTypes) {
effectiveMediaTypes.addAll(getUngroupedMediaTypes(mediaType));
}
return Collections.unmodifiableList(effectiveMediaTypes);
}
/**
* This method ungroups the media type with suffix in separated media types. For example, having the media type
* "application/one+two" will return a list containing ["application/one+two", "application/one", "application/two"].
* If the Media Type does not have a suffix, then it's not modified.
*
* @param mediaType the media type to separate.
* @return the list of ungrouped media types.
*/
public static List<MediaType> getUngroupedMediaTypes(MediaType mediaType) {
if (mediaType == null) {
return Collections.emptyList();
}
if (mediaType.getSubtype() == null || !mediaType.getSubtype().contains(MEDIA_TYPE_SUFFIX_DELIM)) {
return Collections.singletonList(mediaType);
}
String[] subTypes = mediaType.getSubtype().split(Pattern.quote(MEDIA_TYPE_SUFFIX_DELIM));
List<MediaType> effectiveMediaTypes = new ArrayList<>(1 + subTypes.length);
effectiveMediaTypes.add(mediaType);
for (String subType : subTypes) {
effectiveMediaTypes.add(new MediaType(mediaType.getType(), subType, mediaType.getParameters()));
}
return Collections.unmodifiableList(effectiveMediaTypes);
}
private static int countMatchingMediaTypes(List<MediaType> produces, List<MediaType> mediaTypes) {
int count = 0;
for (int i = 0; i < mediaTypes.size(); i++) {
MediaType mediaType = mediaTypes.get(i);
for (int j = 0; j < produces.size(); j++) {
MediaType produce = produces.get(j);
if (mediaType.isCompatible(produce)) {
count++;
break;
}
}
}
return count;
}
private record IndexedMediaType(MediaType mediaType, int originalIndex) {
}
}
| MediaTypeComparator |
java | spring-projects__spring-framework | spring-web/src/main/java/org/springframework/web/bind/support/WebArgumentResolver.java | {
"start": 1658,
"end": 2346
} | interface ____ {
/**
* Marker to be returned when the resolver does not know how to
* handle the given method parameter.
*/
Object UNRESOLVED = new Object();
/**
* Resolve an argument for the given handler method parameter within the given web request.
* @param methodParameter the handler method parameter to resolve
* @param webRequest the current web request, allowing access to the native request as well
* @return the argument value, or {@code UNRESOLVED} if not resolvable
* @throws Exception in case of resolution failure
*/
@Nullable Object resolveArgument(MethodParameter methodParameter, NativeWebRequest webRequest) throws Exception;
}
| WebArgumentResolver |
java | spring-projects__spring-security | oauth2/oauth2-authorization-server/src/main/java/org/springframework/security/oauth2/server/authorization/converter/RegisteredClientOAuth2ClientRegistrationConverter.java | {
"start": 1423,
"end": 3220
} | class ____
implements Converter<RegisteredClient, OAuth2ClientRegistration> {
@Override
public OAuth2ClientRegistration convert(RegisteredClient registeredClient) {
// @formatter:off
OAuth2ClientRegistration.Builder builder = OAuth2ClientRegistration.builder()
.clientId(registeredClient.getClientId())
.clientIdIssuedAt(registeredClient.getClientIdIssuedAt())
.clientName(registeredClient.getClientName());
builder
.tokenEndpointAuthenticationMethod(registeredClient.getClientAuthenticationMethods().iterator().next().getValue());
if (registeredClient.getClientSecret() != null) {
builder.clientSecret(registeredClient.getClientSecret());
}
if (registeredClient.getClientSecretExpiresAt() != null) {
builder.clientSecretExpiresAt(registeredClient.getClientSecretExpiresAt());
}
if (!CollectionUtils.isEmpty(registeredClient.getRedirectUris())) {
builder.redirectUris((redirectUris) ->
redirectUris.addAll(registeredClient.getRedirectUris()));
}
builder.grantTypes((grantTypes) ->
registeredClient.getAuthorizationGrantTypes().forEach((authorizationGrantType) ->
grantTypes.add(authorizationGrantType.getValue())));
if (registeredClient.getAuthorizationGrantTypes().contains(AuthorizationGrantType.AUTHORIZATION_CODE)) {
builder.responseType(OAuth2AuthorizationResponseType.CODE.getValue());
}
if (!CollectionUtils.isEmpty(registeredClient.getScopes())) {
builder.scopes((scopes) ->
scopes.addAll(registeredClient.getScopes()));
}
ClientSettings clientSettings = registeredClient.getClientSettings();
if (clientSettings.getJwkSetUrl() != null) {
builder.jwkSetUrl(clientSettings.getJwkSetUrl());
}
return builder.build();
// @formatter:on
}
}
| RegisteredClientOAuth2ClientRegistrationConverter |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/tuple/IdentifierAttribute.java | {
"start": 364,
"end": 626
} | interface ____ extends Attribute {
boolean isVirtual();
boolean isEmbedded();
@Deprecated
IdentifierGenerator getIdentifierGenerator();
Generator getGenerator();
boolean isIdentifierAssignedByInsert();
boolean hasIdentifierMapper();
}
| IdentifierAttribute |
java | spring-projects__spring-framework | spring-test/src/test/java/org/springframework/test/http/HttpHeadersAssertTests.java | {
"start": 1401,
"end": 16404
} | class ____ {
@Test
void containsHeader() {
assertThat(Map.of("first", "1")).containsHeader("first");
}
@Test
void containsHeaderWithNameNotPresent() {
Map<String, String> map = Map.of("first", "1");
assertThatExceptionOfType(AssertionError.class)
.isThrownBy(() -> assertThat(map).containsHeader("wrong-name"))
.withMessageContainingAll("HTTP header", "first", "wrong-name");
}
@Test
void containsHeaders() {
assertThat(Map.of("first", "1", "second", "2", "third", "3"))
.containsHeaders("first", "third");
}
@Test
void containsHeadersWithSeveralNamesNotPresent() {
Map<String, String> map = Map.of("first", "1", "second", "2", "third", "3");
assertThatExceptionOfType(AssertionError.class)
.isThrownBy(() -> assertThat(map).containsHeaders("first", "wrong-name", "another-wrong-name", "third"))
.withMessageContainingAll("HTTP headers", "first", "wrong-name", "another-wrong-name");
}
@Test
void containsOnlyHeaders() {
HttpHeaders headers = new HttpHeaders();
headers.add("name1", "value1");
headers.add("name2", "value2");
assertThat(headers).containsOnlyHeaders("name2", "name1");
}
@Test
void containsOnlyHeadersWithMissingOne() {
HttpHeaders headers = new HttpHeaders();
headers.add("name1", "value1");
headers.add("name2", "value2");
assertThatExceptionOfType(AssertionError.class)
.isThrownBy(() -> assertThat(headers).containsOnlyHeaders("name1", "name2", "name3"))
.withMessageContainingAll("check headers contain only HTTP headers",
"could not find the following element(s)", "[\"name3\"]");
}
@Test
void containsOnlyHeadersWithExtraOne() {
HttpHeaders headers = new HttpHeaders();
headers.add("name1", "value1");
headers.add("name2", "value2");
headers.add("name3", "value3");
assertThatExceptionOfType(AssertionError.class)
.isThrownBy(() -> assertThat(headers).containsOnlyHeaders("name1", "name2"))
.withMessageContainingAll("check headers contain only HTTP headers",
"the following element(s) were unexpected", "[\"name3\"]");
}
@Test
void doesNotContainHeader() {
assertThat(Map.of("first", "1")).doesNotContainHeader("second");
}
@Test
void doesNotContainHeaderWithNamePresent() {
Map<String, String> map = Map.of("first", "1");
assertThatExceptionOfType(AssertionError.class)
.isThrownBy(() -> assertThat(map).doesNotContainHeader("first"))
.withMessageContainingAll("HTTP header", "first");
}
@Test
void doesNotContainHeaders() {
assertThat(Map.of("first", "1", "third", "3"))
.doesNotContainHeaders("second", "fourth");
}
@Test
void doesNotContainHeadersWithSeveralNamesPresent() {
Map<String, String> map = Map.of("first", "1", "second", "2", "third", "3");
assertThatExceptionOfType(AssertionError.class)
.isThrownBy(() -> assertThat(map).doesNotContainHeaders("first", "another-wrong-name", "second"))
.withMessageContainingAll("HTTP headers", "first", "second");
}
@Test
@SuppressWarnings("unchecked")
void hasHeaderSatisfying() {
HttpHeaders headers = new HttpHeaders();
headers.addAll("header", List.of("first", "second", "third"));
Consumer<List<String>> mock = mock(Consumer.class);
assertThatNoException().isThrownBy(() -> assertThat(headers).hasHeaderSatisfying("header", mock));
verify(mock).accept(List.of("first", "second", "third"));
}
@Test
void hasHeaderSatisfyingWithExceptionInConsumer() {
HttpHeaders headers = new HttpHeaders();
headers.addAll("header", List.of("first", "second", "third"));
IllegalStateException testException = new IllegalStateException("test");
assertThatIllegalStateException()
.isThrownBy(() -> assertThat(headers).hasHeaderSatisfying("header", values -> {
throw testException;
})).isEqualTo(testException);
}
@Test
void hasHeaderSatisfyingWithFailingAssertion() {
HttpHeaders headers = new HttpHeaders();
headers.addAll("header", List.of("first", "second", "third"));
assertThatExceptionOfType(AssertionError.class)
.isThrownBy(() -> assertThat(headers).hasHeaderSatisfying("header", values ->
Assertions.assertThat(values).hasSize(42)))
.withMessageContainingAll("HTTP header", "header", "first", "second", "third", "42", "3");
}
@Test
void hasValueWithStringMatch() {
HttpHeaders headers = new HttpHeaders();
headers.addAll("header", List.of("a", "b", "c"));
assertThat(headers).hasValue("header", "a");
}
@Test
void hasValueWithStringMatchOnSecondaryValue() {
HttpHeaders headers = new HttpHeaders();
headers.addAll("header", List.of("first", "second", "third"));
assertThatExceptionOfType(AssertionError.class)
.isThrownBy(() -> assertThat(headers).hasValue("header", "second"))
.withMessageContainingAll("check primary value for HTTP header 'header'", "first", "second");
}
@Test
void hasValueWithNoStringMatch() {
HttpHeaders headers = new HttpHeaders();
headers.addAll("header", List.of("first", "second", "third"));
assertThatExceptionOfType(AssertionError.class)
.isThrownBy(() -> assertThat(headers).hasValue("wrong-name", "second"))
.withMessageContainingAll("HTTP header", "header", "wrong-name");
}
@Test
void hasValueWithNonPresentHeader() {
HttpHeaders headers = new HttpHeaders();
headers.add("test-header", "a");
assertThatExceptionOfType(AssertionError.class)
.isThrownBy(() -> assertThat(headers).hasValue("wrong-name", "a"))
.withMessageContainingAll("HTTP header", "test-header", "wrong-name");
}
@Test
void hasValueWithLongMatch() {
HttpHeaders headers = new HttpHeaders();
headers.addAll("header", List.of("123", "456", "789"));
assertThat(headers).hasValue("header", 123);
}
@Test
void hasValueWithLongMatchOnSecondaryValue() {
HttpHeaders headers = new HttpHeaders();
headers.addAll("header", List.of("123", "456", "789"));
assertThatExceptionOfType(AssertionError.class)
.isThrownBy(() -> assertThat(headers).hasValue("header", 456))
.withMessageContainingAll("check primary long value for HTTP header 'header'", "123", "456");
}
@Test
void hasValueWithNoLongMatch() {
HttpHeaders headers = new HttpHeaders();
headers.addAll("header", List.of("123", "456", "789"));
assertThatExceptionOfType(AssertionError.class)
.isThrownBy(() -> assertThat(headers).hasValue("wrong-name", 456))
.withMessageContainingAll("HTTP header", "header", "wrong-name");
}
@Test
void hasValueWithInstantMatch() {
Instant instant = Instant.now();
HttpHeaders headers = new HttpHeaders();
headers.setInstant("header", instant);
assertThat(headers).hasValue("header", instant);
}
@Test
void hasValueWithNoInstantMatch() {
Instant instant = Instant.now();
HttpHeaders headers = new HttpHeaders();
headers.setInstant("header", instant);
assertThatExceptionOfType(AssertionError.class)
.isThrownBy(() -> assertThat(headers).hasValue("wrong-name", instant.minusSeconds(30)))
.withMessageContainingAll("HTTP header", "header", "wrong-name");
}
@Test
void hasValueWithNoInstantMatchOneSecOfDifference() {
Instant instant = Instant.now();
HttpHeaders headers = new HttpHeaders();
headers.setInstant("header", instant);
assertThatExceptionOfType(AssertionError.class)
.isThrownBy(() -> assertThat(headers).hasValue("wrong-name", instant.minusSeconds(1)))
.withMessageContainingAll("HTTP header", "header", "wrong-name");
}
@Test
void hasSingleValueWithStringMatch() {
HttpHeaders headers = new HttpHeaders();
headers.add("header", "a");
assertThat(headers).hasSingleValue("header", "a");
}
@Test
void hasSingleValueWithSecondaryValues() {
HttpHeaders headers = new HttpHeaders();
headers.addAll("header", List.of("first", "second", "third"));
assertThatExceptionOfType(AssertionError.class)
.isThrownBy(() -> assertThat(headers).hasSingleValue("header", "first"))
.withMessage("Expected HTTP header 'header' to be present without secondary values, " +
"but found <2> secondary value(s)");
}
@Test
void hasSingleValueWithLongMatch() {
HttpHeaders headers = new HttpHeaders();
headers.add("header", "123");
assertThat(headers).hasSingleValue("header", 123);
}
@Test
void hasSingleValueWithLongMatchButSecondaryValues() {
HttpHeaders headers = new HttpHeaders();
headers.addAll("header", List.of("123", "456", "789"));
assertThatExceptionOfType(AssertionError.class)
.isThrownBy(() -> assertThat(headers).hasSingleValue("header", 123))
.withMessage("Expected HTTP header 'header' to be present without secondary values, " +
"but found <2> secondary value(s)");
}
@Test
void hasSingleValueWithInstantMatch() {
Instant instant = Instant.now();
HttpHeaders headers = new HttpHeaders();
headers.setInstant("header", instant);
assertThat(headers).hasSingleValue("header", instant);
}
@Test
void hasSingleValueWithInstantAndSecondaryValues() {
Instant instant = Instant.now();
HttpHeaders headers = new HttpHeaders();
headers.setInstant("header", instant);
headers.add("header", "second");
assertThatExceptionOfType(AssertionError.class)
.isThrownBy(() -> assertThat(headers).hasSingleValue("header", instant.minusSeconds(30)))
.withMessage("Expected HTTP header 'header' to be present without secondary values, " +
"but found <1> secondary value(s)");
}
@Test
void hasExactlyValues() {
HttpHeaders headers = new HttpHeaders();
headers.addAll("name", List.of("value1", "value2"));
headers.add("otherName", "otherValue");
assertThat(headers).hasExactlyValues("name", List.of("value1", "value2"));
}
@Test
void hasExactlyValuesWithMissingOne() {
HttpHeaders headers = new HttpHeaders();
headers.addAll("name", List.of("value1", "value2"));
headers.add("otherName", "otherValue");
assertThatExceptionOfType(AssertionError.class)
.isThrownBy(() -> assertThat(headers).hasExactlyValues("name", List.of("value1", "value2", "value3")))
.withMessageContainingAll("check all values of HTTP header 'name'",
"to contain exactly (and in same order)",
"could not find the following elements", "[\"value3\"]");
}
@Test
void hasExactlyValuesWithExtraOne() {
HttpHeaders headers = new HttpHeaders();
headers.addAll("name", List.of("value1", "value2"));
headers.add("otherName", "otherValue");
assertThatExceptionOfType(AssertionError.class)
.isThrownBy(() -> assertThat(headers).hasExactlyValues("name", List.of("value1")))
.withMessageContainingAll("check all values of HTTP header 'name'",
"to contain exactly (and in same order)",
"some elements were not expected", "[\"value2\"]");
}
@Test
void hasExactlyValuesWithWrongOrder() {
HttpHeaders headers = new HttpHeaders();
headers.addAll("name", List.of("value1", "value2"));
headers.add("otherName", "otherValue");
assertThatExceptionOfType(AssertionError.class)
.isThrownBy(() -> assertThat(headers).hasExactlyValues("name", List.of("value2", "value1")))
.withMessageContainingAll("check all values of HTTP header 'name'",
"to contain exactly (and in same order)",
"there were differences at these indexes",
"element at index 0: expected \"value2\" but was \"value1\"",
"element at index 1: expected \"value1\" but was \"value2\"");
}
@Test
void hasExactlyValuesInAnyOrder() {
HttpHeaders headers = new HttpHeaders();
headers.addAll("name", List.of("value1", "value2"));
headers.add("otherName", "otherValue");
assertThat(headers).hasExactlyValuesInAnyOrder("name", List.of("value1", "value2"));
}
@Test
void hasExactlyValuesInAnyOrderWithDifferentOrder() {
HttpHeaders headers = new HttpHeaders();
headers.addAll("name", List.of("value1", "value2"));
headers.add("otherName", "otherValue");
assertThat(headers).hasExactlyValuesInAnyOrder("name", List.of("value2", "value1"));
}
@Test
void hasExactlyValuesInAnyOrderWithMissingOne() {
HttpHeaders headers = new HttpHeaders();
headers.addAll("name", List.of("value1", "value2"));
headers.add("otherName", "otherValue");
assertThatExceptionOfType(AssertionError.class)
.isThrownBy(() -> assertThat(headers).hasExactlyValuesInAnyOrder("name",
List.of("value1", "value2", "value3")))
.withMessageContainingAll("check all values of HTTP header 'name'",
"to contain exactly in any order",
"could not find the following elements", "[\"value3\"]");
}
@Test
void hasExactlyValuesInAnyOrderWithExtraOne() {
HttpHeaders headers = new HttpHeaders();
headers.addAll("name", List.of("value1", "value2"));
headers.add("otherName", "otherValue");
assertThatExceptionOfType(AssertionError.class)
.isThrownBy(() -> assertThat(headers).hasExactlyValuesInAnyOrder("name", List.of("value1")))
.withMessageContainingAll("check all values of HTTP header 'name'",
"to contain exactly in any order",
"the following elements were unexpected", "[\"value2\"]");
}
@Test
void isEmpty() {
assertThat(new HttpHeaders()).isEmpty();
}
@Test
void isEmptyWithHeaders() {
assertThatExceptionOfType(AssertionError.class)
.isThrownBy(() -> assertThat(Map.of("first", "1", "second", "2")).isEmpty())
.withMessageContainingAll("check headers are empty", "Expecting empty", "first", "second");
}
@Test
void isNotEmpty() {
assertThat(Map.of("header", "value")).isNotEmpty();
}
@Test
void isNotEmptyWithNoHeaders() {
assertThatExceptionOfType(AssertionError.class)
.isThrownBy(() -> assertThat(new HttpHeaders()).isNotEmpty())
.withMessageContainingAll("check headers are not empty", "Expecting actual not to be empty");
}
@Test
void hasSize() {
assertThat(Map.of("first", "1", "second", "2")).hasSize(2);
}
@Test
void hasSizeWithWrongSize() {
assertThatExceptionOfType(AssertionError.class)
.isThrownBy(() -> assertThat(Map.of("first", "1")).hasSize(42))
.withMessageContainingAll("check headers have size '42'", "1");
}
@Test
void hasSameSizeAs() {
HttpHeaders headers = new HttpHeaders();
headers.add("name1", "value1");
headers.add("name2", "value2");
HttpHeaders other = new HttpHeaders();
other.add("name3", "value3");
other.add("name4", "value4");
assertThat(headers).hasSameSizeAs(other);
}
@Test
void hasSameSizeAsWithSmallerOther() {
HttpHeaders headers = new HttpHeaders();
headers.add("name1", "value1");
headers.add("name2", "value2");
HttpHeaders other = new HttpHeaders();
other.add("name3", "value3");
assertThatExceptionOfType(AssertionError.class)
.isThrownBy(() -> assertThat(headers).hasSameSizeAs(other))
.withMessageContainingAll("check headers have same size as '", other.toString(),
"Expected size: 1 but was: 2");
}
private static HttpHeadersAssert assertThat(Map<String, String> values) {
MultiValueMap<String, String> map = new LinkedMultiValueMap<>();
values.forEach(map::add);
return assertThat(new HttpHeaders(map));
}
private static HttpHeadersAssert assertThat(HttpHeaders values) {
return new HttpHeadersAssert(values);
}
}
| HttpHeadersAssertTests |
java | spring-projects__spring-boot | core/spring-boot-test/src/test/java/org/springframework/boot/test/context/AnnotationsPropertySourceTests.java | {
"start": 14032,
"end": 14126
} | class ____ {
}
@TypeLevelAnnotation("outer")
static | PropertyMappedWithDeeplyNestedAnnotations |
java | elastic__elasticsearch | x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/LossySumDoubleAggregatorFunctionSupplier.java | {
"start": 653,
"end": 1646
} | class ____ implements AggregatorFunctionSupplier {
public LossySumDoubleAggregatorFunctionSupplier() {
}
@Override
public List<IntermediateStateDesc> nonGroupingIntermediateStateDesc() {
return LossySumDoubleAggregatorFunction.intermediateStateDesc();
}
@Override
public List<IntermediateStateDesc> groupingIntermediateStateDesc() {
return LossySumDoubleGroupingAggregatorFunction.intermediateStateDesc();
}
@Override
public LossySumDoubleAggregatorFunction aggregator(DriverContext driverContext,
List<Integer> channels) {
return LossySumDoubleAggregatorFunction.create(driverContext, channels);
}
@Override
public LossySumDoubleGroupingAggregatorFunction groupingAggregator(DriverContext driverContext,
List<Integer> channels) {
return LossySumDoubleGroupingAggregatorFunction.create(channels, driverContext);
}
@Override
public String describe() {
return "lossy_sum of doubles";
}
}
| LossySumDoubleAggregatorFunctionSupplier |
java | apache__maven | compat/maven-model-builder/src/main/java/org/apache/maven/model/interpolation/reflection/ClassMap.java | {
"start": 12045,
"end": 12292
} | class ____ public, then try to get it
if ((clazz.getModifiers() & Modifier.PUBLIC) != 0) {
try {
return clazz.getMethod(name, paramTypes);
} catch (NoSuchMethodException e) {
// If the | is |
java | apache__kafka | connect/runtime/src/main/java/org/apache/kafka/connect/storage/KafkaConfigBackingStore.java | {
"start": 10834,
"end": 11719
} | class ____ not only the current set of configs, but also which connectors have inconsistent data.
* This allows users of this class (i.e., {@link Herder} implementations) to take action to resolve any inconsistencies. These
* inconsistencies should be rare (as described above, due to compaction combined with leader failures in the middle
* of updating task configurations).
* </p>
* <p>
* Note that the expectation is that this config storage system has only a single writer at a time.
* The caller (Herder) must ensure this is the case. In distributed mode this will require forwarding config change
* requests to the leader in the cluster (i.e. the worker group coordinated by the Kafka broker).
* </p>
* <p>
* Since processing of the config log occurs in a background thread, callers must take care when using accessors.
* To simplify handling this correctly, this | exposes |
java | assertj__assertj-core | assertj-core/src/test/java/org/assertj/core/util/DateUtil_parse_date_Test.java | {
"start": 1054,
"end": 1572
} | class ____ {
@Test
void should_parse_string_with_date_time_format() {
Date date = parse("1994-08-26");
assertThat(formatAsDatetime(date)).isEqualTo("1994-08-26T00:00:00");
}
@Test
void should_return_null_if_string_to_parse_is_null() {
assertThat(parse(null)).isNull();
}
@Test
void should_fail_if_string_does_not_respect_date_format() {
assertThatExceptionOfType(RuntimeException.class).isThrownBy(() -> assertThat(parse("invalid date format")).isNull());
}
}
| DateUtil_parse_date_Test |
java | apache__hadoop | hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/v2/LogParams.java | {
"start": 856,
"end": 1811
} | class ____ {
private String containerId;
private String applicationId;
private String nodeId;
private String owner;
public LogParams(String containerIdStr, String applicationIdStr,
String nodeIdStr, String owner) {
this.containerId = containerIdStr;
this.applicationId = applicationIdStr;
this.nodeId = nodeIdStr;
this.owner = owner;
}
public String getContainerId() {
return containerId;
}
public void setContainerId(String containerId) {
this.containerId = containerId;
}
public String getApplicationId() {
return applicationId;
}
public void setApplicationId(String applicationId) {
this.applicationId = applicationId;
}
public String getNodeId() {
return nodeId;
}
public void setNodeId(String nodeId) {
this.nodeId = nodeId;
}
public String getOwner() {
return this.owner;
}
public String setOwner(String owner) {
return this.owner;
}
}
| LogParams |
java | quarkusio__quarkus | integration-tests/gradle/src/main/resources/annotation-processor-multi-module/common/src/main/java/org/acme/common/CarMapper.java | {
"start": 212,
"end": 396
} | interface ____ {
CarMapper INSTANCE = Mappers.getMapper( CarMapper.class );
@Mapping(source = "numberOfSeats", target = "seatNumber")
CarDTO carToCarDTO(Car car);
} | CarMapper |
java | apache__flink | flink-runtime/src/test/java/org/apache/flink/runtime/state/InternalPriorityQueueTestBase.java | {
"start": 15150,
"end": 17542
} | class ____ extends TypeSerializer<TestElement> {
private static final int REVISION = 1;
public static final TestElementSerializer INSTANCE = new TestElementSerializer();
protected TestElementSerializer() {}
@Override
public boolean isImmutableType() {
return true;
}
@Override
public TypeSerializer<TestElement> duplicate() {
return this;
}
@Override
public TestElement createInstance() {
throw new UnsupportedOperationException();
}
@Override
public TestElement copy(TestElement from) {
return new TestElement(from.key, from.priority);
}
@Override
public TestElement copy(TestElement from, TestElement reuse) {
return copy(from);
}
@Override
public int getLength() {
return 2 * Long.BYTES;
}
@Override
public void serialize(TestElement record, DataOutputView target) throws IOException {
// serialize priority first, so that we have correct order in RocksDB. We flip the sign
// bit for correct
// lexicographic order.
target.writeLong(MathUtils.flipSignBit(record.getPriority()));
target.writeLong(record.getKey());
}
@Override
public TestElement deserialize(DataInputView source) throws IOException {
long prio = MathUtils.flipSignBit(source.readLong());
long key = source.readLong();
return new TestElement(key, prio);
}
@Override
public TestElement deserialize(TestElement reuse, DataInputView source) throws IOException {
return deserialize(source);
}
@Override
public void copy(DataInputView source, DataOutputView target) throws IOException {
serialize(deserialize(source), target);
}
@Override
public boolean equals(Object obj) {
return false;
}
@Override
public int hashCode() {
return 4711;
}
protected int getRevision() {
return REVISION;
}
@Override
public Snapshot snapshotConfiguration() {
return new Snapshot(getRevision());
}
public static | TestElementSerializer |
java | apache__flink | flink-tests/src/test/java/org/apache/flink/test/streaming/runtime/MultipleInputITCase.java | {
"start": 9395,
"end": 9912
} | class ____<T> extends AbstractInput<T, Long> {
public SumInput(AbstractStreamOperatorV2<Long> owner, int inputId) {
super(owner, inputId);
}
@Override
public void processElement(StreamRecord<T> element) throws Exception {
sum += Long.valueOf(element.getValue().toString());
output.collect(new StreamRecord<>(sum));
}
}
}
/** Factory for {@link SumAllInputOperator}. */
public static | SumInput |
java | apache__camel | test-infra/camel-test-infra-zookeeper/src/main/java/org/apache/camel/test/infra/zookeeper/services/ZooKeeperInfraService.java | {
"start": 986,
"end": 1086
} | interface ____ extends InfrastructureService {
String getConnectionString();
}
| ZooKeeperInfraService |
java | spring-projects__spring-framework | spring-context/src/test/java/org/springframework/scheduling/annotation/ScheduledAnnotationBeanPostProcessorTests.java | {
"start": 36860,
"end": 37016
} | class ____ {
@Scheduled(fixedRate = 3, initialDelay = 5, timeUnit = TimeUnit.SECONDS)
void fixedRate() {
}
}
static | FixedRateWithInitialDelayInSeconds |
java | quarkusio__quarkus | independent-projects/tools/registry-client/src/main/java/io/quarkus/registry/client/spi/RegistryClientEnvironment.java | {
"start": 171,
"end": 276
} | interface ____ {
MessageWriter log();
MavenArtifactResolver resolver();
}
| RegistryClientEnvironment |
java | elastic__elasticsearch | x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/expression/predicate/nulls/IsNull.java | {
"start": 1018,
"end": 2146
} | class ____ extends UnaryScalarFunction implements Negatable<UnaryScalarFunction> {
public IsNull(Source source, Expression field) {
super(source, field);
}
@Override
protected NodeInfo<IsNull> info() {
return NodeInfo.create(this, IsNull::new, field());
}
@Override
protected IsNull replaceChild(Expression newChild) {
return new IsNull(source(), newChild);
}
@Override
public Object fold() {
return field().fold() == null || DataTypes.isNull(field().dataType());
}
@Override
protected Processor makeProcessor() {
return new CheckNullProcessor(CheckNullOperation.IS_NULL);
}
@Override
public String processScript(String script) {
return Scripts.formatTemplate(Scripts.QL_SCRIPTS + ".isNull(" + script + ")");
}
@Override
public Nullability nullable() {
return Nullability.FALSE;
}
@Override
public DataType dataType() {
return DataTypes.BOOLEAN;
}
@Override
public UnaryScalarFunction negate() {
return new IsNotNull(source(), field());
}
}
| IsNull |
java | apache__flink | flink-core/src/main/java/org/apache/flink/api/common/operators/util/UserCodeWrapper.java | {
"start": 2585,
"end": 2903
} | class ____ the user code object.
*/
Class<? extends T> getUserCodeClass();
/**
* Checks whether the wrapper already has an object, or whether it needs to instantiate it.
*
* @return True, if the wrapper has already an object, false if it has only a class.
*/
boolean hasObject();
}
| of |
java | apache__hadoop | hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCpOptions.java | {
"start": 1519,
"end": 1735
} | class ____ all DistCp options.
*
* When you add a new option, please:
* - Add the field along with javadoc in DistCpOptions and its Builder
* - Add setter method in the {@link Builder} class
*
* This | encapsulates |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/search/aggregations/metrics/HyperLogLogPlusPlus.java | {
"start": 8967,
"end": 9637
} | class ____ implements AbstractHyperLogLog.RunLenIterator {
private final HyperLogLog hll;
int pos;
final long start;
private byte value;
HyperLogLogIterator(HyperLogLog hll, long bucket) {
this.hll = hll;
start = bucket << hll.p;
}
@Override
public boolean next() {
if (pos < hll.m) {
value = hll.runLens.get(start + pos);
pos++;
return true;
}
return false;
}
@Override
public byte value() {
return value;
}
}
private static | HyperLogLogIterator |
java | spring-projects__spring-boot | module/spring-boot-sendgrid/src/main/java/org/springframework/boot/sendgrid/autoconfigure/SendGridProperties.java | {
"start": 1466,
"end": 1908
} | class ____ {
/**
* SendGrid proxy host.
*/
private @Nullable String host;
/**
* SendGrid proxy port.
*/
private @Nullable Integer port;
public @Nullable String getHost() {
return this.host;
}
public void setHost(@Nullable String host) {
this.host = host;
}
public @Nullable Integer getPort() {
return this.port;
}
public void setPort(@Nullable Integer port) {
this.port = port;
}
}
}
| Proxy |
java | apache__camel | components/camel-test/camel-test-main-junit5/src/test/java/org/apache/camel/test/main/junit5/legacy/ReplaceBeanWithMockTest.java | {
"start": 1600,
"end": 1819
} | class ____ extends CamelMainTestSupport {
@Mock
Greetings greetings;
@Override
protected void configure(MainConfigurationProperties configuration) {
// Add the configuration | ReplaceBeanWithMockTest |
java | elastic__elasticsearch | server/src/test/java/org/elasticsearch/index/query/SpanNotQueryBuilderTests.java | {
"start": 1176,
"end": 12121
} | class ____ extends AbstractQueryTestCase<SpanNotQueryBuilder> {
@Override
protected SpanNotQueryBuilder doCreateTestQueryBuilder() {
SpanTermQueryBuilder[] spanTermQueries = new SpanTermQueryBuilderTests().createSpanTermQueryBuilders(2);
SpanNotQueryBuilder queryBuilder = new SpanNotQueryBuilder(spanTermQueries[0], spanTermQueries[1]);
if (randomBoolean()) {
// also test negative values, they should implicitly be changed to 0
queryBuilder.dist(randomIntBetween(-2, 10));
} else {
if (randomBoolean()) {
queryBuilder.pre(randomIntBetween(-2, 10));
}
if (randomBoolean()) {
queryBuilder.post(randomIntBetween(-2, 10));
}
}
return queryBuilder;
}
@Override
protected SpanNotQueryBuilder createQueryWithInnerQuery(QueryBuilder queryBuilder) {
if (queryBuilder instanceof SpanNotQueryBuilder) {
return new SpanNotQueryBuilder((SpanNotQueryBuilder) queryBuilder, (SpanNotQueryBuilder) queryBuilder);
}
return new SpanNotQueryBuilder(new SpanTermQueryBuilder("field", "value"), new SpanTermQueryBuilder("field", "value"));
}
@Override
protected void doAssertLuceneQuery(SpanNotQueryBuilder queryBuilder, Query query, SearchExecutionContext context) throws IOException {
assertThat(query, instanceOf(SpanNotQuery.class));
SpanNotQuery spanNotQuery = (SpanNotQuery) query;
assertThat(spanNotQuery.getExclude(), equalTo(queryBuilder.excludeQuery().toQuery(context)));
assertThat(spanNotQuery.getInclude(), equalTo(queryBuilder.includeQuery().toQuery(context)));
}
public void testIllegalArgument() {
SpanTermQueryBuilder spanTermQuery = new SpanTermQueryBuilder("field", "value");
expectThrows(IllegalArgumentException.class, () -> new SpanNotQueryBuilder(null, spanTermQuery));
expectThrows(IllegalArgumentException.class, () -> new SpanNotQueryBuilder(spanTermQuery, null));
}
public void testDist() {
SpanNotQueryBuilder builder = new SpanNotQueryBuilder(
new SpanTermQueryBuilder("name1", "value1"),
new SpanTermQueryBuilder("name2", "value2")
);
assertThat(builder.pre(), equalTo(0));
assertThat(builder.post(), equalTo(0));
builder.dist(-4);
assertThat(builder.pre(), equalTo(0));
assertThat(builder.post(), equalTo(0));
builder.dist(4);
assertThat(builder.pre(), equalTo(4));
assertThat(builder.post(), equalTo(4));
}
public void testPrePost() {
SpanNotQueryBuilder builder = new SpanNotQueryBuilder(
new SpanTermQueryBuilder("name1", "value1"),
new SpanTermQueryBuilder("name2", "value2")
);
assertThat(builder.pre(), equalTo(0));
assertThat(builder.post(), equalTo(0));
builder.pre(-4).post(-4);
assertThat(builder.pre(), equalTo(0));
assertThat(builder.post(), equalTo(0));
builder.pre(1).post(2);
assertThat(builder.pre(), equalTo(1));
assertThat(builder.post(), equalTo(2));
}
/**
* test correct parsing of `dist` parameter, this should create builder with pre/post set to same value
*/
public void testParseDist() throws IOException {
XContentBuilder builder = XContentFactory.jsonBuilder();
builder.startObject();
builder.startObject(SpanNotQueryBuilder.NAME);
builder.field("exclude");
spanTermQuery("description", "jumped").toXContent(builder, null);
builder.field("include");
spanNearQuery(QueryBuilders.spanTermQuery("description", "quick"), 1).addClause(QueryBuilders.spanTermQuery("description", "fox"))
.toXContent(builder, null);
builder.field("dist", 3);
builder.endObject();
builder.endObject();
SpanNotQueryBuilder query = (SpanNotQueryBuilder) parseQuery(Strings.toString(builder));
assertThat(query.pre(), equalTo(3));
assertThat(query.post(), equalTo(3));
assertNotNull(query.includeQuery());
assertNotNull(query.excludeQuery());
}
/**
* test exceptions for three types of broken json, missing include / exclude and both dist and pre/post specified
*/
public void testParserExceptions() throws IOException {
{
XContentBuilder builder = XContentFactory.jsonBuilder();
builder.startObject();
builder.startObject(SpanNotQueryBuilder.NAME);
builder.field("exclude");
spanTermQuery("description", "jumped").toXContent(builder, null);
builder.field("dist", 2);
builder.endObject();
builder.endObject();
ParsingException e = expectThrows(ParsingException.class, () -> parseQuery(Strings.toString(builder)));
assertThat(e.getDetailedMessage(), containsString("span_not must have [include]"));
}
{
XContentBuilder builder = XContentFactory.jsonBuilder();
builder.startObject();
builder.startObject(SpanNotQueryBuilder.NAME);
builder.field("include");
spanNearQuery(QueryBuilders.spanTermQuery("description", "quick"), 1).addClause(
QueryBuilders.spanTermQuery("description", "fox")
).toXContent(builder, null);
builder.field("dist", 2);
builder.endObject();
builder.endObject();
ParsingException e = expectThrows(ParsingException.class, () -> parseQuery(Strings.toString(builder)));
assertThat(e.getDetailedMessage(), containsString("span_not must have [exclude]"));
}
{
XContentBuilder builder = XContentFactory.jsonBuilder();
builder.startObject();
builder.startObject(SpanNotQueryBuilder.NAME);
builder.field("include");
spanNearQuery(QueryBuilders.spanTermQuery("description", "quick"), 1).addClause(
QueryBuilders.spanTermQuery("description", "fox")
).toXContent(builder, null);
builder.field("exclude");
spanTermQuery("description", "jumped").toXContent(builder, null);
builder.field("dist", 2);
builder.field("pre", 2);
builder.endObject();
builder.endObject();
ParsingException e = expectThrows(ParsingException.class, () -> parseQuery(Strings.toString(builder)));
assertThat(e.getDetailedMessage(), containsString("span_not can either use [dist] or [pre] & [post] (or none)"));
}
}
public void testFromJson() throws IOException {
String json = """
{
"span_not" : {
"include" : {
"span_term" : {
"field1" : {
"value" : "hoya"
}
}
},
"exclude" : {
"span_near" : {
"clauses" : [ {
"span_term" : {
"field1" : {
"value" : "la"
}
}
}, {
"span_term" : {
"field1" : {
"value" : "hoya"
}
}
} ],
"slop" : 0,
"in_order" : true
}
},
"boost" : 2.0
}
}""";
SpanNotQueryBuilder parsed = (SpanNotQueryBuilder) parseQuery(json);
checkGeneratedJson(json, parsed);
assertEquals(json, "hoya", ((SpanTermQueryBuilder) parsed.includeQuery()).value());
assertEquals(json, 2, ((SpanNearQueryBuilder) parsed.excludeQuery()).clauses().size());
assertEquals(json, 2.0, parsed.boost(), 0.0);
}
public void testFromJsonWithNonDefaultBoostInIncludeQuery() {
String json = """
{
"span_not" : {
"exclude" : {
"span_term" : {
"field1" : {
"value" : "hoya",
"boost" : 1.0
}
}
},
"include" : {
"span_near" : {
"clauses" : [ {
"span_term" : {
"field1" : {
"value" : "la",
"boost" : 1.0
}
}
}, {
"span_term" : {
"field1" : {
"value" : "hoya",
"boost" : 1.0
}
}
} ],
"slop" : 0,
"in_order" : true,
"boost" : 2.0
}
},
"pre" : 0,
"post" : 0,
"boost" : 1.0
}
}""";
Exception exception = expectThrows(ParsingException.class, () -> parseQuery(json));
assertThat(exception.getMessage(), equalTo("span_not [include] as a nested span clause can't have non-default boost value [2.0]"));
}
public void testFromJsonWithNonDefaultBoostInExcludeQuery() {
String json = """
{
"span_not" : {
"include" : {
"span_term" : {
"field1" : {
"value" : "hoya",
"boost" : 1.0
}
}
},
"exclude" : {
"span_near" : {
"clauses" : [ {
"span_term" : {
"field1" : {
"value" : "la",
"boost" : 1.0
}
}
}, {
"span_term" : {
"field1" : {
"value" : "hoya",
"boost" : 1.0
}
}
} ],
"slop" : 0,
"in_order" : true,
"boost" : 2.0
}
},
"pre" : 0,
"post" : 0,
"boost" : 1.0
}
}""";
Exception exception = expectThrows(ParsingException.class, () -> parseQuery(json));
assertThat(exception.getMessage(), equalTo("span_not [exclude] as a nested span clause can't have non-default boost value [2.0]"));
}
}
| SpanNotQueryBuilderTests |
java | apache__camel | components/camel-aws/camel-aws-xray/src/main/java/org/apache/camel/component/aws/xray/decorators/LogSegmentDecorator.java | {
"start": 867,
"end": 1085
} | class ____ extends AbstractSegmentDecorator {
@Override
public String getComponent() {
return "log";
}
@Override
public boolean newSegment() {
return false;
}
}
| LogSegmentDecorator |
java | apache__kafka | streams/src/test/java/org/apache/kafka/streams/processor/FailOnInvalidTimestampTest.java | {
"start": 1064,
"end": 1540
} | class ____ extends TimestampExtractorTest {
@Test
public void extractMetadataTimestamp() {
testExtractMetadataTimestamp(new FailOnInvalidTimestamp());
}
@Test
public void failOnInvalidTimestamp() {
final TimestampExtractor extractor = new FailOnInvalidTimestamp();
assertThrows(StreamsException.class, () -> extractor.extract(new ConsumerRecord<>("anyTopic",
0, 0, null, null), 42));
}
}
| FailOnInvalidTimestampTest |
java | spring-projects__spring-security | oauth2/oauth2-core/src/main/java/org/springframework/security/oauth2/core/authorization/OAuth2ReactiveAuthorizationManagers.java | {
"start": 1051,
"end": 1225
} | class ____ creating OAuth 2.0-specific {@link AuthorizationManager}s.
*
* @author Josh Cummings
* @since 6.2
* @see AuthorityReactiveAuthorizationManager
*/
public final | for |
java | google__auto | value/src/test/java/com/google/auto/value/processor/AutoValueCompilationTest.java | {
"start": 99770,
"end": 100771
} | interface ____<T, U> {",
" Integer blimBuilder();",
" Baz<T, U> build();",
" }",
"}");
Compilation compilation =
javac()
.withProcessors(new AutoValueProcessor(), new AutoValueBuilderProcessor())
.compile(javaFileObject);
assertThat(compilation)
.hadErrorContaining(
"Method looks like a property builder, but the type of property blim is not a class "
+ "or interface")
.inFile(javaFileObject)
.onLineContaining("Integer blimBuilder()");
}
@Test
public void autoValueBuilderPropertyBuilderHasNoBuild() {
JavaFileObject javaFileObject =
JavaFileObjects.forSourceLines(
"foo.bar.Baz",
"package foo.bar;",
"",
"import com.google.auto.value.AutoValue;",
"import com.google.common.collect.ImmutableSet;",
"",
"@AutoValue",
"public abstract | Builder |
java | apache__flink | flink-formats/flink-parquet/src/main/java/org/apache/flink/formats/parquet/vector/ParquetColumnarRowSplitReader.java | {
"start": 3032,
"end": 14989
} | class ____ implements Closeable {
private final boolean utcTimestamp;
private final MessageType fileSchema;
private final MessageType requestedSchema;
/**
* The total number of rows this RecordReader will eventually read. The sum of the rows of all
* the row groups.
*/
private final long totalRowCount;
private final WritableColumnVector[] writableVectors;
private final VectorizedColumnBatch columnarBatch;
private final ColumnarRowData row;
private final LogicalType[] selectedTypes;
private final int batchSize;
private final List<ParquetField> fieldList;
private ParquetFileReader reader;
/**
* For each request column, the reader to read this column. This is NULL if this column is
* missing from the file, in which case we populate the attribute with NULL.
*/
private ColumnReader[] columnReaders;
/** The number of rows that have been returned. */
private long rowsReturned;
/** The number of rows that have been reading, including the current in flight row group. */
private long totalCountLoadedSoFar;
// the index of the next row to return
private int nextRow;
// the number of rows in the current batch
private int rowsInBatch;
public ParquetColumnarRowSplitReader(
boolean utcTimestamp,
boolean caseSensitive,
Configuration conf,
LogicalType[] selectedTypes,
String[] selectedFieldNames,
ColumnBatchGenerator generator,
int batchSize,
Path path,
long splitStart,
long splitLength)
throws IOException {
this.utcTimestamp = utcTimestamp;
this.selectedTypes = selectedTypes;
this.batchSize = batchSize;
// then we need to apply the predicate push down filter
ParquetMetadata footer =
readFooter(conf, path, range(splitStart, splitStart + splitLength));
MessageType fileSchema = footer.getFileMetaData().getSchema();
FilterCompat.Filter filter = getFilter(conf);
List<BlockMetaData> blocks = filterRowGroups(filter, footer.getBlocks(), fileSchema);
this.fileSchema = footer.getFileMetaData().getSchema();
this.requestedSchema = clipParquetSchema(fileSchema, selectedFieldNames, caseSensitive);
this.reader =
new ParquetFileReader(
conf, footer.getFileMetaData(), path, blocks, requestedSchema.getColumns());
long totalRowCount = 0;
for (BlockMetaData block : blocks) {
totalRowCount += block.getRowCount();
}
this.totalRowCount = totalRowCount;
this.nextRow = 0;
this.rowsInBatch = 0;
this.rowsReturned = 0;
checkSchema();
this.writableVectors = createWritableVectors();
this.columnarBatch = generator.generate(createReadableVectors());
this.row = new ColumnarRowData(columnarBatch);
MessageColumnIO columnIO = new ColumnIOFactory().getColumnIO(requestedSchema);
RowType selectedType = RowType.of(selectedTypes, selectedFieldNames);
this.fieldList =
buildFieldsList(selectedType.getFields(), selectedType.getFieldNames(), columnIO);
}
/** Clips `parquetSchema` according to `fieldNames`. */
private static MessageType clipParquetSchema(
GroupType parquetSchema, String[] fieldNames, boolean caseSensitive) {
Type[] types = new Type[fieldNames.length];
if (caseSensitive) {
for (int i = 0; i < fieldNames.length; ++i) {
String fieldName = fieldNames[i];
if (parquetSchema.getFieldIndex(fieldName) < 0) {
throw new IllegalArgumentException(fieldName + " does not exist");
}
types[i] = parquetSchema.getType(fieldName);
}
} else {
Map<String, Type> caseInsensitiveFieldMap = new HashMap<>();
for (Type type : parquetSchema.getFields()) {
caseInsensitiveFieldMap.compute(
type.getName().toLowerCase(Locale.ROOT),
(key, previousType) -> {
if (previousType != null) {
throw new FlinkRuntimeException(
"Parquet with case insensitive mode should have no duplicate key: "
+ key);
}
return type;
});
}
for (int i = 0; i < fieldNames.length; ++i) {
Type type = caseInsensitiveFieldMap.get(fieldNames[i].toLowerCase(Locale.ROOT));
if (type == null) {
throw new IllegalArgumentException(fieldNames[i] + " does not exist");
}
// TODO clip for array,map,row types.
types[i] = type;
}
}
return Types.buildMessage().addFields(types).named("flink-parquet");
}
private WritableColumnVector[] createWritableVectors() {
WritableColumnVector[] columns = new WritableColumnVector[selectedTypes.length];
List<Type> types = requestedSchema.getFields();
for (int i = 0; i < selectedTypes.length; i++) {
columns[i] =
createWritableColumnVector(
batchSize,
selectedTypes[i],
types.get(i),
requestedSchema.getColumns(),
0);
}
return columns;
}
/**
* Create readable vectors from writable vectors. Especially for decimal, see {@link
* ParquetDecimalVector}.
*/
private ColumnVector[] createReadableVectors() {
ColumnVector[] vectors = new ColumnVector[writableVectors.length];
for (int i = 0; i < writableVectors.length; i++) {
vectors[i] =
selectedTypes[i].getTypeRoot() == LogicalTypeRoot.DECIMAL
? new ParquetDecimalVector(writableVectors[i])
: writableVectors[i];
}
return vectors;
}
private void checkSchema() throws IOException, UnsupportedOperationException {
if (selectedTypes.length != requestedSchema.getFieldCount()) {
throw new RuntimeException(
"The quality of field type is incompatible with the request schema!");
}
/*
* Check that the requested schema is supported.
*/
for (int i = 0; i < requestedSchema.getFieldCount(); ++i) {
String[] colPath = requestedSchema.getPaths().get(i);
if (fileSchema.containsPath(colPath)) {
ColumnDescriptor fd = fileSchema.getColumnDescription(colPath);
if (!fd.equals(requestedSchema.getColumns().get(i))) {
throw new UnsupportedOperationException("Schema evolution not supported.");
}
} else {
if (requestedSchema.getColumns().get(i).getMaxDefinitionLevel() == 0) {
// Column is missing in data but the required data is non-nullable. This file is
// invalid.
throw new IOException(
"Required column is missing in data file. Col: "
+ Arrays.toString(colPath));
}
}
}
}
/**
* Method used to check if the end of the input is reached.
*
* @return True if the end is reached, otherwise false.
* @throws IOException Thrown, if an I/O error occurred.
*/
public boolean reachedEnd() throws IOException {
return !ensureBatch();
}
public ColumnarRowData nextRecord() {
// return the next row
row.setRowId(this.nextRow++);
return row;
}
/**
* Checks if there is at least one row left in the batch to return. If no more row are
* available, it reads another batch of rows.
*
* @return Returns true if there is one more row to return, false otherwise.
* @throws IOException throw if an exception happens while reading a batch.
*/
private boolean ensureBatch() throws IOException {
if (nextRow >= rowsInBatch) {
// Try to read the next batch if rows from the file.
if (nextBatch()) {
// No more rows available in the Rows array.
nextRow = 0;
return true;
}
return false;
}
// there is at least one Row left in the Rows array.
return true;
}
/** Advances to the next batch of rows. Returns false if there are no more. */
private boolean nextBatch() throws IOException {
for (WritableColumnVector v : writableVectors) {
v.reset();
}
columnarBatch.setNumRows(0);
if (rowsReturned >= totalRowCount) {
return false;
}
if (rowsReturned == totalCountLoadedSoFar) {
readNextRowGroup();
}
int num = (int) Math.min(batchSize, totalCountLoadedSoFar - rowsReturned);
for (int i = 0; i < columnReaders.length; ++i) {
//noinspection unchecked
columnReaders[i].readToVector(num, writableVectors[i]);
}
rowsReturned += num;
columnarBatch.setNumRows(num);
rowsInBatch = num;
return true;
}
private void readNextRowGroup() throws IOException {
PageReadStore pages = reader.readNextRowGroup();
if (pages == null) {
throw new IOException(
"expecting more rows but reached last block. Read "
+ rowsReturned
+ " out of "
+ totalRowCount);
}
List<Type> types = requestedSchema.getFields();
columnReaders = new ColumnReader[types.size()];
for (int i = 0; i < types.size(); ++i) {
columnReaders[i] =
createColumnReader(
utcTimestamp,
selectedTypes[i],
types.get(i),
requestedSchema.getColumns(),
pages,
fieldList.get(i),
0);
}
totalCountLoadedSoFar += pages.getRowCount();
}
/** Seek to a particular row number. */
public void seekToRow(long rowCount) throws IOException {
if (totalCountLoadedSoFar != 0) {
throw new UnsupportedOperationException("Only support seek at first.");
}
List<BlockMetaData> blockMetaData = reader.getRowGroups();
for (BlockMetaData metaData : blockMetaData) {
if (metaData.getRowCount() > rowCount) {
break;
} else {
reader.skipNextRowGroup();
rowsReturned += metaData.getRowCount();
totalCountLoadedSoFar += metaData.getRowCount();
rowsInBatch = (int) metaData.getRowCount();
nextRow = (int) metaData.getRowCount();
rowCount -= metaData.getRowCount();
}
}
for (int i = 0; i < rowCount; i++) {
boolean end = reachedEnd();
if (end) {
throw new RuntimeException("Seek to many rows.");
}
nextRecord();
}
}
@Override
public void close() throws IOException {
if (reader != null) {
reader.close();
reader = null;
}
}
/** Interface to gen {@link VectorizedColumnBatch}. */
public | ParquetColumnarRowSplitReader |
java | elastic__elasticsearch | x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDoubleFromLongEvaluator.java | {
"start": 1053,
"end": 3817
} | class ____ extends AbstractConvertFunction.AbstractEvaluator {
private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(ToDoubleFromLongEvaluator.class);
private final EvalOperator.ExpressionEvaluator l;
public ToDoubleFromLongEvaluator(Source source, EvalOperator.ExpressionEvaluator l,
DriverContext driverContext) {
super(driverContext, source);
this.l = l;
}
@Override
public EvalOperator.ExpressionEvaluator next() {
return l;
}
@Override
public Block evalVector(Vector v) {
LongVector vector = (LongVector) v;
int positionCount = v.getPositionCount();
if (vector.isConstant()) {
return driverContext.blockFactory().newConstantDoubleBlockWith(evalValue(vector, 0), positionCount);
}
try (DoubleBlock.Builder builder = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) {
for (int p = 0; p < positionCount; p++) {
builder.appendDouble(evalValue(vector, p));
}
return builder.build();
}
}
private double evalValue(LongVector container, int index) {
long value = container.getLong(index);
return ToDouble.fromLong(value);
}
@Override
public Block evalBlock(Block b) {
LongBlock block = (LongBlock) b;
int positionCount = block.getPositionCount();
try (DoubleBlock.Builder builder = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) {
for (int p = 0; p < positionCount; p++) {
int valueCount = block.getValueCount(p);
int start = block.getFirstValueIndex(p);
int end = start + valueCount;
boolean positionOpened = false;
boolean valuesAppended = false;
for (int i = start; i < end; i++) {
double value = evalValue(block, i);
if (positionOpened == false && valueCount > 1) {
builder.beginPositionEntry();
positionOpened = true;
}
builder.appendDouble(value);
valuesAppended = true;
}
if (valuesAppended == false) {
builder.appendNull();
} else if (positionOpened) {
builder.endPositionEntry();
}
}
return builder.build();
}
}
private double evalValue(LongBlock container, int index) {
long value = container.getLong(index);
return ToDouble.fromLong(value);
}
@Override
public String toString() {
return "ToDoubleFromLongEvaluator[" + "l=" + l + "]";
}
@Override
public void close() {
Releasables.closeExpectNoException(l);
}
@Override
public long baseRamBytesUsed() {
long baseRamBytesUsed = BASE_RAM_BYTES_USED;
baseRamBytesUsed += l.baseRamBytesUsed();
return baseRamBytesUsed;
}
public static | ToDoubleFromLongEvaluator |
java | apache__flink | flink-table/flink-table-runtime/src/main/java/org/apache/flink/table/runtime/operators/process/ProcessRowTableOperator.java | {
"start": 2008,
"end": 3729
} | class ____ extends AbstractProcessTableOperator
implements OneInputStreamOperator<RowData, RowData> {
private final @Nullable TableSemantics inputSemantics;
public ProcessRowTableOperator(
StreamOperatorParameters<RowData> parameters,
List<RuntimeTableSemantics> tableSemantics,
List<RuntimeStateInfo> stateInfos,
ProcessTableRunner processTableRunner,
HashFunction[] stateHashCode,
RecordEqualiser[] stateEquals,
RuntimeChangelogMode producedChangelogMode) {
super(
parameters,
tableSemantics,
stateInfos,
processTableRunner,
stateHashCode,
stateEquals,
producedChangelogMode);
if (tableSemantics.isEmpty()) {
inputSemantics = null;
} else {
inputSemantics = tableSemantics.get(0);
}
}
@Override
public void setKeyContextElement1(StreamRecord<?> record) {
// not applicable
}
@Override
public void processElement(StreamRecord<RowData> element) throws Exception {
if (inputSemantics != null) {
processTableRunner.ingestTableEvent(0, element.getValue(), inputSemantics.timeColumn());
}
processTableRunner.processEval();
}
@Override
public void processWatermarkStatus(WatermarkStatus watermarkStatus) throws Exception {
super.processWatermarkStatus(watermarkStatus, 1);
}
@Override
public void processLatencyMarker(LatencyMarker latencyMarker) throws Exception {
super.reportOrForwardLatencyMarker(latencyMarker);
}
}
| ProcessRowTableOperator |
java | google__guice | core/test/com/google/inject/BindingTest.java | {
"start": 20255,
"end": 20379
} | class ____ extends Bacon {
@Override
public Food getMaterial() { return Food.TURKEY; }
}
private static | TurkeyBacon |
java | elastic__elasticsearch | test/framework/src/main/java/org/elasticsearch/test/RetryRule.java | {
"start": 1119,
"end": 3444
} | class ____ implements TestRule {
private static final Logger logger = LogManager.getLogger(RetryRule.class);
private final int maxAttempts;
private final TimeValue retryDelay;
public RetryRule(int maxAttempts, TimeValue retryDelay) {
this.maxAttempts = maxAttempts;
this.retryDelay = Objects.requireNonNull(retryDelay);
}
@Override
public Statement apply(Statement statement, Description description) {
return new Statement() {
@Override
public void evaluate() throws Throwable {
Throwable lastThrowable = null;
for (int i = 0; i < maxAttempts; i++) {
try {
logger.info(Strings.format("Running test [%s] attempt [%d/%d]", description.getMethodName(), i + 1, maxAttempts));
statement.evaluate();
logger.info(
Strings.format("Test [%s] succeeded on attempt [%d/%d]", description.getMethodName(), i + 1, maxAttempts)
);
// Test succeeded so we'll return
return;
} catch (Throwable t) {
logger.info(
Strings.format(
"Test [%s] failed with exception: %s, attempt [%d/%d]",
description.getMethodName(),
t.getMessage(),
i + 1,
maxAttempts
)
);
lastThrowable = t;
// if this was the last iteration then let's skip sleeping
if (i < maxAttempts - 1) {
TimeUnit.MICROSECONDS.sleep(retryDelay.millis());
}
}
}
// if the test failed we should have the throwable, so let's bubble up that failure
if (lastThrowable != null) {
logger.info(Strings.format("Test [%s] failed and exceeded retry limit, failing test.", description.getMethodName()));
throw lastThrowable;
}
}
};
}
}
| RetryRule |
java | apache__rocketmq | example/src/main/java/org/apache/rocketmq/example/transaction/TransactionListenerImpl.java | {
"start": 1193,
"end": 2354
} | class ____ implements TransactionListener {
private AtomicInteger transactionIndex = new AtomicInteger(0);
private ConcurrentHashMap<String, Integer> localTrans = new ConcurrentHashMap<>();
@Override
public LocalTransactionState executeLocalTransaction(Message msg, Object arg) {
int value = transactionIndex.getAndIncrement();
int status = value % 3;
localTrans.put(msg.getTransactionId(), status);
return LocalTransactionState.UNKNOW;
}
@Override
public LocalTransactionState checkLocalTransaction(MessageExt msg) {
Integer status = localTrans.get(msg.getTransactionId());
if (null != status) {
switch (status) {
case 0:
return LocalTransactionState.UNKNOW;
case 1:
return LocalTransactionState.COMMIT_MESSAGE;
case 2:
return LocalTransactionState.ROLLBACK_MESSAGE;
default:
return LocalTransactionState.COMMIT_MESSAGE;
}
}
return LocalTransactionState.COMMIT_MESSAGE;
}
}
| TransactionListenerImpl |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/engine/jdbc/LobCreator.java | {
"start": 719,
"end": 4360
} | interface ____ {
/**
* Wrap the given blob in a serializable wrapper.
*
* @param blob The blob to be wrapped.
* @return The wrapped blob which will be castable to {@link Blob}
* as well as {@link org.hibernate.engine.jdbc.proxy.WrappedBlob}.
*/
Blob wrap(Blob blob);
/**
* Wrap the given clob in a serializable wrapper.
*
* @param clob The clob to be wrapped.
* @return The wrapped clob which will be castable to {@link Clob}
* as well as {@link org.hibernate.engine.jdbc.proxy.WrappedClob}.
*/
Clob wrap(Clob clob);
/**
* Wrap the given nclob in a serializable wrapper.
*
* @param nclob The nclob to be wrapped.
* @return The wrapped nclob which will be castable to {@link NClob}
* as well as {@link org.hibernate.engine.jdbc.proxy.WrappedNClob}.
*/
NClob wrap(NClob nclob);
/**
* Create a BLOB reference encapsulating the given byte array.
*
* @param bytes The byte array to wrap as a blob.
* @return The created blob, castable to {@link Blob} as well as {@link BlobImplementer}
*/
Blob createBlob(byte[] bytes);
/**
* Create a BLOB reference encapsulating the given binary stream.
*
* @param stream The binary stream to wrap as a blob.
* @param length The length of the stream.
* @return The created blob, castable to {@link Blob} as well as {@link BlobImplementer}
*/
Blob createBlob(InputStream stream, long length);
/**
* Create a CLOB reference encapsulating the given String data.
*
* @param string The String to wrap as a clob.
* @return The created clob, castable to {@link Clob} as well as {@link ClobImplementer}
*/
Clob createClob(String string);
/**
* Create a CLOB reference encapsulating the given character data.
*
* @param reader The character data reader.
* @param length The length of the reader data.
* @return The created clob, castable to {@link Clob} as well as {@link ClobImplementer}
*/
Clob createClob(Reader reader, long length);
/**
* Create a NCLOB reference encapsulating the given String data.
*
* @param string The String to wrap as a NCLOB.
* @return The created NCLOB, castable as {@link Clob} as well as {@link NClobImplementer}. In JDK 1.6
* environments, also castable to java.sql.NClob
*/
NClob createNClob(String string);
/**
* Create a NCLOB reference encapsulating the given character data.
*
* @param reader The character data reader.
* @param length The length of the reader data.
* @return The created NCLOB, castable as {@link Clob} as well as {@link NClobImplementer}. In JDK 1.6
* environments, also castable to java.sql.NClob
*/
NClob createNClob(Reader reader, long length);
/**
* Return an instance which can actually be written to a JDBC
* {@code PreparedStatement}.
*
* @see java.sql.PreparedStatement#setBlob(int, Blob)
*
* @apiNote This is needed for Oracle
*
* @see org.hibernate.dialect.Dialect#useConnectionToCreateLob
*
* @since 7.0
*/
Blob toJdbcBlob(Blob clob);
/**
* Return an instance which can actually be written to a JDBC
* {@code PreparedStatement}.
*
* @see java.sql.PreparedStatement#setClob(int, Clob)
*
* @apiNote This is needed for Oracle
*
* @see org.hibernate.dialect.Dialect#useConnectionToCreateLob
*
* @since 7.0
*/
Clob toJdbcClob(Clob clob);
/**
* Return an instance which can actually be written to a JDBC
* {@code PreparedStatement}.
*
* @see java.sql.PreparedStatement#setNClob(int, NClob)
*
* @apiNote This is needed for Oracle
*
* @see org.hibernate.dialect.Dialect#useConnectionToCreateLob
*
* @since 7.0
*/
NClob toJdbcNClob(NClob clob);
}
| LobCreator |
java | apache__flink | flink-tests/src/test/java/org/apache/flink/test/state/operator/restore/unkeyed/ChainBreakTest.java | {
"start": 1802,
"end": 3063
} | class ____ extends AbstractNonKeyedOperatorRestoreTestBase {
public ChainBreakTest(FlinkVersion flinkVersion) {
super(flinkVersion);
}
@SnapshotsGenerator
public void generateSnapshots(FlinkVersion targetVersion) throws Exception {
internalGenerateSnapshots(targetVersion);
}
@Override
public void createRestoredJob(StreamExecutionEnvironment env) {
/**
* Original job: Source -> StatefulMap1 -> CHAIN(StatefulMap2 -> Map -> StatefulMap3)
* Modified job: Source -> StatefulMap1 -> CHAIN(StatefulMap2 -> Map) -> StatefulMap3
*/
DataStream<Integer> source = createSource(env, ExecutionMode.RESTORE);
SingleOutputStreamOperator<Integer> first =
createFirstStatefulMap(ExecutionMode.RESTORE, source);
first.startNewChain();
SingleOutputStreamOperator<Integer> second =
createSecondStatefulMap(ExecutionMode.RESTORE, first);
second.startNewChain();
SingleOutputStreamOperator<Integer> stateless = createStatelessMap(second);
SingleOutputStreamOperator<Integer> third =
createThirdStatefulMap(ExecutionMode.RESTORE, stateless);
third.startNewChain();
}
}
| ChainBreakTest |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/jpa/query/CachedQueryShallowPolymorphicTest.java | {
"start": 6677,
"end": 6916
} | class ____ extends Person {
String nr;
public Employee() {
}
public Employee(Integer id, String name) {
this.id = id;
this.name = name;
this.nr = id.toString();
}
public String getNr() {
return nr;
}
}
}
| Employee |
java | alibaba__druid | core/src/test/java/com/alibaba/druid/bvt/sql/mysql/createTable/MySqlCreateTableTest107.java | {
"start": 329,
"end": 2972
} | class ____ extends MysqlTest {
public void test_0() throws Exception {
String sql = "CREATE TABLE \n" +
"aliolap152578dbopt.aliolap152578dbopt_tbl1 (\n" +
"col_id_int int NOT NULL , \n" +
"col2_tinyint tinyint , \n" +
"col3_boolean boolean , \n" +
"col4_smallint smallint , \n" +
"col5_int int , \n" +
"col6_bigint bigint , \n" +
"col7_float float , \n" +
"col8_double double , \n" +
"col9_date date , \n" +
"col10_time time , \n" +
"col11_timestamp timestamp , \n" +
"col12_varchar varchar(1000) , \n" +
"col13_multivalue multivalue delimiter ',' , \n" +
"primary key (col_id_int,col6_bigint)\n" +
") \n" +
"PARTITION BY HASH KEY(col_id_int) PARTITION NUM 100\n" +
"SUBPARTITION BY LIST(col6_bigint BIGINT)\n" +
"SUBPARTITION OPTIONS(available_Partition_Num=100)\n" +
"TABLEGROUP aliolap152578dbopt_tg1\n" +
"OPTIONS(UPDATETYPE='realtime')\n" +
";";
MySqlStatementParser parser = new MySqlStatementParser(sql);
List<SQLStatement> statementList = parser.parseStatementList();
MySqlCreateTableStatement stmt = (MySqlCreateTableStatement) statementList.get(0);
assertEquals(1, statementList.size());
assertEquals(14, stmt.getTableElementList().size());
assertEquals("CREATE TABLE aliolap152578dbopt.aliolap152578dbopt_tbl1 (\n"
+ "\tcol_id_int int NOT NULL,\n"
+ "\tcol2_tinyint tinyint,\n" + "\tcol3_boolean boolean,\n"
+ "\tcol4_smallint smallint,\n"
+ "\tcol5_int int,\n" + "\tcol6_bigint bigint,\n"
+ "\tcol7_float float,\n"
+ "\tcol8_double double,\n" + "\tcol9_date date,\n"
+ "\tcol10_time time,\n"
+ "\tcol11_timestamp timestamp,\n" + "\tcol12_varchar varchar(1000),\n"
+ "\tcol13_multivalue multivalue DELIMITER ',',\n"
+ "\tPRIMARY KEY (col_id_int, col6_bigint)\n"
+ ")\n"
+ "OPTIONS (UPDATETYPE = 'realtime')\n"
+ "PARTITION BY HASH KEY(col_id_int) PARTITION NUM 100\n"
+ "SUBPARTITION BY LIST (col6_bigint BIGINT)\n"
+ "SUBPARTITION OPTIONS (available_Partition_Num = 100)\n"
+ "TABLEGROUP aliolap152578dbopt_tg1;", stmt.toString());
}
}
| MySqlCreateTableTest107 |
java | alibaba__druid | core/src/test/java/com/alibaba/druid/bvt/sql/mysql/param/MySqlParameterizedOutputVisitorTest_4.java | {
"start": 787,
"end": 1317
} | class ____ extends MySQLParameterizedTest {
public void test_0() throws Exception {
String sql = "select user0_.id as id0_, user0_.email as email0_, "
+ "user0_.login_name as login3_0_, user0_.name as name0_, "//
+ "user0_.password as password0_ "
+ "from acct_user user0_ "//
+ "where user0_.login_name=? limit ?";
assertSame(ParameterizedOutputVisitorUtils.parameterize(sql, JdbcConstants.MYSQL), sql);
}
}
| MySqlParameterizedOutputVisitorTest_4 |
java | elastic__elasticsearch | x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/watch/WatchStatus.java | {
"start": 17421,
"end": 17997
} | interface ____ {
ParseField STATE = new ParseField("state");
ParseField ACTIVE = new ParseField("active");
ParseField TIMESTAMP = new ParseField("timestamp");
ParseField LAST_CHECKED = new ParseField("last_checked");
ParseField LAST_MET_CONDITION = new ParseField("last_met_condition");
ParseField ACTIONS = new ParseField("actions");
ParseField VERSION = new ParseField("version");
ParseField EXECUTION_STATE = new ParseField("execution_state");
ParseField HEADERS = new ParseField("headers");
}
}
| Field |
java | spring-projects__spring-security | aspects/src/test/java/org/springframework/security/authorization/method/aspectj/PreAuthorizeAspectTests.java | {
"start": 5285,
"end": 5421
} | class ____ implements SecuredInterface, AnotherSecuredInterface {
@Override
public void securedMethod() {
}
}
}
| MultipleInterfaces |
java | assertj__assertj-core | assertj-core/src/test/java/org/assertj/core/internal/StandardComparisonStrategy_areEqual_Test.java | {
"start": 11371,
"end": 11710
} | class ____ {
private final Object y, z;
NonTransitive(Object y, Object z) {
this.y = y;
this.z = z;
}
@Override
public boolean equals(Object obj) {
return obj == y || obj != z;
}
@Override
public String toString() {
return "non transitive";
}
}
private static | NonTransitive |
java | apache__camel | core/camel-core/src/test/java/org/apache/camel/component/bean/AmbiguousMethodCallExceptionSimplifiedTest.java | {
"start": 1510,
"end": 2224
} | class ____ extends SuperClazz {
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
from("direct:in").choice().when(simple("${headers.bean.size} != 0")).to("mock:out");
}
};
}
@Test
public void testAmbiguousMethodCallException() throws Exception {
MockEndpoint out = getMockEndpoint("mock:out");
out.expectedMessageCount(1);
ExchangeBuilder exchangeBuilder = new ExchangeBuilder(context).withHeader("bean", new Clazz());
template.send("direct:in", exchangeBuilder.build());
out.assertIsSatisfied();
}
}
| Clazz |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/runtime/entrypoint/component/FileJobGraphRetriever.java | {
"start": 1642,
"end": 3499
} | class ____ extends AbstractUserClassPathJobGraphRetriever {
@Internal
public static final ConfigOption<String> JOB_GRAPH_FILE_PATH =
ConfigOptions.key("internal.jobgraph-path").stringType().defaultValue("job.graph");
@Nonnull private final String jobGraphFile;
public FileJobGraphRetriever(@Nonnull String jobGraphFile, @Nullable File usrLibDir)
throws IOException {
super(usrLibDir);
this.jobGraphFile = jobGraphFile;
}
@Override
public JobGraph retrieveJobGraph(Configuration configuration) throws FlinkException {
final File fp = new File(jobGraphFile);
try (FileInputStream input = new FileInputStream(fp);
ObjectInputStream obInput = new ObjectInputStream(input)) {
final JobGraph jobGraph = (JobGraph) obInput.readObject();
addUserClassPathsToJobGraph(jobGraph);
return jobGraph;
} catch (FileNotFoundException e) {
throw new FlinkException("Could not find the JobGraph file.", e);
} catch (ClassNotFoundException | IOException e) {
throw new FlinkException("Could not load the JobGraph from file.", e);
}
}
private void addUserClassPathsToJobGraph(JobGraph jobGraph) {
final List<URL> classPaths = new ArrayList<>();
if (jobGraph.getClasspaths() != null) {
classPaths.addAll(jobGraph.getClasspaths());
}
classPaths.addAll(getUserClassPaths());
jobGraph.setClasspaths(classPaths);
}
public static FileJobGraphRetriever createFrom(
Configuration configuration, @Nullable File usrLibDir) throws IOException {
checkNotNull(configuration, "configuration");
return new FileJobGraphRetriever(configuration.get(JOB_GRAPH_FILE_PATH), usrLibDir);
}
}
| FileJobGraphRetriever |
java | apache__flink | flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/runtime/utils/JavaUserDefinedScalarFunctions.java | {
"start": 1824,
"end": 2145
} | class ____ extends ScalarFunction {
public String eval(Integer a, int b, @DataTypeHint("TIMESTAMP(3)") TimestampData c) {
Long ts = (c == null) ? null : c.getMillisecond();
return a + " and " + b + " and " + ts;
}
}
/** Append product to string. */
public static | JavaFunc1 |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/jpa/boot/internal/EntityManagerFactoryBuilderImpl.java | {
"start": 15620,
"end": 50234
} | class ____, but none was given"
+ exceptionHeader() );
}
discoverTypesToTransform( metadataSources, classTransformer, classLoader );
}
}
}
/**
 * Walks every mapping source (hbm.xml bindings plus annotated class names and
 * classes) and asks the {@link ClassTransformer} to discover the types that
 * participate in bytecode enhancement.
 *
 * @param metadataSources the collected mapping sources
 * @param classTransformer the transformer performing type discovery
 * @param classLoader class loader used to resolve the discovered class names
 */
private static void discoverTypesToTransform(
        MetadataSources metadataSources, ClassTransformer classTransformer, ClassLoader classLoader) {
    for ( var binding : metadataSources.getHbmXmlBindings() ) {
        final var hibernateMapping = binding.getRoot();
        final String packageName = hibernateMapping.getPackage();
        for ( var clazz : hibernateMapping.getClazz() ) {
            // Qualify the class name with the mapping's package unless it is absent/empty.
            final String className =
                    packageName == null || packageName.isEmpty()
                            ? clazz.getName()
                            : packageName + '.' + clazz.getName();
            try {
                classTransformer.discoverTypes(classLoader, className );
            }
            catch (EnhancementException ex) {
                // Discovery failure for one hbm class is logged, not fatal.
                JPA_LOGGER.enhancementDiscoveryFailed( className, ex );
            }
        }
    }
    // NOTE(review): unlike the hbm loop above, failures here are not caught —
    // an EnhancementException from an annotated class propagates to the caller.
    for ( String annotatedClassName : metadataSources.getAnnotatedClassNames() ) {
        classTransformer.discoverTypes( classLoader, annotatedClassName );
    }
    for ( Class<?> annotatedClass : metadataSources.getAnnotatedClasses() ) {
        classTransformer.discoverTypes( classLoader, annotatedClass.getName() );
    }
}
private boolean readBooleanConfigurationValueDefaultTrue(String propertyName) {
    // Consume (remove) the setting; an absent value counts as true.
    final Object value = configurationValues.remove( propertyName );
    if ( value == null ) {
        return true;
    }
    return parseBoolean( value.toString() );
}
/**
* Extension point for subclasses. Used by Hibernate Reactive
*/
protected StandardServiceRegistryBuilder getStandardServiceRegistryBuilder(BootstrapServiceRegistry bsr) {
return StandardServiceRegistryBuilder.forJpa( bsr );
}
private void applyMetadataBuilderContributor() {
final Object metadataBuilderContributorSetting = configurationValues.get( METADATA_BUILDER_CONTRIBUTOR );
if ( metadataBuilderContributorSetting != null ) {
final var metadataBuilderContributor = loadSettingInstance(
METADATA_BUILDER_CONTRIBUTOR,
metadataBuilderContributorSetting,
MetadataBuilderContributor.class
);
if ( metadataBuilderContributor != null ) {
metadataBuilderContributor.contribute( metamodelBuilder );
}
}
metamodelBuilder.getBootstrapContext().getClassLoaderService()
.loadJavaServices( MetadataBuilderContributor.class )
.forEach( contributor -> contributor.contribute( metamodelBuilder ) );
}
// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
// temporary!
public Map<Object,Object> getConfigurationValues() {
return unmodifiableMap( configurationValues );
}
// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
private boolean readBooleanConfigurationValue(String propertyName) {
    // Consume (remove) the setting; an absent value counts as false.
    final Object value = configurationValues.remove( propertyName );
    if ( value == null ) {
        return false;
    }
    return parseBoolean( value.toString() );
}
/**
* Builds the context to be used in runtime bytecode enhancement
*
* @param dirtyTrackingEnabled To enable dirty tracking feature
* @param lazyInitializationEnabled To enable lazy initialization feature
* @param associationManagementEnabled To enable association management feature
* @return An enhancement context for classes managed by this EM
*/
protected EnhancementContext getEnhancementContext(
final boolean dirtyTrackingEnabled,
final boolean lazyInitializationEnabled,
final boolean associationManagementEnabled ) {
final Object propValue = configurationValues.get( BYTECODE_PROVIDER_INSTANCE );
if ( propValue != null && ( ! ( propValue instanceof BytecodeProvider ) ) ) {
throw new PersistenceException( "Property " + BYTECODE_PROVIDER_INSTANCE + " was set to '" + propValue
+ "', which is not compatible with the expected type " + BytecodeProvider.class );
}
final var overriddenBytecodeProvider = (BytecodeProvider) propValue;
return new DefaultEnhancementContext() {
@Override
public boolean isEntityClass(UnloadedClass classDescriptor) {
return managedResources.getAnnotatedClassNames().contains( classDescriptor.getName() )
&& super.isEntityClass( classDescriptor );
}
@Override
public boolean isCompositeClass(UnloadedClass classDescriptor) {
return managedResources.getAnnotatedClassNames().contains( classDescriptor.getName() )
&& super.isCompositeClass( classDescriptor );
}
@Override
public boolean doBiDirectionalAssociationManagement(UnloadedField field) {
return associationManagementEnabled;
}
@Override
public boolean doDirtyCheckingInline(UnloadedClass classDescriptor) {
return dirtyTrackingEnabled;
}
@Override
public boolean hasLazyLoadableAttributes(UnloadedClass classDescriptor) {
return lazyInitializationEnabled;
}
@Override
public boolean isLazyLoadable(UnloadedField field) {
return lazyInitializationEnabled;
}
@Override
public boolean doExtendedEnhancement(UnloadedClass classDescriptor) {
// doesn't make any sense to have extended enhancement enabled at runtime. we only enhance entities anyway.
return false;
}
@Override
public BytecodeProvider getBytecodeProvider() {
return overriddenBytecodeProvider;
}
};
}
/**
* Builds the {@link BootstrapServiceRegistry} used to eventually build the {@link StandardServiceRegistryBuilder}; mainly
* used here during instantiation to define class-loading behavior.
*
* @param integrationSettings Any integration settings passed by the EE container or SE application
*
* @return The built BootstrapServiceRegistry
*/
private BootstrapServiceRegistry buildBootstrapServiceRegistry(
Map<?,?> integrationSettings,
ClassLoader providedClassLoader,
ClassLoaderService providedClassLoaderService) {
final var builder = new BootstrapServiceRegistryBuilder();
applyIntegrationProvider( integrationSettings, builder );
final var strategyRegistrationProviderList =
(StrategyRegistrationProviderList)
integrationSettings.get( STRATEGY_REGISTRATION_PROVIDERS );
if ( strategyRegistrationProviderList != null ) {
for ( var strategyRegistrationProvider :
strategyRegistrationProviderList.getStrategyRegistrationProviders() ) {
builder.applyStrategySelectors( strategyRegistrationProvider );
}
}
configureClassLoading( integrationSettings, providedClassLoader, providedClassLoaderService, builder );
return builder.build();
}
/**
* @implNote {@code providedClassLoaderService} and {@code providedClassLoaders}
* are mutually exclusive concepts, with priority given to the former.
*
* @see BootstrapServiceRegistryBuilder#build
*/
private void configureClassLoading(
Map<?, ?> integrationSettings,
ClassLoader providedClassLoader,
ClassLoaderService providedClassLoaderService,
BootstrapServiceRegistryBuilder registryBuilder) {
if ( providedClassLoaderService != null ) {
registryBuilder.applyClassLoaderService( providedClassLoaderService );
}
else {
if ( persistenceUnit.getClassLoader() != null ) {
registryBuilder.applyClassLoader( persistenceUnit.getClassLoader() );
}
if ( providedClassLoader != null ) {
registryBuilder.applyClassLoader( providedClassLoader );
}
applyConfiguredClassLoaders( integrationSettings, registryBuilder );
//configurationValues not assigned yet, using directly the properties of the PU
final var unitProperties = persistenceUnit.getProperties();
if ( unitProperties != null ) {
final TcclLookupPrecedence tcclLookupPrecedence = TcclLookupPrecedence.from( unitProperties );
if ( tcclLookupPrecedence != null ) {
registryBuilder.applyTcclLookupPrecedence( tcclLookupPrecedence );
}
}
}
}
private static void applyConfiguredClassLoaders(
Map<?, ?> integrationSettings, BootstrapServiceRegistryBuilder registryBuilder) {
final Object classLoadersSetting = integrationSettings.get( CLASSLOADERS );
if ( classLoadersSetting != null ) {
if ( classLoadersSetting instanceof Collection ) {
@SuppressWarnings("unchecked")
final var classLoaders = (Collection<ClassLoader>) classLoadersSetting;
for ( ClassLoader classLoader : classLoaders ) {
registryBuilder.applyClassLoader( classLoader );
}
}
else if ( classLoadersSetting.getClass().isArray() ) {
for ( ClassLoader classLoader : (ClassLoader[]) classLoadersSetting ) {
registryBuilder.applyClassLoader( classLoader );
}
}
else if ( classLoadersSetting instanceof ClassLoader classLoader ) {
registryBuilder.applyClassLoader( classLoader );
}
}
}
private void applyIntegrationProvider(
Map<?,?> integrationSettings, BootstrapServiceRegistryBuilder registryBuilder) {
final Object integrationSetting = integrationSettings.get( INTEGRATOR_PROVIDER );
if ( integrationSetting != null ) {
final var integratorProvider =
loadSettingInstance( INTEGRATOR_PROVIDER, integrationSetting, IntegratorProvider.class );
if ( integratorProvider != null ) {
for ( var integrator : integratorProvider.getIntegrators() ) {
registryBuilder.applyIntegrator( integrator );
}
}
}
}
/**
 * Merges settings from the persistence-unit descriptor, any named Hibernate
 * cfg.xml resource and the integration settings into one {@link MergedSettings}.
 * Afterwards scans the merged values, dropping null-valued entries and
 * collecting cache region definitions from String-valued entries.
 *
 * @param persistenceUnit the persistence-unit descriptor being bootstrapped
 * @param integrationSettings settings passed in by the container/application
 * @param registryBuilder used to load a cfg.xml resource when one is named
 * @param mergedSettingsBaseline optional hook to pre-populate the merged settings; may be null
 * @return the fully merged settings
 */
private MergedSettings mergeSettings(
        PersistenceUnitDescriptor persistenceUnit,
        Map<String,Object> integrationSettings,
        StandardServiceRegistryBuilder registryBuilder,
        Consumer<MergedSettings> mergedSettingsBaseline) {
    final var mergedSettings = new MergedSettings();
    if ( mergedSettingsBaseline != null ) {
        mergedSettingsBaseline.accept( mergedSettings );
    }
    mergedSettings.processPersistenceUnitDescriptorProperties( persistenceUnit );
    // see if the persistence.xml settings named a Hibernate config file
    final String cfgXmlResourceName = getCfgXmlResourceName( integrationSettings, mergedSettings );
    if ( isNotEmpty( cfgXmlResourceName ) ) {
        processHibernateConfigXmlResources( registryBuilder, mergedSettings, cfgXmlResourceName );
    }
    normalizeSettings( persistenceUnit, integrationSettings, mergedSettings );
    // Iterate the merged config settings to:
    // 1) remove entries with null values
    // 2) collect cache region declarations from String-valued entries
    // FIX: previously the loop did `break` after removing the FIRST null-valued
    // entry (flagged in-code with "TODO: this looks wrong!"), which left later
    // null entries in place and skipped their cache region definitions.
    // The loop now processes every entry.
    final var iterator = mergedSettings.getConfigurationValues().entrySet().iterator();
    while ( iterator.hasNext() ) {
        final var entry = iterator.next();
        final Object value = entry.getValue();
        if ( value == null ) {
            // remove entries with null values and keep scanning
            iterator.remove();
        }
        else if ( value instanceof String valueString ) {
            handleCacheRegionDefinition( valueString, entry.getKey(), mergedSettings );
        }
    }
    return mergedSettings;
}
private void handleCacheRegionDefinition(String valueString, String keyString, MergedSettings mergedSettings) {
    // Settings whose key starts with the class/collection cache prefix carry a
    // cache region definition; the role is the key suffix after "<prefix>.".
    // Keys matching neither prefix are ignored.
    final CacheRegionType regionType;
    final String role;
    if ( keyString.startsWith( CLASS_CACHE_PREFIX ) ) {
        regionType = CacheRegionType.ENTITY;
        role = keyString.substring( CLASS_CACHE_PREFIX.length() + 1 );
    }
    else if ( keyString.startsWith( COLLECTION_CACHE_PREFIX ) ) {
        regionType = CacheRegionType.COLLECTION;
        role = keyString.substring( COLLECTION_CACHE_PREFIX.length() + 1 );
    }
    else {
        return;
    }
    mergedSettings.addCacheRegionDefinition(
            parseCacheRegionDefinitionEntry( role, valueString, regionType ) );
}
private static String getCfgXmlResourceName(Map<String, Object> integrationSettings, MergedSettings mergedSettings) {
    // The merged (persistence-unit) value is consumed and wins; fall back to
    // the integration settings only when it is missing or empty.
    final String fromMergedSettings =
            (String) mergedSettings.getConfigurationValues().remove( CFG_XML_FILE );
    return isEmpty( fromMergedSettings )
            ? (String) integrationSettings.get( CFG_XML_FILE )
            : fromMergedSettings;
}
/**
* Handles normalizing the settings coming from multiple sources, applying proper precedences
*/
private void normalizeSettings(
PersistenceUnitDescriptor persistenceUnit,
Map<String, Object> integrationSettings,
MergedSettings mergedSettings) {
// make a copy so that we can remove things as we process them
final Map<String, Object> integrationSettingsCopy = new HashMap<>( integrationSettings );
normalizeConnectionAccessUserAndPass( integrationSettingsCopy, mergedSettings );
normalizeTransactionCoordinator( persistenceUnit, integrationSettingsCopy, mergedSettings );
normalizeDataAccess( integrationSettingsCopy, mergedSettings, persistenceUnit );
normalizeValidationMode( persistenceUnit, integrationSettingsCopy, mergedSettings );
normalizeSharedCacheMode( persistenceUnit, integrationSettingsCopy, mergedSettings );
// Apply all "integration overrides" as the last step. By specification,
// these should have precedence.
// NOTE that this occurs after the specialized normalize calls above which
// remove any specially-handled settings.
for ( var entry : integrationSettingsCopy.entrySet() ) {
final String key = entry.getKey();
if ( key != null ) {
final Object value = entry.getValue();
if ( value == null ) {
mergedSettings.getConfigurationValues().remove( key );
}
else {
mergedSettings.getConfigurationValues().put( key, value );
}
}
}
}
private static void normalizeSharedCacheMode(
PersistenceUnitDescriptor persistenceUnit,
Map<String, Object> integrationSettingsCopy,
MergedSettings mergedSettings) {
final var configurationSettings = mergedSettings.getConfigurationValues();
// normalize SharedCacheMode
final Object intgCacheMode = integrationSettingsCopy.remove( JPA_SHARED_CACHE_MODE );
final Object jakartaIntgCacheMode = integrationSettingsCopy.remove( JAKARTA_SHARED_CACHE_MODE );
if ( jakartaIntgCacheMode != null ) {
configurationSettings.put( JAKARTA_SHARED_CACHE_MODE, jakartaIntgCacheMode );
}
else if ( intgCacheMode != null ) {
DEPRECATION_LOGGER.deprecatedSetting( JPA_SHARED_CACHE_MODE, JAKARTA_SHARED_CACHE_MODE );
configurationSettings.put( JPA_SHARED_CACHE_MODE, intgCacheMode );
}
else if ( persistenceUnit.getSharedCacheMode() != null ) {
configurationSettings.put( JAKARTA_SHARED_CACHE_MODE, persistenceUnit.getSharedCacheMode() );
}
}
private static void normalizeValidationMode(
PersistenceUnitDescriptor persistenceUnit,
Map<String, Object> integrationSettingsCopy,
MergedSettings mergedSettings) {
final var configurationSettings = mergedSettings.getConfigurationValues();
// normalize ValidationMode
final Object intgValidationMode = integrationSettingsCopy.remove( JPA_VALIDATION_MODE );
final Object jakartaIntgValidationMode = integrationSettingsCopy.remove( JAKARTA_VALIDATION_MODE );
if ( jakartaIntgValidationMode != null ) {
configurationSettings.put( JAKARTA_VALIDATION_MODE, jakartaIntgValidationMode );
}
else if ( intgValidationMode != null ) {
DEPRECATION_LOGGER.deprecatedSetting( JPA_VALIDATION_MODE, JAKARTA_VALIDATION_MODE );
configurationSettings.put( JPA_VALIDATION_MODE, intgValidationMode );
}
else if ( persistenceUnit.getValidationMode() != null ) {
configurationSettings.put( JAKARTA_VALIDATION_MODE, persistenceUnit.getValidationMode() );
}
}
/**
* Because a DataSource can be secured (requiring Hibernate to pass the USER/PASSWORD when accessing the DataSource)
* we apply precedence to the USER and PASS separately
*/
private void normalizeConnectionAccessUserAndPass(
Map<?, ?> integrationSettingsCopy,
MergedSettings mergedSettings) {
final Object effectiveUser = coalesceSuppliedValues(
() -> integrationSettingsCopy.remove( USER ),
() -> integrationSettingsCopy.remove( JAKARTA_JDBC_USER ),
() -> {
final Object setting = integrationSettingsCopy.remove( JPA_JDBC_USER );
if ( setting != null ) {
DEPRECATION_LOGGER.deprecatedSetting( JPA_JDBC_USER, JAKARTA_JDBC_USER );
}
return setting;
},
() -> extractPuProperty( persistenceUnit, USER ),
() -> extractPuProperty( persistenceUnit, JAKARTA_JDBC_USER ),
() -> {
final Object setting = extractPuProperty( persistenceUnit, JPA_JDBC_USER );
if ( setting != null ) {
DEPRECATION_LOGGER.deprecatedSetting( JPA_JDBC_USER, JAKARTA_JDBC_USER );
}
return setting;
}
);
final Object effectivePass = coalesceSuppliedValues(
() -> integrationSettingsCopy.remove( PASS ),
() -> integrationSettingsCopy.remove( JAKARTA_JDBC_PASSWORD ),
() -> {
final Object setting = integrationSettingsCopy.remove( JPA_JDBC_PASSWORD );
if ( setting != null ) {
DEPRECATION_LOGGER.deprecatedSetting( JPA_JDBC_PASSWORD, JAKARTA_JDBC_PASSWORD );
}
return setting;
},
() -> extractPuProperty( persistenceUnit, PASS ),
() -> extractPuProperty( persistenceUnit, JAKARTA_JDBC_PASSWORD ),
() -> {
{
final Object setting = extractPuProperty( persistenceUnit, JPA_JDBC_PASSWORD );
if ( setting != null ) {
DEPRECATION_LOGGER.deprecatedSetting( JPA_JDBC_PASSWORD, JAKARTA_JDBC_PASSWORD );
}
return setting;
}
}
);
if ( effectiveUser != null || effectivePass != null ) {
applyUserAndPass( effectiveUser, effectivePass, mergedSettings );
}
}
private <T> T extractPuProperty(PersistenceUnitDescriptor persistenceUnit, String propertyName) {
final var properties = persistenceUnit.getProperties();
//noinspection unchecked
return properties == null ? null : (T) properties.get( propertyName );
}
private void applyUserAndPass(Object effectiveUser, Object effectivePass, MergedSettings mergedSettings) {
final var configuration = mergedSettings.getConfigurationValues();
if ( effectiveUser != null ) {
configuration.put( USER, effectiveUser );
configuration.put( JAKARTA_JDBC_USER, effectiveUser );
configuration.put( JPA_JDBC_USER, effectiveUser );
}
if ( effectivePass != null ) {
configuration.put( PASS, effectivePass );
configuration.put( JAKARTA_JDBC_PASSWORD, effectivePass );
configuration.put( JPA_JDBC_PASSWORD, effectivePass );
}
}
private static final String IS_JTA_TXN_COORD = "local.setting.IS_JTA_TXN_COORD";
private void normalizeTransactionCoordinator(
PersistenceUnitDescriptor persistenceUnit,
Map<?, ?> integrationSettingsCopy,
MergedSettings mergedSettings) {
final var txnType = determineTransactionType( persistenceUnit, integrationSettingsCopy, mergedSettings );
final boolean definiteJtaCoordinator =
mergedSettings.getConfigurationValues().containsKey( TRANSACTION_COORDINATOR_STRATEGY )
? handeTransactionCoordinatorStrategy( mergedSettings )
: handleTransactionType( mergedSettings, txnType );
mergedSettings.getConfigurationValues().put( IS_JTA_TXN_COORD, definiteJtaCoordinator );
}
private static boolean handeTransactionCoordinatorStrategy(MergedSettings mergedSettings) {
JPA_LOGGER.overridingTransactionStrategyDangerous( TRANSACTION_COORDINATOR_STRATEGY );
// see if we can tell whether it is a JTA coordinator
final Object strategy = mergedSettings.getConfigurationValues().get( TRANSACTION_COORDINATOR_STRATEGY );
return strategy instanceof TransactionCoordinatorBuilder transactionCoordinatorBuilder
&& transactionCoordinatorBuilder.isJta();
}
private static boolean handleTransactionType(MergedSettings mergedSettings, PersistenceUnitTransactionType txnType) {
final boolean isJtaTransactionType = txnType == JTA;
mergedSettings.getConfigurationValues()
.put( TRANSACTION_COORDINATOR_STRATEGY, isJtaTransactionType
? JtaTransactionCoordinatorBuilderImpl.class
: JdbcResourceLocalTransactionCoordinatorBuilderImpl.class );
return isJtaTransactionType;
}
private static PersistenceUnitTransactionType determineTransactionType(
PersistenceUnitDescriptor persistenceUnit, Map<?, ?> integrationSettingsCopy, MergedSettings mergedSettings) {
final var txnType = configuredTransactionType( persistenceUnit, integrationSettingsCopy, mergedSettings );
if ( txnType == null ) {
// is it more appropriate to have this be based on bootstrap entry point (EE vs SE)?
JPA_LOGGER.fallingBackToResourceLocal();
return PersistenceUnitTransactionType.RESOURCE_LOCAL;
}
else {
return txnType;
}
}
private static PersistenceUnitTransactionType configuredTransactionType(
PersistenceUnitDescriptor persistenceUnit, Map<?, ?> integrationSettingsCopy, MergedSettings mergedSettings) {
Object intgTxnType = integrationSettingsCopy.remove( JAKARTA_TRANSACTION_TYPE );
if ( intgTxnType == null ) {
intgTxnType = integrationSettingsCopy.remove( JPA_TRANSACTION_TYPE );
if ( intgTxnType != null ) {
DEPRECATION_LOGGER.deprecatedSetting( JPA_TRANSACTION_TYPE, JAKARTA_TRANSACTION_TYPE );
}
}
if ( intgTxnType != null ) {
return interpretTransactionType( intgTxnType );
}
else {
final var persistenceUnitTransactionType = persistenceUnit.getPersistenceUnitTransactionType();
if ( persistenceUnitTransactionType != null ) {
return persistenceUnitTransactionType;
}
else {
Object puPropTxnType = mergedSettings.getConfigurationValues().get( JAKARTA_TRANSACTION_TYPE );
if ( puPropTxnType == null ) {
puPropTxnType = mergedSettings.getConfigurationValues().get( JPA_TRANSACTION_TYPE );
if ( puPropTxnType != null ) {
DEPRECATION_LOGGER.deprecatedSetting( JPA_TRANSACTION_TYPE, JAKARTA_TRANSACTION_TYPE );
}
}
return puPropTxnType == null ? null : interpretTransactionType( puPropTxnType );
}
}
}
private void normalizeDataAccess(
Map<?, ?> integrationSettingsCopy,
MergedSettings mergedSettings,
PersistenceUnitDescriptor persistenceUnit) {
if ( dataSource != null ) {
// we don't explicitly know if it's JTA
applyDataSource( dataSource, null, integrationSettingsCopy, mergedSettings );
// EARLY EXIT!!
return;
}
if ( integrationSettingsCopy.containsKey( DATASOURCE ) ) {
final Object dataSourceRef = integrationSettingsCopy.remove( DATASOURCE );
if ( dataSourceRef != null ) {
applyDataSource( dataSourceRef, null, integrationSettingsCopy, mergedSettings );
// EARLY EXIT!!
return;
}
}
if ( integrationSettingsCopy.containsKey( JAKARTA_JTA_DATASOURCE ) ) {
final Object dataSourceRef = integrationSettingsCopy.remove( JAKARTA_JTA_DATASOURCE );
if ( dataSourceRef != null ) {
applyDataSource( dataSourceRef, true, integrationSettingsCopy, mergedSettings );
// EARLY EXIT!!
return;
}
}
if ( integrationSettingsCopy.containsKey( JPA_JTA_DATASOURCE ) ) {
DEPRECATION_LOGGER.deprecatedSetting( JPA_JTA_DATASOURCE, JAKARTA_JTA_DATASOURCE );
final Object dataSourceRef = integrationSettingsCopy.remove( JPA_JTA_DATASOURCE );
if ( dataSourceRef != null ) {
applyDataSource( dataSourceRef, true,integrationSettingsCopy, mergedSettings );
// EARLY EXIT!!
return;
}
}
if ( integrationSettingsCopy.containsKey( JAKARTA_NON_JTA_DATASOURCE ) ) {
final Object dataSourceRef = integrationSettingsCopy.remove( JAKARTA_NON_JTA_DATASOURCE );
applyDataSource( dataSourceRef, false, integrationSettingsCopy, mergedSettings );
// EARLY EXIT!!
return;
}
if ( integrationSettingsCopy.containsKey( JPA_NON_JTA_DATASOURCE ) ) {
DEPRECATION_LOGGER.deprecatedSetting( JPA_NON_JTA_DATASOURCE, JAKARTA_NON_JTA_DATASOURCE );
final Object dataSourceRef = integrationSettingsCopy.remove( JPA_NON_JTA_DATASOURCE );
applyDataSource( dataSourceRef, false, integrationSettingsCopy, mergedSettings );
// EARLY EXIT!!
return;
}
final var configuration = mergedSettings.getConfigurationValues();
if ( integrationSettingsCopy.containsKey( URL ) ) {
// hibernate-specific settings have precedence over the JPA ones
final Object integrationJdbcUrl = integrationSettingsCopy.get( URL );
if ( integrationJdbcUrl != null ) {
applyJdbcSettings(
integrationJdbcUrl,
coalesceSuppliedValues(
() -> getString( DRIVER, integrationSettingsCopy ),
() -> getString( JAKARTA_JDBC_DRIVER, integrationSettingsCopy ),
() -> {
final String driver = getString( JPA_JDBC_DRIVER, integrationSettingsCopy );
if ( driver != null ) {
DEPRECATION_LOGGER.deprecatedSetting( JPA_JDBC_DRIVER, JAKARTA_JDBC_DRIVER );
}
return driver;
},
() -> getString( DRIVER, configuration ),
() -> getString( JAKARTA_JDBC_DRIVER, configuration ),
() -> {
final String driver = getString( JPA_JDBC_DRIVER, configuration );
if ( driver != null ) {
DEPRECATION_LOGGER.deprecatedSetting( JPA_JDBC_DRIVER, JAKARTA_JDBC_DRIVER );
}
return driver;
}
),
integrationSettingsCopy,
mergedSettings
);
// EARLY EXIT!!
return;
}
}
if ( integrationSettingsCopy.containsKey( JAKARTA_JDBC_URL ) ) {
final Object integrationJdbcUrl = integrationSettingsCopy.get( JAKARTA_JDBC_URL );
if ( integrationJdbcUrl != null ) {
applyJdbcSettings(
integrationJdbcUrl,
coalesceSuppliedValues(
() -> getString( JAKARTA_JDBC_DRIVER, integrationSettingsCopy ),
() -> getString( JAKARTA_JDBC_DRIVER, configuration )
),
integrationSettingsCopy,
mergedSettings
);
// EARLY EXIT!!
return;
}
}
if ( integrationSettingsCopy.containsKey( JPA_JDBC_URL ) ) {
DEPRECATION_LOGGER.deprecatedSetting( JPA_JDBC_URL, JAKARTA_JDBC_URL );
final Object integrationJdbcUrl = integrationSettingsCopy.get( JPA_JDBC_URL );
if ( integrationJdbcUrl != null ) {
applyJdbcSettings(
integrationJdbcUrl,
coalesceSuppliedValues(
() -> {
final String driver = getString( JPA_JDBC_DRIVER, integrationSettingsCopy );
if ( driver != null ) {
DEPRECATION_LOGGER.deprecatedSetting( JPA_JDBC_DRIVER, JAKARTA_JDBC_DRIVER );
}
return driver;
},
() -> {
final String driver = getString( JPA_JDBC_DRIVER, configuration );
if ( driver != null ) {
DEPRECATION_LOGGER.deprecatedSetting( JPA_JDBC_DRIVER, JAKARTA_JDBC_DRIVER );
}
return driver;
}
),
integrationSettingsCopy,
mergedSettings
);
// EARLY EXIT!!
return;
}
}
final Object jtaDataSource = persistenceUnit.getJtaDataSource();
if ( jtaDataSource != null ) {
applyDataSource( jtaDataSource, true, integrationSettingsCopy, mergedSettings );
// EARLY EXIT!!
return;
}
final Object nonJtaDataSource = persistenceUnit.getNonJtaDataSource();
if ( nonJtaDataSource != null ) {
applyDataSource( nonJtaDataSource, false, integrationSettingsCopy, mergedSettings );
// EARLY EXIT!!
return;
}
if ( configuration.containsKey( URL ) ) {
final Object url = configuration.get( URL );
if ( url != null && !( url instanceof String stringUrl && isEmpty( stringUrl ) ) ) {
applyJdbcSettings(
url,
getString( DRIVER, configuration ),
integrationSettingsCopy,
mergedSettings
);
// EARLY EXIT!!
return;
}
}
if ( configuration.containsKey( JAKARTA_JDBC_URL ) ) {
final Object url = configuration.get( JAKARTA_JDBC_URL );
if ( url != null && !( url instanceof String stringUrl && isEmpty( stringUrl ) ) ) {
applyJdbcSettings(
url,
getString( JAKARTA_JDBC_DRIVER, configuration ),
integrationSettingsCopy,
mergedSettings
);
// EARLY EXIT!!
return;
}
}
if ( configuration.containsKey( JPA_JDBC_URL ) ) {
DEPRECATION_LOGGER.deprecatedSetting( JPA_JDBC_URL, JAKARTA_JDBC_URL );
final Object url = configuration.get( JPA_JDBC_URL );
if ( url != null && !( url instanceof String stringUrl && isEmpty( stringUrl ) ) ) {
final String driver = getString( JPA_JDBC_DRIVER, configuration );
if ( driver != null ) {
DEPRECATION_LOGGER.deprecatedSetting( JPA_JDBC_DRIVER, JAKARTA_JDBC_DRIVER );
}
applyJdbcSettings( url, driver, integrationSettingsCopy, mergedSettings );
}
}
}
private void applyDataSource(
Object dataSourceRef,
Boolean useJtaDataSource,
Map<?, ?> integrationSettingsCopy,
MergedSettings mergedSettings) {
// `IS_JTA_TXN_COORD` is a value set during `#normalizeTransactionCoordinator` to indicate whether
// the execution environment "is JTA" as best as it can tell..
//
// we use this value when JTA was not explicitly specified in regards to the DataSource
final boolean isJtaTransactionCoordinator =
(Boolean) mergedSettings.getConfigurationValues().remove( IS_JTA_TXN_COORD );
final boolean isJta = useJtaDataSource == null ? isJtaTransactionCoordinator : useJtaDataSource;
// add to EMF properties (questionable - see HHH-13432)
final String emfKey;
final String inverseEmfKey;
final String jakartaEmfKey;
final String jakartaInverseEmfKey;
if ( isJta ) {
emfKey = JPA_JTA_DATASOURCE;
jakartaEmfKey = JAKARTA_JTA_DATASOURCE;
inverseEmfKey = JPA_NON_JTA_DATASOURCE;
jakartaInverseEmfKey = JAKARTA_NON_JTA_DATASOURCE;
}
else {
emfKey = JPA_NON_JTA_DATASOURCE;
jakartaEmfKey = JAKARTA_NON_JTA_DATASOURCE;
inverseEmfKey = JPA_JTA_DATASOURCE;
jakartaInverseEmfKey = JAKARTA_JTA_DATASOURCE;
}
mergedSettings.getConfigurationValues().put( emfKey, dataSourceRef );
mergedSettings.getConfigurationValues().put( jakartaEmfKey, dataSourceRef );
// clear any settings logically overridden by this datasource
cleanUpConfigKeys(
integrationSettingsCopy,
mergedSettings,
inverseEmfKey,
jakartaInverseEmfKey,
JPA_JDBC_DRIVER,
JAKARTA_JDBC_DRIVER,
DRIVER,
JPA_JDBC_URL,
JAKARTA_JDBC_URL,
URL
);
// clean-up the entries in the "integration overrides" so they do not get picked
// up in the general "integration overrides" handling
cleanUpConfigKeys(
integrationSettingsCopy,
DATASOURCE,
JPA_JTA_DATASOURCE,
JAKARTA_JTA_DATASOURCE,
JPA_NON_JTA_DATASOURCE,
JAKARTA_NON_JTA_DATASOURCE
);
// add under Hibernate's DATASOURCE setting where the ConnectionProvider will find it
mergedSettings.getConfigurationValues().put( DATASOURCE, dataSourceRef );
}
private void cleanUpConfigKeys(Map<?, ?> integrationSettingsCopy, MergedSettings mergedSettings, String... keys) {
    // Drop each key from both the integration-overrides copy and the merged
    // settings, logging whenever a non-null value was actually removed.
    for ( String key : keys ) {
        if ( integrationSettingsCopy.remove( key ) != null ) {
            JPA_LOGGER.removedIntegrationOverride( key );
        }
        if ( mergedSettings.getConfigurationValues().remove( key ) != null ) {
            JPA_LOGGER.removedMergedSetting( key );
        }
    }
}
private void cleanUpConfigKeys(Map<?, ?> settings, String... keys) {
    // Silently drop every given key from the map (no logging variant).
    for ( String settingName : keys ) {
        settings.remove( settingName );
    }
}
private void applyJdbcSettings(
Object url,
String driver,
Map<?, ?> integrationSettingsCopy,
MergedSettings mergedSettings) {
mergedSettings.getConfigurationValues().put( URL, url );
mergedSettings.getConfigurationValues().put( JPA_JDBC_URL, url );
mergedSettings.getConfigurationValues().put( JAKARTA_JDBC_URL, url );
if ( driver != null ) {
mergedSettings.getConfigurationValues().put( DRIVER, driver );
mergedSettings.getConfigurationValues().put( JPA_JDBC_DRIVER, driver );
mergedSettings.getConfigurationValues().put( JAKARTA_JDBC_DRIVER, driver );
}
else {
mergedSettings.getConfigurationValues().remove( DRIVER );
mergedSettings.getConfigurationValues().remove( JPA_JDBC_DRIVER );
mergedSettings.getConfigurationValues().remove( JAKARTA_JDBC_DRIVER );
}
// clean up the integration-map values
cleanUpConfigKeys(
integrationSettingsCopy,
DRIVER,
JPA_JDBC_DRIVER,
JAKARTA_JDBC_DRIVER,
URL,
JPA_JDBC_URL,
JAKARTA_JDBC_URL,
USER,
JPA_JDBC_USER,
JAKARTA_JDBC_USER,
PASS,
JPA_JDBC_PASSWORD,
JAKARTA_JDBC_PASSWORD
);
cleanUpConfigKeys(
integrationSettingsCopy,
mergedSettings,
DATASOURCE,
JPA_JTA_DATASOURCE,
JAKARTA_JTA_DATASOURCE,
JPA_NON_JTA_DATASOURCE,
JAKARTA_NON_JTA_DATASOURCE
);
}
private void processHibernateConfigXmlResources(
StandardServiceRegistryBuilder serviceRegistryBuilder,
MergedSettings mergedSettings,
String cfgXmlResourceName) {
final var loadedConfig =
serviceRegistryBuilder.getConfigLoader()
.loadConfigXmlResource( cfgXmlResourceName );
mergedSettings.processHibernateConfigXmlResources( loadedConfig );
serviceRegistryBuilder.getAggregatedCfgXml().merge( loadedConfig );
}
/**
 * Parses a cache region setting value of the form {@code usage[,region[,lazy]]}
 * (tokens may be separated by ';', ',' or space).
 *
 * @param role the entity/collection role the region applies to
 * @param value the raw setting value to parse
 * @param cacheType whether the region is for an entity or a collection
 * @return the parsed definition
 * @throws PersistenceException (via the helper) when the value has no tokens at all
 */
private CacheRegionDefinition parseCacheRegionDefinitionEntry(
        String role, String value, CacheRegionType cacheType) {
    final var params = new StringTokenizer( value, ";, " );
    if ( !params.hasMoreTokens() ) {
        throw illegalCacheRegionDefinitionException( role, value, cacheType );
    }
    else {
        final String usage = params.nextToken();
        // region token is optional
        final String region = params.hasMoreTokens() ? params.nextToken() : null;
        // Lazy-property flag applies only to entity regions: it is true when
        // the third token is absent OR equals "all" (case-insensitive).
        // Note: hasMoreTokens()/nextToken() here consume the third token.
        final boolean lazyProperty =
                cacheType == CacheRegionType.ENTITY
                        && ( !params.hasMoreTokens() || "all".equalsIgnoreCase( params.nextToken() ) );
        return new CacheRegionDefinition( cacheType, role, usage, region, lazyProperty );
    }
}
private PersistenceException illegalCacheRegionDefinitionException(
        String role, String value, CacheRegionType cacheType) {
    // Reconstruct the offending setting name (prefix + role) so the message
    // shows exactly what was configured.
    final String prefix =
            cacheType == CacheRegionType.ENTITY ? CLASS_CACHE_PREFIX : COLLECTION_CACHE_PREFIX;
    final String message = "Cache region configuration '"
            + prefix + '.' + role + ' ' + value
            + "' not of form 'usage[,region[,lazy]]' "
            + exceptionHeader();
    return new PersistenceException( message );
}
private void applyMappingResources(MetadataSources metadataSources) {
// todo : where in the heck are `org.hibernate.jpa.boot.spi.PersistenceUnitDescriptor.getManagedClassNames` handled?!?
// final ClassLoaderService classLoaderService = ssr.getService( ClassLoaderService.class );
//
// // todo : make sure MetadataSources/Metadata are capable of handling duplicate sources
//
// // explicit persistence unit mapping files listings
// if ( persistenceUnit.getMappingFileNames() != null ) {
// for ( String name : persistenceUnit.getMappingFileNames() ) {
// metadataSources.addResource( name );
// }
// }
//
// // explicit persistence unit managed | loader |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/mapping/generated/sqldefault/ImmutableDefaultTest.java | {
"start": 2051,
"end": 2399
} | class ____ {
// First half of the composite identifier.
@Id
private BigDecimal unitPrice;
// Second half of the composite identifier; DDL column default is 1.
@Id @ColumnDefault(value = "1")
private int quantity;
// Value generated by the database on insert (DDL default 'new');
// marked immutable so Hibernate never includes it in updates.
@Generated @Immutable
@ColumnDefault(value = "'new'")
private String status;
// No-arg constructor — presumably required for Hibernate instantiation.
public OrderLine() {}
// Convenience constructor for tests; status is left to the database default.
public OrderLine(BigDecimal unitPrice, int quantity) {
    this.unitPrice = unitPrice;
    this.quantity = quantity;
}
}
}
| OrderLine |
java | apache__maven | compat/maven-model-builder/src/main/java/org/apache/maven/model/building/DefaultModelProblemCollector.java | {
"start": 1719,
"end": 5680
} | class ____ implements ModelProblemCollectorExt {
private final ModelBuildingResult result;
private List<ModelProblem> problems;
private String source;
private Model sourceModel;
private Model rootModel;
private Set<ModelProblem.Severity> severities = EnumSet.noneOf(ModelProblem.Severity.class);
DefaultModelProblemCollector(ModelBuildingResult result) {
this.result = result;
this.problems = result.getProblems();
for (ModelProblem problem : this.problems) {
severities.add(problem.getSeverity());
}
}
public boolean hasFatalErrors() {
return severities.contains(ModelProblem.Severity.FATAL);
}
public boolean hasErrors() {
return severities.contains(ModelProblem.Severity.ERROR) || severities.contains(ModelProblem.Severity.FATAL);
}
@Override
public List<ModelProblem> getProblems() {
return problems;
}
public void setSource(String source) {
this.source = source;
this.sourceModel = null;
}
public void setSource(Model source) {
this.sourceModel = source;
this.source = null;
if (rootModel == null) {
rootModel = source;
}
}
private String getSource() {
if (source == null && sourceModel != null) {
source = ModelProblemUtils.toPath(sourceModel);
}
return source;
}
private String getModelId() {
return ModelProblemUtils.toId(sourceModel);
}
public void setRootModel(Model rootModel) {
this.rootModel = rootModel;
}
public Model getRootModel() {
return rootModel;
}
public String getRootModelId() {
return ModelProblemUtils.toId(rootModel);
}
public void add(ModelProblem problem) {
problems.add(problem);
severities.add(problem.getSeverity());
}
public void addAll(List<ModelProblem> problems) {
this.problems.addAll(problems);
for (ModelProblem problem : problems) {
severities.add(problem.getSeverity());
}
}
@Override
public void add(ModelProblemCollectorRequest req) {
int line = -1;
int column = -1;
String source = null;
String modelId = null;
if (req.getLocation() != null) {
line = req.getLocation().getLineNumber();
column = req.getLocation().getColumnNumber();
if (req.getLocation().getSource() != null) {
modelId = req.getLocation().getSource().getModelId();
source = req.getLocation().getSource().getLocation();
}
}
if (modelId == null) {
modelId = getModelId();
source = getSource();
}
if (line <= 0 && column <= 0 && req.getException() instanceof ModelParseException e) {
line = e.getLineNumber();
column = e.getColumnNumber();
}
ModelProblem problem = new DefaultModelProblem(
req.getMessage(),
req.getSeverity(),
req.getVersion(),
source,
line,
column,
modelId,
req.getException());
add(problem);
}
public ModelBuildingException newModelBuildingException() {
ModelBuildingResult result = this.result;
if (result.getModelIds().isEmpty()) {
DefaultModelBuildingResult tmp = new DefaultModelBuildingResult();
tmp.setEffectiveModel(result.getEffectiveModel());
tmp.setProblems(getProblems());
tmp.setActiveExternalProfiles(result.getActiveExternalProfiles());
String id = getRootModelId();
tmp.addModelId(id);
tmp.setRawModel(id, getRootModel());
result = tmp;
}
return new ModelBuildingException(result);
}
}
| DefaultModelProblemCollector |
java | apache__flink | flink-runtime/src/test/java/org/apache/flink/runtime/memory/OpaqueMemoryResourceTest.java | {
"start": 1052,
"end": 1472
} | class ____ {
@Test
void testCloseIsIdempotent() throws Exception {
final CountingCloseable disposer = new CountingCloseable();
final OpaqueMemoryResource<Object> resource =
new OpaqueMemoryResource<>(new Object(), 10, disposer);
resource.close();
resource.close();
assertThat(disposer.count).isOne();
}
private static final | OpaqueMemoryResourceTest |
java | quarkusio__quarkus | extensions/resteasy-reactive/rest/deployment/src/test/java/io/quarkus/resteasy/reactive/server/test/resteasy/async/filters/AsyncPreMatchRequestFilter1.java | {
"start": 233,
"end": 382
} | class ____ extends AsyncRequestFilter {
public AsyncPreMatchRequestFilter1() {
super("PreMatchFilter1");
}
}
| AsyncPreMatchRequestFilter1 |
java | spring-projects__spring-framework | spring-beans/src/main/java/org/springframework/beans/factory/wiring/BeanWiringInfoResolver.java | {
"start": 922,
"end": 1159
} | interface ____ be driven by
* the AspectJ pointcut in the relevant concrete aspect.
*
* <p>Metadata resolution strategy can be pluggable. A good default is
* {@link ClassNameBeanWiringInfoResolver}, which uses the fully-qualified
* | will |
java | spring-projects__spring-boot | core/spring-boot/src/test/java/org/springframework/boot/context/properties/bind/BoundPropertiesTrackingBindHandlerTests.java | {
"start": 2309,
"end": 2447
} | class ____ {
private int age;
int getAge() {
return this.age;
}
void setAge(int age) {
this.age = age;
}
}
}
| ExampleBean |
java | spring-projects__spring-security | core/src/test/java/org/springframework/security/aot/hint/PrePostAuthorizeHintsRegistrarTests.java | {
"start": 8052,
"end": 8189
} | class ____ {
@PreAuthorize("@foo.bar()")
void method1() {
}
}
@PostAuthorize("@authz.check()")
static | PreAuthorizeOnTypeAndMethod |
java | apache__flink | flink-table/flink-sql-gateway/src/test/java/org/apache/flink/table/gateway/service/utils/MockHttpServer.java | {
"start": 2618,
"end": 3468
} | class ____ implements HttpHandler {
private static final String CONTENT_TYPE_KEY = "Content-Type";
private static final String CONTENT_TYPE_VALUE = "application/octet-stream";
private final File file;
public DownloadFileHttpHandler(File fileToDownload) {
Preconditions.checkArgument(
fileToDownload.exists(), "The file to be download not exists!");
this.file = fileToDownload;
}
@Override
public void handle(HttpExchange exchange) throws IOException {
exchange.getResponseHeaders().add(CONTENT_TYPE_KEY, CONTENT_TYPE_VALUE);
exchange.sendResponseHeaders(HttpURLConnection.HTTP_OK, file.length());
copy(this.file, exchange.getResponseBody());
exchange.close();
}
}
}
| DownloadFileHttpHandler |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/ha/TestRetryCacheWithHA.java | {
"start": 9525,
"end": 10066
} | class ____ {
private final String name;
final DFSClient client;
int expectedUpdateCount = 0;
AtMostOnceOp(String name, DFSClient client) {
this.name = name;
this.client = client;
}
abstract void prepare() throws Exception;
abstract void invoke() throws Exception;
abstract boolean checkNamenodeBeforeReturn() throws Exception;
abstract Object getResult();
int getExpectedCacheUpdateCount() {
return expectedUpdateCount;
}
}
/** createSnapshot operaiton */
| AtMostOnceOp |
java | apache__camel | components/camel-atom/src/test/java/org/apache/camel/component/atom/AtomPollingConsumerTest.java | {
"start": 1529,
"end": 3248
} | class ____ extends CamelTestSupport {
@Test
void testNoSplitEntries() throws Exception {
MockEndpoint mock = getMockEndpoint("mock:result");
mock.expectedMessageCount(1);
mock.assertIsSatisfied();
Exchange exchange = mock.getExchanges().get(0);
Message in = exchange.getIn();
assertNotNull(in);
assertInstanceOf(List.class, in.getBody());
assertInstanceOf(List.class, in.getHeader(AtomConstants.ATOM_FEED));
List feed = in.getHeader(AtomConstants.ATOM_FEED, List.class);
Item item = (Item) feed.get(0);
assertEquals("James Strachan", item.getAuthor().get());
List<?> entries = in.getBody(List.class);
assertEquals(7, entries.size());
}
@Test
void testUsingAtomUriParameter() throws Exception {
MockEndpoint mock = getMockEndpoint("mock:result2");
mock.expectedMessageCount(1);
mock.assertIsSatisfied();
}
@Test
void testNoCamelParametersInFeedUri() {
AtomEndpoint endpoint = context.getEndpoint("atom:file:src/test/data/feed.atom?splitEntries=false", AtomEndpoint.class);
assertEquals("file:src/test/data/feed.atom", endpoint.getFeedUri());
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
public void configure() {
from("atom:file:src/test/data/feed.atom?splitEntries=false").to("mock:result");
// this is a bit weird syntax that normally is not using the feedUri parameter
from("atom:?feedUri=file:src/test/data/feed.atom&splitEntries=false").to("mock:result2");
}
};
}
}
| AtomPollingConsumerTest |
java | micronaut-projects__micronaut-core | inject-java/src/test/groovy/io/micronaut/inject/any/PetOwner.java | {
"start": 342,
"end": 621
} | class ____ {
@Inject @Any Dog<?> dog;
@Inject @Any
BeanProvider<Dog<?>> dogBeanProvider;
@Inject @Any
BeanProvider<Cat> catBeanProvider;
@Inject @Named("poodle") Dog<?> poodle;
@Inject @Named("terrier") BeanProvider<Dog<?>> terrierProvider;
}
| PetOwner |
java | apache__maven | compat/maven-compat/src/main/java/org/apache/maven/toolchain/ToolchainsBuilder.java | {
"start": 1253,
"end": 1832
} | interface ____ {
/**
* Builds the toolchains model from the configured toolchain files.
*
* @param userToolchainsFile The path to the toolchains file, may be <code>null</code> to disable parsing.
* @return The toolchains model or <code>null</code> if no toolchain file was configured or the configured file does
* not exist.
* @throws MisconfiguredToolchainException If the toolchain file exists but cannot be parsed.
*/
PersistedToolchains build(File userToolchainsFile) throws MisconfiguredToolchainException;
}
| ToolchainsBuilder |
java | apache__flink | flink-filesystems/flink-hadoop-fs/src/test/java/org/apache/flink/runtime/fs/hdfs/HadoopFreeTests.java | {
"start": 1303,
"end": 1380
} | class ____ only instantiated via reflection
@SuppressWarnings("unused")
public | is |
java | spring-projects__spring-framework | spring-core/src/main/java/org/springframework/asm/Opcodes.java | {
"start": 14197,
"end": 20651
} | class
____ ACC_DEPRECATED = 0x20000; // class, field, method
// Possible values for the type operand of the NEWARRAY instruction.
// See https://docs.oracle.com/javase/specs/jvms/se9/html/jvms-6.html#jvms-6.5.newarray.
int T_BOOLEAN = 4;
int T_CHAR = 5;
int T_FLOAT = 6;
int T_DOUBLE = 7;
int T_BYTE = 8;
int T_SHORT = 9;
int T_INT = 10;
int T_LONG = 11;
// Possible values for the reference_kind field of CONSTANT_MethodHandle_info structures.
// See https://docs.oracle.com/javase/specs/jvms/se9/html/jvms-4.html#jvms-4.4.8.
int H_GETFIELD = 1;
int H_GETSTATIC = 2;
int H_PUTFIELD = 3;
int H_PUTSTATIC = 4;
int H_INVOKEVIRTUAL = 5;
int H_INVOKESTATIC = 6;
int H_INVOKESPECIAL = 7;
int H_NEWINVOKESPECIAL = 8;
int H_INVOKEINTERFACE = 9;
// ASM specific stack map frame types, used in {@link ClassVisitor#visitFrame}.
/** An expanded frame. See {@link ClassReader#EXPAND_FRAMES}. */
int F_NEW = -1;
/** A compressed frame with complete frame data. */
int F_FULL = 0;
/**
* A compressed frame where locals are the same as the locals in the previous frame, except that
* additional 1-3 locals are defined, and with an empty stack.
*/
int F_APPEND = 1;
/**
* A compressed frame where locals are the same as the locals in the previous frame, except that
* the last 1-3 locals are absent and with an empty stack.
*/
int F_CHOP = 2;
/**
* A compressed frame with exactly the same locals as the previous frame and with an empty stack.
*/
int F_SAME = 3;
/**
* A compressed frame with exactly the same locals as the previous frame and with a single value
* on the stack.
*/
int F_SAME1 = 4;
// Standard stack map frame element types, used in {@link ClassVisitor#visitFrame}.
Integer TOP = Frame.ITEM_TOP;
Integer INTEGER = Frame.ITEM_INTEGER;
Integer FLOAT = Frame.ITEM_FLOAT;
Integer DOUBLE = Frame.ITEM_DOUBLE;
Integer LONG = Frame.ITEM_LONG;
Integer NULL = Frame.ITEM_NULL;
Integer UNINITIALIZED_THIS = Frame.ITEM_UNINITIALIZED_THIS;
// The JVM opcode values (with the MethodVisitor method name used to visit them in comment, and
// where '-' means 'same method name as on the previous line').
// See https://docs.oracle.com/javase/specs/jvms/se9/html/jvms-6.html.
int NOP = 0; // visitInsn
int ACONST_NULL = 1; // -
int ICONST_M1 = 2; // -
int ICONST_0 = 3; // -
int ICONST_1 = 4; // -
int ICONST_2 = 5; // -
int ICONST_3 = 6; // -
int ICONST_4 = 7; // -
int ICONST_5 = 8; // -
int LCONST_0 = 9; // -
int LCONST_1 = 10; // -
int FCONST_0 = 11; // -
int FCONST_1 = 12; // -
int FCONST_2 = 13; // -
int DCONST_0 = 14; // -
int DCONST_1 = 15; // -
int BIPUSH = 16; // visitIntInsn
int SIPUSH = 17; // -
int LDC = 18; // visitLdcInsn
int ILOAD = 21; // visitVarInsn
int LLOAD = 22; // -
int FLOAD = 23; // -
int DLOAD = 24; // -
int ALOAD = 25; // -
int IALOAD = 46; // visitInsn
int LALOAD = 47; // -
int FALOAD = 48; // -
int DALOAD = 49; // -
int AALOAD = 50; // -
int BALOAD = 51; // -
int CALOAD = 52; // -
int SALOAD = 53; // -
int ISTORE = 54; // visitVarInsn
int LSTORE = 55; // -
int FSTORE = 56; // -
int DSTORE = 57; // -
int ASTORE = 58; // -
int IASTORE = 79; // visitInsn
int LASTORE = 80; // -
int FASTORE = 81; // -
int DASTORE = 82; // -
int AASTORE = 83; // -
int BASTORE = 84; // -
int CASTORE = 85; // -
int SASTORE = 86; // -
int POP = 87; // -
int POP2 = 88; // -
int DUP = 89; // -
int DUP_X1 = 90; // -
int DUP_X2 = 91; // -
int DUP2 = 92; // -
int DUP2_X1 = 93; // -
int DUP2_X2 = 94; // -
int SWAP = 95; // -
int IADD = 96; // -
int LADD = 97; // -
int FADD = 98; // -
int DADD = 99; // -
int ISUB = 100; // -
int LSUB = 101; // -
int FSUB = 102; // -
int DSUB = 103; // -
int IMUL = 104; // -
int LMUL = 105; // -
int FMUL = 106; // -
int DMUL = 107; // -
int IDIV = 108; // -
int LDIV = 109; // -
int FDIV = 110; // -
int DDIV = 111; // -
int IREM = 112; // -
int LREM = 113; // -
int FREM = 114; // -
int DREM = 115; // -
int INEG = 116; // -
int LNEG = 117; // -
int FNEG = 118; // -
int DNEG = 119; // -
int ISHL = 120; // -
int LSHL = 121; // -
int ISHR = 122; // -
int LSHR = 123; // -
int IUSHR = 124; // -
int LUSHR = 125; // -
int IAND = 126; // -
int LAND = 127; // -
int IOR = 128; // -
int LOR = 129; // -
int IXOR = 130; // -
int LXOR = 131; // -
int IINC = 132; // visitIincInsn
int I2L = 133; // visitInsn
int I2F = 134; // -
int I2D = 135; // -
int L2I = 136; // -
int L2F = 137; // -
int L2D = 138; // -
int F2I = 139; // -
int F2L = 140; // -
int F2D = 141; // -
int D2I = 142; // -
int D2L = 143; // -
int D2F = 144; // -
int I2B = 145; // -
int I2C = 146; // -
int I2S = 147; // -
int LCMP = 148; // -
int FCMPL = 149; // -
int FCMPG = 150; // -
int DCMPL = 151; // -
int DCMPG = 152; // -
int IFEQ = 153; // visitJumpInsn
int IFNE = 154; // -
int IFLT = 155; // -
int IFGE = 156; // -
int IFGT = 157; // -
int IFLE = 158; // -
int IF_ICMPEQ = 159; // -
int IF_ICMPNE = 160; // -
int IF_ICMPLT = 161; // -
int IF_ICMPGE = 162; // -
int IF_ICMPGT = 163; // -
int IF_ICMPLE = 164; // -
int IF_ACMPEQ = 165; // -
int IF_ACMPNE = 166; // -
int GOTO = 167; // -
int JSR = 168; // -
int RET = 169; // visitVarInsn
int TABLESWITCH = 170; // visiTableSwitchInsn
int LOOKUPSWITCH = 171; // visitLookupSwitch
int IRETURN = 172; // visitInsn
int LRETURN = 173; // -
int FRETURN = 174; // -
int DRETURN = 175; // -
int ARETURN = 176; // -
int RETURN = 177; // -
int GETSTATIC = 178; // visitFieldInsn
int PUTSTATIC = 179; // -
int GETFIELD = 180; // -
int PUTFIELD = 181; // -
int INVOKEVIRTUAL = 182; // visitMethodInsn
int INVOKESPECIAL = 183; // -
int INVOKESTATIC = 184; // -
int INVOKEINTERFACE = 185; // -
int INVOKEDYNAMIC = 186; // visitInvokeDynamicInsn
int NEW = 187; // visitTypeInsn
int NEWARRAY = 188; // visitIntInsn
int ANEWARRAY = 189; // visitTypeInsn
int ARRAYLENGTH = 190; // visitInsn
int ATHROW = 191; // -
int CHECKCAST = 192; // visitTypeInsn
int INSTANCEOF = 193; // -
int MONITORENTER = 194; // visitInsn
int MONITOREXIT = 195; // -
int MULTIANEWARRAY = 197; // visitMultiANewArrayInsn
int IFNULL = 198; // visitJumpInsn
int IFNONNULL = 199; // -
}
| int |
java | google__dagger | dagger-compiler/main/java/dagger/internal/codegen/xprocessing/XTypes.java | {
"start": 28106,
"end": 29470
} | class ____ extends SimpleTypeVisitor8<Void, Set<Element>> {
static final TypeResolutionVisitor INSTANCE = new TypeResolutionVisitor();
@Override
public Void visitDeclared(DeclaredType t, Set<Element> visited) {
if (!visited.add(t.asElement())) {
return null;
}
if (MoreElements.asType(t.asElement()).getQualifiedName().toString().contains("$")) {
// Force symbol completion/resolution on the type by calling Element#getKind().
t.asElement().getKind();
}
t.getTypeArguments().forEach(arg -> arg.accept(this, visited));
return null;
}
@Override
public Void visitError(ErrorType t, Set<Element> visited) {
visitDeclared(t, visited);
return null;
}
@Override
public Void visitArray(ArrayType t, Set<Element> visited) {
t.getComponentType().accept(this, visited);
return null;
}
@Override
public Void visitWildcard(WildcardType t, Set<Element> visited) {
if (t.getExtendsBound() != null) {
t.getExtendsBound().accept(this, visited);
}
if (t.getSuperBound() != null) {
t.getSuperBound().accept(this, visited);
}
return null;
}
@Override
protected Void defaultAction(TypeMirror e, Set<Element> visited) {
return null;
}
}
private XTypes() {}
}
| TypeResolutionVisitor |
java | elastic__elasticsearch | x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/action/GetCheckpointActionResponseTests.java | {
"start": 624,
"end": 1733
} | class ____ extends AbstractWireSerializingTestCase<Response> {
public static Response randomCheckpointResponse() {
Map<String, long[]> checkpointsByIndex = new TreeMap<>();
int indices = randomIntBetween(1, 10);
for (int i = 0; i < indices; ++i) {
List<Long> checkpoints = new ArrayList<>();
int shards = randomIntBetween(1, 20);
for (int j = 0; j < shards; ++j) {
checkpoints.add(randomLongBetween(0, 1_000_000));
}
checkpointsByIndex.put(randomAlphaOfLengthBetween(1, 10), checkpoints.stream().mapToLong(l -> l).toArray());
}
return new Response(checkpointsByIndex);
}
@Override
protected Reader<Response> instanceReader() {
return Response::new;
}
@Override
protected Response createTestInstance() {
return randomCheckpointResponse();
}
@Override
protected Response mutateInstance(Response instance) {
return null;// TODO implement https://github.com/elastic/elasticsearch/issues/25929
}
}
| GetCheckpointActionResponseTests |
java | apache__hadoop | hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/output/committer/manifest/stages/LoadManifestsStage.java | {
"start": 12547,
"end": 15249
} | class ____ implements IOStatisticsSource {
/**
* Aggregate IOStatistics.
*/
private final IOStatisticsSnapshot iostatistics = snapshotIOStatistics();
/**
* Task IDs.
*/
private final List<String> taskIDs = new ArrayList<>();
/**
* Task IDs.
*/
private final List<String> taskAttemptIDs = new ArrayList<>();
/**
* How many manifests were loaded.
*/
private AtomicLong manifestCount = new AtomicLong();
/**
* Total number of files to rename.
*/
private AtomicLong fileCount = new AtomicLong();
/**
* Total number of directories which may need
* to be created.
* As there is no dedup, this is likely to be
* a (major) overestimate.
*/
private AtomicLong directoryCount = new AtomicLong();
/**
* Total amount of data to be committed.
*/
private AtomicLong totalFileSize = new AtomicLong();
/**
* Get the IOStatistics.
* @return aggregate IOStatistics
*/
@Override
public IOStatisticsSnapshot getIOStatistics() {
return iostatistics;
}
public long getFileCount() {
return fileCount.get();
}
public long getDirectoryCount() {
return directoryCount.get();
}
public long getTotalFileSize() {
return totalFileSize.get();
}
public long getManifestCount() {
return manifestCount.get();
}
public List<String> getTaskIDs() {
return taskIDs;
}
public List<String> getTaskAttemptIDs() {
return taskAttemptIDs;
}
/**
* Add all statistics; synchronized.
* @param manifest manifest to add.
*/
public synchronized void add(TaskManifest manifest) {
manifestCount.incrementAndGet();
iostatistics.aggregate(manifest.getIOStatistics());
fileCount.addAndGet(manifest.getFilesToCommit().size());
directoryCount.addAndGet(manifest.getDestDirectories().size());
totalFileSize.addAndGet(manifest.getTotalFileSize());
taskIDs.add(manifest.getTaskID());
taskAttemptIDs.add(manifest.getTaskAttemptID());
}
/**
* To String includes all summary info except statistics.
* @return string value
*/
@Override
public String toString() {
final StringBuilder sb = new StringBuilder(
"SummaryInfo{");
sb.append("manifestCount=").append(getManifestCount());
sb.append(", fileCount=").append(getFileCount());
sb.append(", directoryCount=").append(getDirectoryCount());
sb.append(", totalFileSize=").append(
byteCountToDisplaySize(getTotalFileSize()));
sb.append('}');
return sb.toString();
}
}
}
| SummaryInfo |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/keymanytoone/bidir/ondelete/KeyManyToOneCascadeDeleteTest.java | {
"start": 916,
"end": 2104
} | class ____ {
@AfterEach
public void tearDown(SessionFactoryScope scope) {
scope.getSessionFactory().getSchemaManager().truncate();
}
@Test
@JiraKey(value = "HHH-7807")
public void testEmbeddedCascadeRemoval(SessionFactoryScope scope) {
scope.inTransaction(
session -> {
Customer customer = new Customer( "Lukasz" );
Order order1 = new Order( customer, 1L );
order1.setItem( "laptop" );
Order order2 = new Order( customer, 2L );
order2.setItem( "printer" );
session.persist( customer );
session.persist( order1 );
session.persist( order2 );
session.getTransaction().commit();
// Removing customer cascades to associated orders.
session.getTransaction().begin();
customer = session.get( Customer.class, customer.getId() );
session.remove( customer );
session.getTransaction().commit();
session.getTransaction().begin();
assertThat(
session.createQuery( "select count(*) from Customer" ).uniqueResult(),
is( 0L )
);
assertThat(
session.createQuery( "select count(*) from Order" ).uniqueResult(),
is( 0L )
);
}
);
}
}
| KeyManyToOneCascadeDeleteTest |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.