language
stringclasses 1
value | repo
stringclasses 60
values | path
stringlengths 22
294
| class_span
dict | source
stringlengths 13
1.16M
| target
stringlengths 1
113
|
|---|---|---|---|---|---|
java
|
quarkusio__quarkus
|
extensions/kubernetes/vanilla/deployment/src/main/java/io/quarkus/kubernetes/deployment/KubernetesCommonHelper.java
|
{
"start": 5836,
"end": 63086
}
|
class ____ {
// Class-wide logger used for debug-level diagnostics (e.g. project-creation failures).
private static final Logger LOG = Logger.getLogger(KubernetesCommonHelper.class);
// Wildcard sentinel: passed where a null value means "apply to any resource/container".
private static final String ANY = null;
// Pattern used to resolve the runner artifact name: <base-name><runner-suffix>.jar
private static final String OUTPUT_ARTIFACT_FORMAT = "%s%s.jar";
// Resource kinds that receive Prometheus scrape annotations.
private static final String[] PROMETHEUS_ANNOTATION_TARGETS = { "Service",
"Deployment", "DeploymentConfig" };
// Default cluster role bound when kubernetes-client RBAC generation is enabled.
private static final String DEFAULT_ROLE_NAME_VIEW = "view";
private static final String SCHEME_HTTP = "HTTP";
private static final String SCHEME_HTTPS = "HTTPS";
/**
 * Resolves the runner artifact path from the output target and package configuration,
 * then delegates to {@link #createProject(ApplicationInfoBuildItem, Optional, Path)}.
 *
 * @param app the application info (name/version)
 * @param customProjectRoot optional user-supplied project root override
 * @param outputTarget the build output target providing directory and base name
 * @param packageConfig the packaging configuration providing the runner suffix
 * @return the dekorate project, or empty if it could not be created
 */
public static Optional<Project> createProject(ApplicationInfoBuildItem app,
        Optional<CustomProjectRootBuildItem> customProjectRoot, OutputTargetBuildItem outputTarget,
        PackageConfig packageConfig) {
    // Artifact name follows "<base-name><runner-suffix>.jar".
    String runnerArtifact = String.format(OUTPUT_ARTIFACT_FORMAT, outputTarget.getBaseName(),
            packageConfig.computedRunnerSuffix());
    Path artifactPath = outputTarget.getOutputDirectory().resolve(runnerArtifact);
    return createProject(app, customProjectRoot, artifactPath);
}
/**
 * Lets dekorate build a {@link Project} from the artifact and then overlays the name/version
 * from {@link ApplicationInfoBuildItem}; a custom project root, when present, replaces the
 * detected one.
 *
 * @param app the application info (name/version)
 * @param customProjectRoot optional user-supplied project root override
 * @param artifactPath path to the runner artifact used for project detection
 * @return the project, or empty when creation fails (failure is logged at debug level)
 */
public static Optional<Project> createProject(ApplicationInfoBuildItem app,
        Optional<CustomProjectRootBuildItem> customProjectRoot, Path artifactPath) {
    final var appName = app.getName();
    try {
        final var detected = FileProjectFactory.create(artifactPath.toFile());
        final var detectedBuild = detected.getBuildInfo();
        // Keep the detected build tool info but substitute Quarkus-provided coordinates.
        final var buildInfo = new BuildInfo(appName, app.getVersion(),
                "jar", detectedBuild.getBuildTool(),
                detectedBuild.getBuildToolVersion(),
                artifactPath.toAbsolutePath(),
                detectedBuild.getClassOutputDir(),
                detectedBuild.getResourceDir());
        final var root = customProjectRoot.isPresent() ? customProjectRoot.get().getRoot() : detected.getRoot();
        return Optional.of(new Project(root, buildInfo, detected.getScmInfo()));
    } catch (Exception e) {
        LOG.debugv(e, "Couldn't create project for {0} application", appName);
        return Optional.empty();
    }
}
/**
 * Resolves the port matching the configured ingress target port.
 *
 * @param ports ports contributed by extension build items
 * @param config the Kubernetes configuration (supplies the ingress target port name)
 * @return the matching port, if any
 */
public static Optional<Port> getPort(List<KubernetesPortBuildItem> ports, KubernetesConfig config) {
return getPort(ports, config, config.ingress().targetPort());
}
/**
 * Resolves the port with the given name from the combination of build-item and configured ports.
 *
 * @param ports ports contributed by extension build items
 * @param config the platform configuration holding user-declared ports
 * @param targetPort the name of the port to look up
 * @return the first port whose name equals {@code targetPort}, if any
 */
public static Optional<Port> getPort(List<KubernetesPortBuildItem> ports, PlatformConfiguration config, String targetPort) {
return combinePorts(ports, config).values().stream()
.filter(distinct(Port::getName))
.filter(p -> p.getName().equals(targetPort))
.findFirst();
}
/**
 * Combines ports contributed by extensions with ports from user configuration.
 * <p>
 * When the same port name appears in both, configured non-zero/non-empty values for host port,
 * container port and path win over the build-item values; otherwise the build-item values are kept.
 *
 * @param ports ports contributed by extension build items
 * @param config the platform configuration holding user-declared ports
 * @return map of port name to the combined (active) port definition
 */
public static Map<String, Port> combinePorts(List<KubernetesPortBuildItem> ports,
PlatformConfiguration config) {
Map<String, Port> allPorts = new HashMap<>();
Map<String, Port> activePorts = new HashMap<>();
allPorts.putAll(ports.stream()
.map(p -> new PortBuilder().withName(p.getName()).withContainerPort(p.getPort()).build())
.collect(Collectors.toMap(Port::getName, Function.identity(), (first, second) -> first))); //prevent duplicate keys
activePorts.putAll(verifyPorts(ports)
.entrySet().stream()
.map(e -> new PortBuilder().withName(e.getKey()).withContainerPort(e.getValue()).build())
.collect(Collectors.toMap(Port::getName, Function.identity(), (first, second) -> first))); //prevent duplicate keys
config.ports().entrySet().forEach(e -> {
String name = e.getKey();
Port configuredPort = PortConverter.convert(e);
Port buildItemPort = allPorts.get(name);
// Merge rule: a configured value is used only when it is actually set (non-null / non-zero / non-empty).
Port combinedPort = buildItemPort == null ? configuredPort
: new PortBuilder()
.withName(name)
.withHostPort(configuredPort.getHostPort() != null && configuredPort.getHostPort() != 0
? configuredPort.getHostPort()
: buildItemPort.getHostPort())
.withContainerPort(
configuredPort.getContainerPort() != null && configuredPort.getContainerPort() != 0
? configuredPort.getContainerPort()
: buildItemPort.getContainerPort())
.withPath(Strings.isNotNullOrEmpty(configuredPort.getPath()) ? configuredPort.getPath()
: buildItemPort.getPath())
.build();
activePorts.put(name, combinedPort);
});
return activePorts;
}
/**
 * Emits informational messages (via {@code PropertyUtil.printMessages}) for each enabled port,
 * pointing users at the {@code quarkus.<target>.ports.<name>.container-port} property that
 * controls its container port at build time.
 *
 * @param target the deployment target (e.g. kubernetes, openshift, knative)
 * @param ports ports contributed by extension build items
 * @param configuration the platform configuration holding user-declared ports
 */
public static void printMessageAboutPortsThatCantChange(String target, List<KubernetesPortBuildItem> ports,
PlatformConfiguration configuration) {
ports.forEach(port -> {
// A port is considered enabled if the build item says so or the user configured it explicitly.
boolean enabled = port.isEnabled() || configuration.ports().containsKey(port.getName());
if (enabled) {
String name = "quarkus." + target + ".ports." + port.getName() + ".container-port";
Optional<Integer> value = Optional.ofNullable(configuration.ports().get(port.getName()))
.map(p -> p.containerPort())
.filter(OptionalInt::isPresent)
.map(OptionalInt::getAsInt);
@SuppressWarnings({ "rawtypes", "unchecked" })
Property<Integer> kubernetesPortProperty = new Property(name, Integer.class, value, null, false);
PropertyUtil.printMessages(String.format("The container port %s", port.getName()), target,
kubernetesPortProperty,
port.getSource());
}
});
}
/**
 * Creates the common decorator build items shared by all deployment targets: labels, annotations,
 * pod/container customizations, mounts and volumes, command/args, image pull secrets, probes
 * and RBAC resources.
 *
 * @param target the deployment target (e.g. kubernetes, openshift, knative)
 * @param name the name of the resource to accept the configuration
 * @param config the {@link PlatformConfiguration} instance
 * @return the list of decorators to apply for this target
 */
public static List<DecoratorBuildItem> createDecorators(Optional<Project> project, String target, String name,
        Optional<KubernetesNamespaceBuildItem> namespace,
        PlatformConfiguration config,
        Optional<MetricsCapabilityBuildItem> metricsConfiguration,
        Optional<KubernetesClientCapabilityBuildItem> kubernetesClientConfiguration,
        List<KubernetesAnnotationBuildItem> annotations,
        List<KubernetesLabelBuildItem> labels,
        Optional<ContainerImageInfoBuildItem> image,
        Optional<KubernetesCommandBuildItem> command,
        Optional<Port> port,
        Optional<KubernetesHealthLivenessPathBuildItem> livenessProbePath,
        Optional<KubernetesHealthReadinessPathBuildItem> readinessProbePath,
        Optional<KubernetesHealthStartupPathBuildItem> startupPath,
        List<KubernetesRoleBuildItem> roles,
        List<KubernetesClusterRoleBuildItem> clusterRoles,
        List<KubernetesEffectiveServiceAccountBuildItem> serviceAccounts,
        List<KubernetesRoleBindingBuildItem> roleBindings,
        List<KubernetesClusterRoleBindingBuildItem> clusterRoleBindings) {
    List<DecoratorBuildItem> result = new ArrayList<>();
    result.addAll(createLabelDecorators(target, name, config, labels));
    result.addAll(createAnnotationDecorators(project, target, name, config, metricsConfiguration, annotations, port));
    result.addAll(createPodDecorators(target, name, config));
    result.addAll(createContainerDecorators(target, name, namespace, config));
    result.addAll(createMountAndVolumeDecorators(target, name, config));
    result.addAll(createAppConfigVolumeAndEnvDecorators(target, name, config));
    result.addAll(createCommandDecorator(target, name, config, command));
    result.addAll(createArgsDecorator(target, name, config, command));
    // Handle Pull Secrets: only generated when the registry and both credentials are known.
    if (config.generateImagePullSecret()) {
        image.ifPresent(i -> i.getRegistry().ifPresent(registry -> {
            if (i.getUsername().isPresent() && i.getPassword().isPresent()) {
                String imagePullSecret = name + "-pull-secret";
                result.add(new DecoratorBuildItem(target, new AddImagePullSecretDecorator(name, imagePullSecret)));
                // Use the accessors consistently with the guard above
                // (was mixed with direct field access `i.username.get()` / `i.password.get()`).
                result.add(new DecoratorBuildItem(target, new AddDockerConfigJsonSecretDecorator(imagePullSecret,
                        registry, i.getUsername().get(), i.getPassword().get())));
            }
        }));
    }
    // Handle Probes: only meaningful when a port is exposed.
    if (port.isPresent()) {
        result.addAll(createProbeDecorators(name, target, config.livenessProbe(), config.readinessProbe(),
                config.startupProbe(), livenessProbePath, readinessProbePath, startupPath));
    }
    // Handle RBAC
    result.addAll(createRbacDecorators(name, target, config, kubernetesClientConfiguration, roles, clusterRoles,
            serviceAccounts, roleBindings, clusterRoleBindings));
    return result;
}
/**
 * Creates the RBAC-related decorators: roles, cluster roles, (cluster) role bindings and, when
 * needed, the service account, combining configuration-sourced and extension-sourced items.
 * <p>
 * A default role binding is generated from the first configured role (or cluster role) when no
 * role bindings are configured; if the kubernetes-client extension requested RBAC generation, a
 * binding to the "view" cluster role is generated instead.
 */
private static Collection<DecoratorBuildItem> createRbacDecorators(String name, String target,
PlatformConfiguration config,
Optional<KubernetesClientCapabilityBuildItem> kubernetesClientConfiguration,
List<KubernetesRoleBuildItem> rolesFromExtensions,
List<KubernetesClusterRoleBuildItem> clusterRolesFromExtensions,
List<KubernetesEffectiveServiceAccountBuildItem> effectiveServiceAccounts,
List<KubernetesRoleBindingBuildItem> roleBindingsFromExtensions,
List<KubernetesClusterRoleBindingBuildItem> clusterRoleBindingsFromExtensions) {
List<DecoratorBuildItem> result = new ArrayList<>();
// True when the kubernetes-client extension asked for RBAC generation.
boolean kubernetesClientRequiresRbacGeneration = kubernetesClientConfiguration
.map(KubernetesClientCapabilityBuildItem::isGenerateRbac).orElse(false);
// Names of roles/cluster roles created from configuration; used to pick a default for bindings.
Set<String> roles = new HashSet<>();
Set<String> clusterRoles = new HashSet<>();
// Add roles from configuration
for (Map.Entry<String, RbacConfig.RoleConfig> roleFromConfig : config.rbac().roles().entrySet()) {
RbacConfig.RoleConfig role = roleFromConfig.getValue();
// The map key is the fallback role name when none is configured explicitly.
String roleName = role.name().orElse(roleFromConfig.getKey());
result.add(new DecoratorBuildItem(target, new AddRoleResourceDecorator(name,
roleName,
role.namespace().orElse(null),
role.labels(),
toPolicyRulesList(role.policyRules()))));
roles.add(roleName);
}
// Add roles from extensions
Targetable.filteredByTarget(rolesFromExtensions, target)
.map(role -> new DecoratorBuildItem(target, new AddRoleResourceDecorator(name,
role.getName(),
role.getNamespace(),
Collections.emptyMap(),
role.getRules().stream()
.map(RBACUtil::from)
.toList())))
.forEach(result::add);
// Add cluster roles from configuration
for (Map.Entry<String, RbacConfig.ClusterRoleConfig> clusterRoleFromConfig : config.rbac().clusterRoles().entrySet()) {
RbacConfig.ClusterRoleConfig clusterRole = clusterRoleFromConfig.getValue();
String clusterRoleName = clusterRole.name().orElse(clusterRoleFromConfig.getKey());
result.add(new DecoratorBuildItem(target, new AddClusterRoleResourceDecorator(name,
clusterRoleName,
clusterRole.labels(),
toPolicyRulesList(clusterRole.policyRules()))));
clusterRoles.add(clusterRoleName);
}
// Add cluster roles from extensions
Targetable.filteredByTarget(clusterRolesFromExtensions, target)
.map(role -> new DecoratorBuildItem(target, new AddClusterRoleResourceDecorator(name,
role.getName(),
Collections.emptyMap(),
role.getRules().stream()
.map(RBACUtil::from)
.toList())))
.forEach(result::add);
// Retrieve SA for current target; exactly one effective service account is expected.
final var potentialSAs = Targetable.filteredByTarget(effectiveServiceAccounts, target).toList();
if (potentialSAs.isEmpty()) {
throw new RuntimeException("No effective service account found for application " + name);
}
if (potentialSAs.size() > 1) {
throw new RuntimeException("More than one effective service account found for application " + name);
}
final var effectiveServiceAccount = potentialSAs.get(0);
final var effectiveServiceAccountNamespace = effectiveServiceAccount.getNamespace();
final var effectiveServiceAccountName = effectiveServiceAccount.getServiceAccountName();
// Prepare default configuration: the first configured role (preferred) or cluster role
// becomes the default role-ref for generated bindings.
String defaultRoleName = null;
boolean defaultClusterWide = false;
boolean requiresServiceAccount = false;
if (!roles.isEmpty()) {
// generate a role binding using this first role.
defaultRoleName = roles.iterator().next();
} else if (!clusterRoles.isEmpty()) {
// generate a role binding using this first cluster role.
defaultClusterWide = true;
defaultRoleName = clusterRoles.iterator().next();
}
// Add role bindings from extensions
Targetable.filteredByTarget(roleBindingsFromExtensions, target)
.map(rb -> new DecoratorBuildItem(target, new AddRoleBindingResourceDecorator(name,
Strings.isNotNullOrEmpty(rb.getName()) ? rb.getName() : name + "-" + rb.getRoleRef().getName(),
rb.getNamespace(),
rb.getLabels(),
rb.getRoleRef(),
rb.getSubjects())))
.forEach(result::add);
// Add role bindings from configuration
for (Map.Entry<String, RbacConfig.RoleBindingConfig> rb : config.rbac().roleBindings().entrySet()) {
String rbName = rb.getValue().name().orElse(rb.getKey());
RbacConfig.RoleBindingConfig roleBinding = rb.getValue();
List<Subject> subjects = new ArrayList<>();
if (roleBinding.subjects().isEmpty()) {
// No explicit subjects: bind to the application's own service account.
requiresServiceAccount = true;
subjects.add(new Subject(null, SERVICE_ACCOUNT,
effectiveServiceAccountName,
effectiveServiceAccountNamespace));
} else {
for (Map.Entry<String, RbacConfig.SubjectConfig> s : roleBinding.subjects().entrySet()) {
String subjectName = s.getValue().name().orElse(s.getKey());
RbacConfig.SubjectConfig subject = s.getValue();
subjects.add(new Subject(subject.apiGroup().orElse(null),
subject.kind(),
subjectName,
subject.namespace().orElse(null)));
}
}
String roleName = roleBinding.roleName().orElse(defaultRoleName);
if (roleName == null) {
throw new IllegalStateException("No role has been set in the RoleBinding resource!");
}
boolean clusterWide = roleBinding.clusterWide().orElse(defaultClusterWide);
result.add(new DecoratorBuildItem(target, new AddRoleBindingResourceDecorator(name,
rbName,
null, // todo: should namespace be providable via config?
roleBinding.labels(),
new RoleRef(roleName, clusterWide),
subjects.toArray(new Subject[0]))));
}
// Add cluster role bindings from extensions
Targetable.filteredByTarget(clusterRoleBindingsFromExtensions, target)
.map(rb -> new DecoratorBuildItem(target, new AddClusterRoleBindingResourceDecorator(name,
Strings.isNotNullOrEmpty(rb.getName()) ? rb.getName() : name + "-" + rb.getRoleRef().getName(),
rb.getLabels(),
rb.getRoleRef(),
rb.getSubjects())))
.forEach(result::add);
// Add cluster role bindings from configuration
for (Map.Entry<String, RbacConfig.ClusterRoleBindingConfig> rb : config.rbac().clusterRoleBindings().entrySet()) {
String rbName = rb.getValue().name().orElse(rb.getKey());
RbacConfig.ClusterRoleBindingConfig clusterRoleBinding = rb.getValue();
List<Subject> subjects = new ArrayList<>();
if (clusterRoleBinding.subjects().isEmpty()) {
// Unlike role bindings, cluster role bindings have no implicit subject fallback.
throw new IllegalStateException("No subjects have been set in the ClusterRoleBinding resource!");
}
for (Map.Entry<String, RbacConfig.SubjectConfig> s : clusterRoleBinding.subjects().entrySet()) {
String subjectName = s.getValue().name().orElse(s.getKey());
RbacConfig.SubjectConfig subject = s.getValue();
subjects.add(new Subject(subject.apiGroup().orElse(null),
subject.kind(),
subjectName,
subject.namespace().orElse(null)));
}
result.add(new DecoratorBuildItem(target, new AddClusterRoleBindingResourceDecorator(name,
rbName,
clusterRoleBinding.labels(),
new RoleRef(clusterRoleBinding.roleName(), true),
subjects.toArray(new Subject[0]))));
}
// if no role bindings were created, then automatically create one if:
if (config.rbac().roleBindings().isEmpty()) {
if (defaultRoleName != null) {
// generate a default role binding if a default role name was configured
requiresServiceAccount = true;
result.add(new DecoratorBuildItem(target, new AddRoleBindingResourceDecorator(name,
name,
null, // todo: should namespace be providable via config?
Collections.emptyMap(),
new RoleRef(defaultRoleName, defaultClusterWide),
new Subject(null, SERVICE_ACCOUNT,
effectiveServiceAccountName,
effectiveServiceAccountNamespace))));
} else if (kubernetesClientRequiresRbacGeneration) {
// the property `quarkus.kubernetes-client.generate-rbac` is enabled
// and the kubernetes-client extension is present
requiresServiceAccount = true;
result.add(new DecoratorBuildItem(target, new AddRoleBindingResourceDecorator(name,
name + "-" + DEFAULT_ROLE_NAME_VIEW,
null, // todo: should namespace be providable via config?
Collections.emptyMap(),
new RoleRef(DEFAULT_ROLE_NAME_VIEW, true),
new Subject(null, SERVICE_ACCOUNT,
effectiveServiceAccountName,
effectiveServiceAccountNamespace))));
}
}
// generate service account if none is set, and it's required by other resources
if (requiresServiceAccount) {
// and generate the resource
result.add(new DecoratorBuildItem(target,
new AddServiceAccountResourceDecorator(name, effectiveServiceAccountName,
effectiveServiceAccountNamespace,
Collections.emptyMap())));
}
// set service account in deployment resource if the user sets a service account,
// or it's required for a dependant resource.
if (effectiveServiceAccount.wasSet() || requiresServiceAccount) {
result.add(new DecoratorBuildItem(target, new ApplyServiceAccountNameDecorator(name, effectiveServiceAccountName)));
}
return result;
}
/**
 * Computes the service account that the deployment should use, producing decorators for every
 * service account declared by extensions or configuration along the way.
 * <p>
 * Precedence (lowest to highest): extension-provided accounts, configuration-declared accounts
 * (those marked use-as-default win within each group; otherwise the first seen is kept), and
 * finally the user-set {@code quarkus.kubernetes.service-account} property, which always wins.
 *
 * @param name the application name, used as the fallback service account name
 * @param target the deployment target (e.g. kubernetes, openshift, knative)
 * @param config the platform configuration
 * @param serviceAccountsFromExtensions service accounts contributed by other extensions
 * @param decorators producer receiving AddServiceAccountResourceDecorator items
 * @return the effective service account (name, namespace, whether it was explicitly set)
 */
public static KubernetesEffectiveServiceAccountBuildItem computeEffectiveServiceAccount(String name, String target,
PlatformConfiguration config, List<KubernetesServiceAccountBuildItem> serviceAccountsFromExtensions,
BuildProducer<DecoratorBuildItem> decorators) {
Optional<String> effectiveServiceAccount = Optional.empty();
String effectiveServiceAccountNamespace = null;
for (KubernetesServiceAccountBuildItem sa : serviceAccountsFromExtensions) {
String saName = Optional.ofNullable(sa.getName()).orElse(name);
decorators.produce(new DecoratorBuildItem(target, new AddServiceAccountResourceDecorator(name, saName,
sa.getNamespace(),
sa.getLabels())));
// Use-as-default overrides a previous pick; otherwise only the first account is kept.
if (sa.isUseAsDefault() || effectiveServiceAccount.isEmpty()) {
effectiveServiceAccount = Optional.of(saName);
effectiveServiceAccountNamespace = sa.getNamespace();
}
}
// Add service account from configuration
for (Map.Entry<String, RbacConfig.ServiceAccountConfig> sa : config.rbac().serviceAccounts().entrySet()) {
String saName = sa.getValue().name().orElse(sa.getKey());
decorators.produce(new DecoratorBuildItem(target, new AddServiceAccountResourceDecorator(name, saName,
sa.getValue().namespace().orElse(null),
sa.getValue().labels())));
if (sa.getValue().isUseAsDefault() || effectiveServiceAccount.isEmpty()) {
effectiveServiceAccount = Optional.of(saName);
effectiveServiceAccountNamespace = sa.getValue().namespace().orElse(null);
}
}
// The user provided service account should always take precedence
if (config.serviceAccount().isPresent()) {
effectiveServiceAccount = config.serviceAccount();
effectiveServiceAccountNamespace = null;
}
final var effectiveName = effectiveServiceAccount.orElse(name);
return new KubernetesEffectiveServiceAccountBuildItem(effectiveName, effectiveServiceAccountNamespace,
effectiveServiceAccount.isPresent(), target);
}
/**
 * Creates label-related decorators: template metadata, the deployment selector, user-provided
 * labels, and removal of the version/name labels from selectors when configured.
 *
 * @param target the deployment target (e.g. kubernetes, openshift, knative)
 * @param name the name of the resource to accept the configuration
 * @param config the {@link PlatformConfiguration} instance
 * @param labels labels contributed by extensions, each scoped to its own target
 */
private static Collection<DecoratorBuildItem> createLabelDecorators(String target, String name,
        PlatformConfiguration config, List<KubernetesLabelBuildItem> labels) {
    final List<DecoratorBuildItem> items = new ArrayList<>();
    items.add(new DecoratorBuildItem(target, new AddMetadataToTemplateDecorator()));
    items.add(new DecoratorBuildItem(target, new AddSelectorToDeploymentSpecDecorator()));
    // Extension labels keep the target they were declared for.
    for (KubernetesLabelBuildItem label : labels) {
        items.add(new DecoratorBuildItem(label.getTarget(),
                new AddLabelDecorator(name, label.getKey(), label.getValue())));
    }
    // The version label is dropped from selectors when disabled or when idempotent output is
    // requested, since selectors are effectively immutable.
    if (!config.addVersionToLabelSelectors() || config.idempotent()) {
        items.add(new DecoratorBuildItem(target, new RemoveFromSelectorDecorator(name, Labels.VERSION)));
        items.add(new DecoratorBuildItem(target, new RemoveFromMatchingLabelsDecorator(name, Labels.VERSION)));
    }
    if (config.idempotent()) {
        items.add(new DecoratorBuildItem(target, new RemoveLabelDecorator(name, Labels.VERSION)));
    }
    if (!config.addNameToLabelSelectors()) {
        items.add(new DecoratorBuildItem(target, new RemoveLabelDecorator(name, Labels.NAME)));
        items.add(new DecoratorBuildItem(target, new RemoveFromSelectorDecorator(name, Labels.NAME)));
        items.add(new DecoratorBuildItem(target, new RemoveFromMatchingLabelsDecorator(name, Labels.NAME)));
    }
    return items;
}
/**
 * If user defines a custom command via configuration, this is used.
 * If not, it will use the one from other extensions.
 *
 * @param target The deployment target (e.g. kubernetes, openshift, knative)
 * @param name The name of the resource to accept the configuration
 * @param config The {@link PlatformConfiguration} instance
 * @param command Optional command item from other extensions
 */
private static List<DecoratorBuildItem> createCommandDecorator(String target, String name,
        PlatformConfiguration config, Optional<KubernetesCommandBuildItem> command) {
    final List<DecoratorBuildItem> result = new ArrayList<>();
    final var configuredCommand = config.command();
    if (configuredCommand.isPresent()) {
        // Configuration always wins over extension-provided commands.
        result.add(new DecoratorBuildItem(target,
                new ApplyCommandDecorator(name, configuredCommand.get().toArray(new String[0]))));
    } else {
        // Fall back to the command provided by other extensions, if any.
        command.ifPresent(c -> result.add(new DecoratorBuildItem(target,
                new ApplyCommandDecorator(name, c.getCommand().toArray(new String[0])))));
    }
    return result;
}
/**
 * If user defines arguments via configuration, then these will be merged to the ones from other extensions.
 * If not, then only the arguments from other extensions will be used if any.
 *
 * @param target The deployment target (e.g. kubernetes, openshift, knative)
 * @param name The name of the resource to accept the configuration
 * @param config The {@link PlatformConfiguration} instance
 * @param command Optional command item from other extensions
 */
private static List<DecoratorBuildItem> createArgsDecorator(String target, String name,
        PlatformConfiguration config, Optional<KubernetesCommandBuildItem> command) {
    // Extension-provided args come first; configured args are appended after them.
    final List<String> mergedArgs = new ArrayList<>();
    command.ifPresent(c -> mergedArgs.addAll(c.getArgs()));
    config.arguments().ifPresent(mergedArgs::addAll);
    final List<DecoratorBuildItem> result = new ArrayList<>();
    if (mergedArgs.isEmpty()) {
        return result;
    }
    result.add(new DecoratorBuildItem(target, new ApplyArgsDecorator(name, mergedArgs.toArray(new String[0]))));
    return result;
}
/**
 * Creates decorators adding extension-provided init containers to the application resource.
 * <p>
 * When an init container requests a shared environment or shared filesystem, the env-var and
 * mount decorators already registered for this target are re-applied to it.
 *
 * @param target the deployment target (e.g. kubernetes, openshift, knative)
 * @param applicationName the name of the application resource the init containers attach to
 * @param items init containers contributed by extensions (filtered by target)
 * @param decorators the decorators produced so far, scanned for shared env/mount decorators
 */
public static List<DecoratorBuildItem> createInitContainerDecorators(String target, String applicationName,
        List<KubernetesInitContainerBuildItem> items, List<DecoratorBuildItem> decorators) {
    List<DecoratorBuildItem> result = new ArrayList<>();
    // Env-var decorators applicable to this target (group == null means "any target").
    List<AddEnvVarDecorator> envVarDecorators = decorators.stream()
            .filter(d -> d.getGroup() == null || d.getGroup().equals(target))
            .map(d -> d.getDecorator(AddEnvVarDecorator.class))
            .filter(Optional::isPresent)
            .map(Optional::get)
            .toList();
    // Mount decorators applicable to this target.
    List<AddMountDecorator> mountDecorators = decorators.stream()
            .filter(d -> d.getGroup() == null || d.getGroup().equals(target))
            .map(d -> d.getDecorator(AddMountDecorator.class))
            .filter(Optional::isPresent)
            .map(Optional::get)
            .toList();
    items.stream().filter(item -> item.getTarget() == null || item.getTarget().equals(target)).forEach(item -> {
        io.dekorate.kubernetes.config.ContainerBuilder containerBuilder = new io.dekorate.kubernetes.config.ContainerBuilder()
                .withName(item.getName())
                .withImage(item.getImage())
                .withImagePullPolicy(ImagePullPolicy.valueOf(item.getImagePullPolicy()))
                .withCommand(item.getCommand().toArray(new String[0]))
                .withArguments(item.getArguments().toArray(new String[0]));
        if (item.isSharedEnvironment()) {
            for (final AddEnvVarDecorator delegate : envVarDecorators) {
                result.add(new DecoratorBuildItem(target,
                        new ApplicationContainerDecorator<ContainerBuilder>(applicationName, item.getName()) {
                            @Override
                            public void andThenVisit(ContainerBuilder builder) {
                                delegate.andThenVisit(builder);
                                // Currently, we have no way to filter out provided env vars.
                                // So, we apply them on top of every change.
                                // This needs to be addressed in dekorate to make things more efficient
                                for (Map.Entry<String, String> e : item.getEnvVars().entrySet()) {
                                    builder.removeMatchingFromEnv(p -> p.getName().equals(e.getKey()));
                                    builder.addNewEnv()
                                            .withName(e.getKey())
                                            .withValue(e.getValue())
                                            .endEnv();
                                }
                            }
                        }));
            }
        }
        if (item.isSharedFilesystem()) {
            for (final AddMountDecorator delegate : mountDecorators) {
                // Fix: scope to the application resource name, matching the shared-environment
                // branch above (previously `target` — the deployment-target id, not a resource
                // name — was passed, so the decorator could never match a resource).
                result.add(new DecoratorBuildItem(target,
                        new ApplicationContainerDecorator<ContainerBuilder>(applicationName, item.getName()) {
                            @Override
                            public void andThenVisit(ContainerBuilder builder) {
                                delegate.andThenVisit(builder);
                            }
                        }));
            }
        }
        result.add(new DecoratorBuildItem(target,
                new AddInitContainerDecorator(applicationName, containerBuilder
                        .addAllToEnvVars(item.getEnvVars().entrySet().stream().map(e -> new EnvBuilder()
                                .withName(e.getKey())
                                .withValue(e.getValue())
                                .build()).collect(Collectors.toList()))
                        .build())));
    });
    return result;
}
/**
 * Creates decorators that generate init Jobs from extension-provided items and re-apply the
 * relevant application decorators (image pull secrets, service account, env vars, volumes,
 * mounts) to each generated Job.
 *
 * @param target the deployment target (e.g. kubernetes, openshift, knative)
 * @param applicationName the name of the application resource
 * @param items init jobs contributed by extensions (filtered by target)
 * @param decorators the decorators produced so far, scanned for items to replicate onto the Job
 */
public static List<DecoratorBuildItem> createInitJobDecorators(String target,
String applicationName, List<KubernetesJobBuildItem> items, List<DecoratorBuildItem> decorators) {
List<DecoratorBuildItem> result = new ArrayList<>();
// Env-var decorators applicable to this target (group == null means "any target").
List<AddEnvVarDecorator> envVarDecorators = decorators.stream()
.filter(d -> d.getGroup() == null || d.getGroup().equals(target))
.map(d -> d.getDecorator(AddEnvVarDecorator.class))
.filter(Optional::isPresent)
.map(Optional::get)
.toList();
// Volume decorators of the supported kinds, to be replicated onto the Job's pod spec.
List<NamedResourceDecorator<?>> volumeDecorators = decorators.stream()
.filter(d -> d.getGroup() == null || d.getGroup().equals(target))
.filter(d -> d.getDecorator() instanceof AddEmptyDirVolumeDecorator
|| d.getDecorator() instanceof AddSecretVolumeDecorator
|| d.getDecorator() instanceof AddAzureDiskVolumeDecorator
|| d.getDecorator() instanceof AddAzureFileVolumeDecorator
|| d.getDecorator() instanceof AddAwsElasticBlockStoreVolumeDecorator)
.map(d -> (NamedResourceDecorator<?>) d.getDecorator())
.collect(Collectors.toList());
List<AddMountDecorator> mountDecorators = decorators.stream()
.filter(d -> d.getGroup() == null || d.getGroup().equals(target))
.map(d -> d.getDecorator(AddMountDecorator.class))
.filter(Optional::isPresent)
.map(Optional::get)
.toList();
List<AddImagePullSecretDecorator> imagePullSecretDecorators = decorators.stream()
.filter(d -> d.getGroup() == null || d.getGroup().equals(target))
.map(d -> d.getDecorator(AddImagePullSecretDecorator.class))
.filter(Optional::isPresent)
.map(Optional::get)
.toList();
List<ApplyServiceAccountNameDecorator> serviceAccountDecorators = decorators.stream()
.filter(d -> d.getGroup() == null || d.getGroup().equals(target))
.map(d -> d.getDecorator(ApplyServiceAccountNameDecorator.class))
.filter(Optional::isPresent)
.map(Optional::get)
.toList();
items.stream().filter(item -> item.getTarget() == null || item.getTarget().equals(target)).forEach(item -> {
// Replicate image pull secrets onto the Job's pod spec.
for (final AddImagePullSecretDecorator delegate : imagePullSecretDecorators) {
result.add(new DecoratorBuildItem(target, new NamedResourceDecorator<PodSpecBuilder>("Job", item.getName()) {
@Override
public void andThenVisit(PodSpecBuilder builder, ObjectMeta meta) {
delegate.andThenVisit(builder, meta);
}
}));
}
// Replicate the service account name onto the Job's pod spec.
for (final ApplyServiceAccountNameDecorator delegate : serviceAccountDecorators) {
result.add(new DecoratorBuildItem(target, new NamedResourceDecorator<PodSpecBuilder>("Job", item.getName()) {
@Override
public void andThenVisit(PodSpecBuilder builder, ObjectMeta meta) {
delegate.andThenVisit(builder, meta);
}
}));
}
// Apply the job item's own env vars, replacing any existing entry with the same key.
result.add(new DecoratorBuildItem(target, new NamedResourceDecorator<ContainerBuilder>("Job", item.getName()) {
@Override
public void andThenVisit(ContainerBuilder builder, ObjectMeta meta) {
for (Map.Entry<String, String> e : item.getEnvVars().entrySet()) {
builder.removeMatchingFromEnv(p -> p.getName().equals(e.getKey()));
builder.addNewEnv()
.withName(e.getKey())
.withValue(e.getValue())
.endEnv();
}
}
}));
if (item.isSharedEnvironment()) {
for (final AddEnvVarDecorator delegate : envVarDecorators) {
result.add(
new DecoratorBuildItem(target, new NamedResourceDecorator<ContainerBuilder>("Job", item.getName()) {
@Override
public void andThenVisit(ContainerBuilder builder, ObjectMeta meta) {
delegate.andThenVisit(builder);
// Currently, we have no way to filter out provided env vars.
// So, we apply them on top of every change.
// This needs to be addressed in dekorate to make things more efficient
for (Map.Entry<String, String> e : item.getEnvVars().entrySet()) {
builder.removeMatchingFromEnv(p -> p.getName().equals(e.getKey()));
builder.addNewEnv()
.withName(e.getKey())
.withValue(e.getValue())
.endEnv();
}
}
}));
}
}
if (item.isSharedFilesystem()) {
for (final NamedResourceDecorator<?> delegate : volumeDecorators) {
result.add(
new DecoratorBuildItem(target, new NamedResourceDecorator<PodSpecBuilder>("Job", item.getName()) {
@Override
public void andThenVisit(PodSpecBuilder builder, ObjectMeta meta) {
delegate.visit(builder);
}
}));
}
for (final AddMountDecorator delegate : mountDecorators) {
result.add(
new DecoratorBuildItem(target, new NamedResourceDecorator<ContainerBuilder>("Job", item.getName()) {
@Override
public void andThenVisit(ContainerBuilder builder, ObjectMeta meta) {
delegate.andThenVisit(builder);
}
}));
}
}
// Finally, create the Job resource itself from the item's image/command/args.
result.add(new DecoratorBuildItem(target,
new CreateJobResourceFromImageDecorator(item.getName(), item.getImage(), item.getCommand(),
item.getArguments())));
});
return result;
}
/**
 * Creates container decorator build items.
 *
 * @param target The deployment target (e.g. kubernetes, openshift, knative)
 * @param name The name of the resource to accept the configuration
 * @param namespace The optional namespace to apply to the resource
 * @param config The {@link PlatformConfiguration} instance
 */
private static List<DecoratorBuildItem> createContainerDecorators(String target, String name,
Optional<KubernetesNamespaceBuildItem> namespace,
PlatformConfiguration config) {
List<DecoratorBuildItem> result = new ArrayList<>();
// Apply the namespace to both the resources and any RBAC subjects referencing them.
namespace.ifPresent(n -> {
result.add(new DecoratorBuildItem(target, new AddNamespaceDecorator(n.getNamespace())));
result.add(new DecoratorBuildItem(target, new AddNamespaceToSubjectDecorator(n.getNamespace())));
});
config.workingDir()
.ifPresent(w -> result.add(new DecoratorBuildItem(target, new ApplyWorkingDirDecorator(name, w))));
return result;
}
/**
 * Creates pod decorator build items.
 *
 * @param target The deployment target (e.g. kubernetes, openshift, knative)
 * @param name The name of the resource to accept the configuration
 * @param config The {@link PlatformConfiguration} instance
 */
private static List<DecoratorBuildItem> createPodDecorators(String target, String name,
PlatformConfiguration config) {
List<DecoratorBuildItem> result = new ArrayList<>();
config.imagePullSecrets().ifPresent(
l -> l.forEach(s -> result.add(new DecoratorBuildItem(target, new AddImagePullSecretDecorator(name, s)))));
config.hostAliases().entrySet().forEach(e -> result
.add(new DecoratorBuildItem(target, new AddHostAliasesDecorator(name, HostAliasConverter.convert(e)))));
config.nodeSelector()
.ifPresent(n -> result.add(
new DecoratorBuildItem(target, new AddNodeSelectorDecorator(name, n.key(), n.value()))));
config.initContainers().entrySet().forEach(e -> result
.add(new DecoratorBuildItem(target, new AddInitContainerDecorator(name, ContainerConverter.convert(e)))));
// NOTE(review): `getSidecars()` breaks the accessor naming convention used by the other
// config methods here (`sidecars()` would match) — verify against PlatformConfiguration.
config.getSidecars().entrySet().forEach(
e -> result.add(new DecoratorBuildItem(target, new AddSidecarDecorator(name, ContainerConverter.convert(e)))));
// Resource limits/requests are applied individually, only when configured.
config.resources().limits().cpu()
.ifPresent(c -> result.add(new DecoratorBuildItem(target, new ApplyLimitsCpuDecorator(name, c))));
config.resources().limits().memory()
.ifPresent(m -> result.add(new DecoratorBuildItem(target, new ApplyLimitsMemoryDecorator(name, m))));
config.resources().requests().cpu()
.ifPresent(c -> result.add(new DecoratorBuildItem(target, new ApplyRequestsCpuDecorator(name, c))));
config.resources().requests().memory()
.ifPresent(m -> result.add(new DecoratorBuildItem(target, new ApplyRequestsMemoryDecorator(name, m))));
if (config.securityContext().isAnyPropertySet()) {
result.add(new DecoratorBuildItem(target, new ApplySecuritySettingsDecorator(name, config.securityContext())));
}
return result;
}
/**
 * Mounts the optional application Secret and ConfigMap under /mnt/app-secret and
 * /mnt/app-config-map, and points SmallRye Config at the mounted locations via the
 * SMALLRYE_CONFIG_LOCATIONS environment variable.
 *
 * @param target the deployment target (e.g. kubernetes, openshift, knative)
 * @param name the name of the resource to accept the configuration
 * @param config the {@link PlatformConfiguration} instance
 */
private static List<DecoratorBuildItem> createAppConfigVolumeAndEnvDecorators(String target,
        String name,
        PlatformConfiguration config) {
    final List<DecoratorBuildItem> result = new ArrayList<>();
    final Set<String> mountedPaths = new HashSet<>();
    config.appSecret().ifPresent(secretName -> {
        result.add(new DecoratorBuildItem(target, new AddSecretVolumeDecorator(new SecretVolumeBuilder()
                .withSecretName(secretName)
                .withVolumeName("app-secret")
                .build())));
        result.add(new DecoratorBuildItem(target, new AddMountDecorator(new MountBuilder()
                .withName("app-secret")
                .withPath("/mnt/app-secret")
                .build())));
        mountedPaths.add("/mnt/app-secret");
    });
    config.appConfigMap().ifPresent(configMapName -> {
        result.add(new DecoratorBuildItem(target, new AddConfigMapVolumeDecorator(new ConfigMapVolumeBuilder()
                .withConfigMapName(configMapName)
                .withVolumeName("app-config-map")
                .build())));
        result.add(new DecoratorBuildItem(target, new AddMountDecorator(new MountBuilder()
                .withName("app-config-map")
                .withPath("/mnt/app-config-map")
                .build())));
        mountedPaths.add("/mnt/app-config-map");
    });
    if (mountedPaths.isEmpty()) {
        return result;
    }
    // Tell SmallRye Config to also read configuration from the mounted locations.
    result.add(new DecoratorBuildItem(target,
            new AddEnvVarDecorator(ApplicationContainerDecorator.ANY, name, new EnvBuilder()
                    .withName("SMALLRYE_CONFIG_LOCATIONS")
                    .withValue(String.join(",", mountedPaths))
                    .build())));
    return result;
}
private static List<DecoratorBuildItem> createMountAndVolumeDecorators(String target,
String name,
PlatformConfiguration config) {
List<DecoratorBuildItem> result = new ArrayList<>();
config.mounts().entrySet().forEach(
e -> result.add(new DecoratorBuildItem(target, new AddMountDecorator(ANY, name, MountConverter.convert(e)))));
config.secretVolumes().entrySet().forEach(e -> result
.add(new DecoratorBuildItem(target, new AddSecretVolumeDecorator(SecretVolumeConverter.convert(e)))));
config.configMapVolumes().entrySet().forEach(e -> result
.add(new DecoratorBuildItem(target, new AddConfigMapVolumeDecorator(ConfigMapVolumeConverter.convert(e)))));
config.emptyDirVolumes().ifPresent(volumes -> volumes.forEach(e -> result
.add(new DecoratorBuildItem(target, new AddEmptyDirVolumeDecorator(EmptyDirVolumeConverter.convert(e))))));
config.pvcVolumes().entrySet().forEach(
e -> result.add(new DecoratorBuildItem(target, new AddPvcVolumeDecorator(PvcVolumeConverter.convert(e)))));
config.awsElasticBlockStoreVolumes().entrySet().forEach(e -> result.add(new DecoratorBuildItem(target,
new AddAwsElasticBlockStoreVolumeDecorator(AwsElasticBlockStoreVolumeConverter.convert(e)))));
config.azureFileVolumes().entrySet().forEach(e -> result
.add(new DecoratorBuildItem(target, new AddAzureFileVolumeDecorator(AzureFileVolumeConverter.convert(e)))));
config.azureDiskVolumes().entrySet().forEach(e -> result
.add(new DecoratorBuildItem(target, new AddAzureDiskVolumeDecorator(AzureDiskVolumeConverter.convert(e)))));
return result;
}
    /**
     * Builds the decorators that add annotations to the generated resources: user-supplied
     * annotations, the Quarkus version / VCS / build-timestamp annotations, and — when a metrics
     * capability and an HTTP port are present — Prometheus scrape annotations and an optional
     * ServiceMonitor resource.
     *
     * @param project the optional project, used to derive SCM info (VCS URI and commit id)
     * @param target the deployment target (e.g. kubernetes, openshift)
     * @param name the name of the deployment / container
     * @param config the platform configuration
     * @param metricsConfiguration the optional metrics capability providing the metrics endpoint path
     * @param annotations user-supplied annotation build items
     * @param port the resolved HTTP port, used as fallback for the Prometheus port annotation
     * @return the list of decorators to apply
     */
    private static List<DecoratorBuildItem> createAnnotationDecorators(Optional<Project> project, String target, String name,
            PlatformConfiguration config,
            Optional<MetricsCapabilityBuildItem> metricsConfiguration,
            List<KubernetesAnnotationBuildItem> annotations,
            Optional<Port> port) {
        List<DecoratorBuildItem> result = new ArrayList<>();
        // User-supplied annotations: applied to the target each build item declares, not to `target`.
        annotations.forEach(a -> result.add(new DecoratorBuildItem(a.getTarget(),
                new AddAnnotationDecorator(name, a.getKey(), a.getValue()))));
        ZonedDateTime now = ZonedDateTime.now(ZoneOffset.UTC);
        project.ifPresent(p -> {
            ScmInfo scm = p.getScmInfo();
            String vcsUri = parseVCSUri(config.vcsUri(), scm);
            String commitId = scm != null ? scm.getCommit() : null;
            // Dekorate uses its own annotations. Let's replace them with the quarkus ones.
            result.add(new DecoratorBuildItem(target, new RemoveAnnotationDecorator(Annotations.VCS_URL)));
            result.add(new DecoratorBuildItem(target, new RemoveAnnotationDecorator(Annotations.COMMIT_ID)));
            result.add(new DecoratorBuildItem(target, new AddAnnotationDecorator(name,
                    new Annotation(QUARKUS_ANNOTATIONS_QUARKUS_VERSION, Version.getVersion(), new String[0]))));
            // Add quarkus vcs annotations; the commit id is skipped in idempotent mode so that
            // rebuilding from a different commit still produces identical manifests.
            if (commitId != null && !config.idempotent()) {
                result.add(new DecoratorBuildItem(target, new AddAnnotationDecorator(name,
                        new Annotation(QUARKUS_ANNOTATIONS_COMMIT_ID, commitId, new String[0]))));
            }
            if (vcsUri != null) {
                result.add(new DecoratorBuildItem(target,
                        new AddAnnotationDecorator(name,
                                new Annotation(QUARKUS_ANNOTATIONS_VCS_URL, vcsUri, new String[0]))));
            }
        });
        // The build timestamp is also omitted in idempotent mode (it changes on every build).
        if (config.addBuildTimestamp() && !config.idempotent()) {
            result.add(new DecoratorBuildItem(target,
                    new AddAnnotationDecorator(name, new Annotation(QUARKUS_ANNOTATIONS_BUILD_TIMESTAMP,
                            now.format(DateTimeFormatter.ofPattern("yyyy-MM-dd - HH:mm:ss Z")), new String[0]))));
        }
        // Add metrics annotations, only when both an HTTP port and a metrics endpoint exist.
        metricsConfiguration.ifPresent(m -> {
            String path = m.metricsEndpoint();
            String prefix = config.prometheus().prefix();
            if (port.isPresent() && path != null) {
                if (config.prometheus().generateServiceMonitor()) {
                    result.add(new DecoratorBuildItem(target, new AddServiceMonitorResourceDecorator(
                            config.prometheus().scheme().orElse("http"),
                            config.prometheus().port().orElse(String.valueOf(port.get().getContainerPort())),
                            config.prometheus().path().orElse(path),
                            10,
                            true)));
                }
                if (config.prometheus().annotations()) {
                    // Each annotation key falls back to "<prefix>/<suffix>" when not configured.
                    result.add(new DecoratorBuildItem(target, new AddAnnotationDecorator(name,
                            config.prometheus().scrape().orElse(prefix + "/scrape"), "true",
                            PROMETHEUS_ANNOTATION_TARGETS)));
                    result.add(new DecoratorBuildItem(target, new AddAnnotationDecorator(name,
                            config.prometheus().path().orElse(prefix + "/path"), path, PROMETHEUS_ANNOTATION_TARGETS)));
                    // Prefer the management port (default 9000) when the management interface is enabled.
                    final var managementPort = ConfigProvider.getConfig()
                            .getOptionalValue("quarkus.management.port", Integer.class).orElse(9000);
                    final var prometheusPort = KubernetesConfigUtil.managementPortIsEnabled() ? managementPort
                            : port.get().getContainerPort();
                    result.add(new DecoratorBuildItem(target, new AddAnnotationDecorator(name,
                            config.prometheus().port().orElse(prefix + "/port"), "" + prometheusPort,
                            PROMETHEUS_ANNOTATION_TARGETS)));
                    result.add(new DecoratorBuildItem(target, new AddAnnotationDecorator(name,
                            config.prometheus().scheme().orElse(prefix + "/scheme"), "http",
                            PROMETHEUS_ANNOTATION_TARGETS)));
                }
            }
        });
        return result;
    }
    /**
     * Create a decorator that sets the port to the http probe.
     * The rules for setting the probe are the following:
     * 1. if 'http-action-port' is set, use that.
     * 2. if 'http-action-port-name' is set, use that to lookup the port value.
     * 3. if a {@code KubernetesProbePortNameBuildItem} is set, then use that to lookup the port.
     * 4. if we still haven't found a port fallback to 8080.
     *
     * @param name The name of the deployment / container.
     * @param target The deployment target
     * @param probeKind The probe kind (e.g. readinessProbe, livenessProbe etc)
     * @param probeConfig the probe configuration (may carry an explicit port, port name or scheme)
     * @param portName the probe port name build item
     * @param ports a list of kubernetes port build items
     * @param portsFromConfig the user-configured ports, keyed by port name
     * @return a decorator that configures the port of the http action of the probe.
     */
    public static DecoratorBuildItem createProbeHttpPortDecorator(String name, String target, String probeKind,
            ProbeConfig probeConfig,
            Optional<KubernetesProbePortNameBuildItem> portName,
            List<KubernetesPortBuildItem> ports,
            Map<String, PortConfig> portsFromConfig) {
        //1. check if `httpActionPort` is defined
        //2. lookup port by `httpPortName`
        //3. fallback to DEFAULT_HTTP_PORT
        String httpPortName = probeConfig.httpActionPortName()
                .or(() -> portName.map(KubernetesProbePortNameBuildItem::getName))
                .orElse(HTTP_PORT);
        Integer port;
        PortConfig portFromConfig = portsFromConfig.get(httpPortName);
        if (probeConfig.httpActionPort().isPresent()) {
            // Explicit port in the probe config wins.
            port = probeConfig.httpActionPort().get();
        } else if (portFromConfig != null && portFromConfig.containerPort().isPresent()) {
            // Next, the user-configured port with the matching name.
            port = portFromConfig.containerPort().getAsInt();
        } else {
            // Finally, an extension-provided port with the matching name, or the default.
            port = ports.stream().filter(p -> httpPortName.equals(p.getName()))
                    .map(KubernetesPortBuildItem::getPort).findFirst().orElse(DEFAULT_HTTP_PORT);
        }

        // Resolve scheme property from:
        String scheme;
        if (probeConfig.httpActionScheme().isPresent()) {
            // 1. User in Probe config
            scheme = probeConfig.httpActionScheme().get();
        } else if (portFromConfig != null && portFromConfig.tls()) {
            // 2. User in Ports config
            scheme = SCHEME_HTTPS;
        } else if (portName.isPresent()
                && portName.get().getScheme() != null
                && portName.get().getName().equals(httpPortName)) {
            // 3. Extensions
            scheme = portName.get().getScheme();
        } else {
            // 4. Using the port number: conventional TLS ports imply HTTPS.
            scheme = port != null && (port == 443 || port == 8443) ? SCHEME_HTTPS : SCHEME_HTTP;
        }

        // Applying to all deployments to mimic the same logic as the rest of probes in the method createProbeDecorators.
        return new DecoratorBuildItem(target,
                new ApplyHttpGetActionPortDecorator(ANY, name, httpPortName, port, probeKind, scheme));
    }
/**
* Create the decorators needed for setting up probes.
* The method will not create decorators related to ports, as they are not supported by all targets (e.g. knative)
* Port related decorators are created by `applyProbePort` instead.
*
* @return a list of decorators that configure the probes
*/
private static List<DecoratorBuildItem> createProbeDecorators(String name, String target, ProbeConfig livenessProbe,
ProbeConfig readinessProbe,
ProbeConfig startupProbe,
Optional<KubernetesHealthLivenessPathBuildItem> livenessPath,
Optional<KubernetesHealthReadinessPathBuildItem> readinessPath,
Optional<KubernetesHealthStartupPathBuildItem> startupPath) {
List<DecoratorBuildItem> result = new ArrayList<>();
createLivenessProbe(name, target, livenessProbe, livenessPath).ifPresent(result::add);
createReadinessProbe(name, target, readinessProbe, readinessPath).ifPresent(result::add);
if (!KNATIVE.equals(target)) { // see https://github.com/quarkusio/quarkus/issues/33944
createStartupProbe(name, target, startupProbe, startupPath).ifPresent(result::add);
}
return result;
}
private static Optional<DecoratorBuildItem> createLivenessProbe(String name, String target, ProbeConfig livenessProbe,
Optional<KubernetesHealthLivenessPathBuildItem> livenessPath) {
if (livenessProbe.hasUserSuppliedAction()) {
return Optional.of(
new DecoratorBuildItem(target,
new AddLivenessProbeDecorator(name, ProbeConverter.convert(name, livenessProbe))));
} else if (livenessPath.isPresent()) {
return Optional.of(new DecoratorBuildItem(target, new AddLivenessProbeDecorator(name,
ProbeConverter.builder(name, livenessProbe).withHttpActionPath(livenessPath.get().getPath()).build())));
}
return Optional.empty();
}
private static Optional<DecoratorBuildItem> createReadinessProbe(String name, String target, ProbeConfig readinessProbe,
Optional<KubernetesHealthReadinessPathBuildItem> readinessPath) {
if (readinessProbe.hasUserSuppliedAction()) {
return Optional.of(new DecoratorBuildItem(target,
new AddReadinessProbeDecorator(name, ProbeConverter.convert(name, readinessProbe))));
} else if (readinessPath.isPresent()) {
return Optional.of(new DecoratorBuildItem(target, new AddReadinessProbeDecorator(name,
ProbeConverter.builder(name, readinessProbe).withHttpActionPath(readinessPath.get().getPath()).build())));
}
return Optional.empty();
}
private static Optional<DecoratorBuildItem> createStartupProbe(String name, String target, ProbeConfig startupProbe,
Optional<KubernetesHealthStartupPathBuildItem> startupPath) {
if (startupProbe.hasUserSuppliedAction()) {
return Optional.of(new DecoratorBuildItem(target,
new AddStartupProbeDecorator(name, ProbeConverter.convert(name, startupProbe))));
} else if (startupPath.isPresent()) {
return Optional.of(new DecoratorBuildItem(target, new AddStartupProbeDecorator(name,
ProbeConverter.builder(name, startupProbe).withHttpActionPath(startupPath.get().getPath()).build())));
}
return Optional.empty();
}
private static Map<String, Integer> verifyPorts(List<KubernetesPortBuildItem> kubernetesPortBuildItems) {
final Map<String, Integer> result = new HashMap<>();
final Set<Integer> usedPorts = new HashSet<>();
for (KubernetesPortBuildItem entry : kubernetesPortBuildItems) {
if (!entry.isEnabled()) {
continue;
}
final String name = entry.getName();
if (result.containsKey(name)) {
throw new IllegalArgumentException(
"All Kubernetes ports must have unique names - " + name + " has been used multiple times");
}
final Integer port = entry.getPort();
if (usedPorts.contains(port)) {
throw new IllegalArgumentException(
"All Kubernetes ports must be unique - " + port + " has been used multiple times");
}
result.put(name, port);
usedPorts.add(port);
}
return result;
}
private static List<PolicyRule> toPolicyRulesList(Map<String, RbacConfig.PolicyRuleConfig> policyRules) {
return policyRules.values().stream().map(RBACUtil::from).toList();
}
private static String parseVCSUri(VCSUriConfig config, ScmInfo scm) {
if (!config.enabled()) {
return null;
}
if (config.override().isPresent()) {
return config.override().get();
}
if (scm == null) {
return null;
}
String originRemote = scm.getRemote().get("origin");
if (originRemote == null || originRemote.isBlank()) {
return null;
}
return Git.sanitizeRemoteUrl(originRemote);
}
}
|
KubernetesCommonHelper
|
java
|
quarkusio__quarkus
|
extensions/jdbc/jdbc-postgresql/runtime/src/main/java/io/quarkus/jdbc/postgresql/runtime/PostgreSQLAgroalConnectionConfigurer.java
|
{
"start": 424,
"end": 1106
}
|
class ____ implements AgroalConnectionConfigurer {
@Override
public void disableSslSupport(String databaseKind, AgroalDataSourceConfigurationSupplier dataSourceConfiguration,
Map<String, String> additionalProperties) {
dataSourceConfiguration.connectionPoolConfiguration().connectionFactoryConfiguration().jdbcProperty("sslmode",
"disable");
}
@Override
public void setExceptionSorter(String databaseKind, AgroalDataSourceConfigurationSupplier dataSourceConfiguration) {
dataSourceConfiguration.connectionPoolConfiguration().exceptionSorter(new PostgreSQLExceptionSorter());
}
}
|
PostgreSQLAgroalConnectionConfigurer
|
java
|
mapstruct__mapstruct
|
processor/src/test/java/org/mapstruct/ap/test/bugs/_2101/Issue2101Mapper.java
|
{
"start": 1095,
"end": 1203
}
|
class ____ {
public CodeValuePair codeValue1;
public CodeValuePair codeValue2;
}
|
Target
|
java
|
apache__hadoop
|
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/amlauncher/AMLauncher.java
|
{
"start": 3585,
"end": 15306
}
|
class ____ implements Runnable {
private static final Logger LOG =
LoggerFactory.getLogger(AMLauncher.class);
private ContainerManagementProtocol containerMgrProxy;
private final RMAppAttempt application;
private final Configuration conf;
private final AMLauncherEventType eventType;
private final RMContext rmContext;
private final Container masterContainer;
private boolean timelineServiceV2Enabled;
@SuppressWarnings("rawtypes")
private final EventHandler handler;
public AMLauncher(RMContext rmContext, RMAppAttempt application,
AMLauncherEventType eventType, Configuration conf) {
this.application = application;
this.conf = conf;
this.eventType = eventType;
this.rmContext = rmContext;
this.handler = rmContext.getDispatcher().getEventHandler();
this.masterContainer = application.getMasterContainer();
this.timelineServiceV2Enabled = YarnConfiguration.
timelineServiceV2Enabled(conf);
}
private void connect() throws IOException {
ContainerId masterContainerID = masterContainer.getId();
containerMgrProxy = getContainerMgrProxy(masterContainerID);
}
private void launch() throws IOException, YarnException {
connect();
ContainerId masterContainerID = masterContainer.getId();
ApplicationSubmissionContext applicationContext =
application.getSubmissionContext();
LOG.info("Setting up container " + masterContainer
+ " for AM " + application.getAppAttemptId());
ContainerLaunchContext launchContext =
createAMContainerLaunchContext(applicationContext, masterContainerID);
StartContainerRequest scRequest =
StartContainerRequest.newInstance(launchContext,
masterContainer.getContainerToken());
List<StartContainerRequest> list = new ArrayList<StartContainerRequest>();
list.add(scRequest);
StartContainersRequest allRequests =
StartContainersRequest.newInstance(list);
StartContainersResponse response =
containerMgrProxy.startContainers(allRequests);
if (response.getFailedRequests() != null
&& response.getFailedRequests().containsKey(masterContainerID)) {
Throwable t =
response.getFailedRequests().get(masterContainerID).deSerialize();
parseAndThrowException(t);
} else {
LOG.info("Done launching container " + masterContainer + " for AM "
+ application.getAppAttemptId());
}
}
private void cleanup() throws IOException, YarnException {
connect();
ContainerId containerId = masterContainer.getId();
List<ContainerId> containerIds = new ArrayList<ContainerId>();
containerIds.add(containerId);
StopContainersRequest stopRequest =
StopContainersRequest.newInstance(containerIds);
StopContainersResponse response =
containerMgrProxy.stopContainers(stopRequest);
if (response.getFailedRequests() != null
&& response.getFailedRequests().containsKey(containerId)) {
Throwable t = response.getFailedRequests().get(containerId).deSerialize();
parseAndThrowException(t);
}
}
// Protected. For tests.
protected ContainerManagementProtocol getContainerMgrProxy(
final ContainerId containerId) {
final NodeId node = masterContainer.getNodeId();
final InetSocketAddress containerManagerConnectAddress =
NetUtils.createSocketAddrForHost(node.getHost(), node.getPort());
final YarnRPC rpc = getYarnRPC();
UserGroupInformation currentUser =
UserGroupInformation.createRemoteUser(containerId
.getApplicationAttemptId().toString());
String user =
rmContext.getRMApps()
.get(containerId.getApplicationAttemptId().getApplicationId())
.getUser();
org.apache.hadoop.yarn.api.records.Token token =
rmContext.getNMTokenSecretManager().createNMToken(
containerId.getApplicationAttemptId(), node, user);
currentUser.addToken(ConverterUtils.convertFromYarn(token,
containerManagerConnectAddress));
return NMProxy.createNMProxy(conf, ContainerManagementProtocol.class,
currentUser, rpc, containerManagerConnectAddress);
}
@VisibleForTesting
protected YarnRPC getYarnRPC() {
return YarnRPC.create(conf); // TODO: Don't create again and again.
}
private ContainerLaunchContext createAMContainerLaunchContext(
ApplicationSubmissionContext applicationMasterContext,
ContainerId containerID) throws IOException {
// Construct the actual Container
ContainerLaunchContext container =
applicationMasterContext.getAMContainerSpec();
if (container == null){
throw new IOException(containerID +
" has been cleaned before launched");
}
// Finalize the container
setupTokens(container, containerID);
// set the flow context optionally for timeline service v.2
setFlowContext(container);
return container;
}
@Private
@VisibleForTesting
protected void setupTokens(
ContainerLaunchContext container, ContainerId containerID)
throws IOException {
Map<String, String> environment = container.getEnvironment();
environment.put(ApplicationConstants.APPLICATION_WEB_PROXY_BASE_ENV,
application.getWebProxyBase());
// Set AppSubmitTime to be consumable by the AM.
ApplicationId applicationId =
application.getAppAttemptId().getApplicationId();
environment.put(
ApplicationConstants.APP_SUBMIT_TIME_ENV,
String.valueOf(rmContext.getRMApps()
.get(applicationId)
.getSubmitTime()));
Credentials credentials = new Credentials();
DataInputByteBuffer dibb = new DataInputByteBuffer();
ByteBuffer tokens = container.getTokens();
if (tokens != null) {
// TODO: Don't do this kind of checks everywhere.
dibb.reset(tokens);
credentials.readTokenStorageStream(dibb);
tokens.rewind();
}
// Add AMRMToken
Token<AMRMTokenIdentifier> amrmToken = createAndSetAMRMToken();
if (amrmToken != null) {
credentials.addToken(amrmToken.getService(), amrmToken);
}
// Setup Keystore and Truststore
String httpsPolicy = conf.get(YarnConfiguration.RM_APPLICATION_HTTPS_POLICY,
YarnConfiguration.DEFAULT_RM_APPLICATION_HTTPS_POLICY);
if (httpsPolicy.equals("LENIENT") || httpsPolicy.equals("STRICT")) {
ProxyCA proxyCA = rmContext.getProxyCAManager().getProxyCA();
try {
String kPass = proxyCA.generateKeyStorePassword();
byte[] keyStore = proxyCA.createChildKeyStore(applicationId, kPass);
credentials.addSecretKey(
AMSecretKeys.YARN_APPLICATION_AM_KEYSTORE, keyStore);
credentials.addSecretKey(
AMSecretKeys.YARN_APPLICATION_AM_KEYSTORE_PASSWORD,
kPass.getBytes(StandardCharsets.UTF_8));
String tPass = proxyCA.generateKeyStorePassword();
byte[] trustStore = proxyCA.getChildTrustStore(tPass);
credentials.addSecretKey(
AMSecretKeys.YARN_APPLICATION_AM_TRUSTSTORE, trustStore);
credentials.addSecretKey(
AMSecretKeys.YARN_APPLICATION_AM_TRUSTSTORE_PASSWORD,
tPass.getBytes(StandardCharsets.UTF_8));
} catch (Exception e) {
throw new IOException(e);
}
}
DataOutputBuffer dob = new DataOutputBuffer();
credentials.writeTokenStorageToStream(dob);
container.setTokens(ByteBuffer.wrap(dob.getData(), 0, dob.getLength()));
}
private void setFlowContext(ContainerLaunchContext container) {
if (timelineServiceV2Enabled) {
Map<String, String> environment = container.getEnvironment();
ApplicationId applicationId =
application.getAppAttemptId().getApplicationId();
RMApp app = rmContext.getRMApps().get(applicationId);
// initialize the flow in the environment with default values for those
// that do not specify the flow tags
// flow name: app name (or app id if app name is missing),
// flow version: "1", flow run id: start time
setFlowTags(environment, TimelineUtils.FLOW_NAME_TAG_PREFIX,
TimelineUtils.generateDefaultFlowName(app.getName(), applicationId));
setFlowTags(environment, TimelineUtils.FLOW_VERSION_TAG_PREFIX,
TimelineUtils.DEFAULT_FLOW_VERSION);
setFlowTags(environment, TimelineUtils.FLOW_RUN_ID_TAG_PREFIX,
String.valueOf(app.getStartTime()));
// Set flow context info: the flow context is received via the application
// tags
for (String tag : app.getApplicationTags()) {
String[] parts = tag.split(":", 2);
if (parts.length != 2 || parts[1].isEmpty()) {
continue;
}
switch (parts[0].toUpperCase()) {
case TimelineUtils.FLOW_NAME_TAG_PREFIX:
setFlowTags(environment, TimelineUtils.FLOW_NAME_TAG_PREFIX,
parts[1]);
break;
case TimelineUtils.FLOW_VERSION_TAG_PREFIX:
setFlowTags(environment, TimelineUtils.FLOW_VERSION_TAG_PREFIX,
parts[1]);
break;
case TimelineUtils.FLOW_RUN_ID_TAG_PREFIX:
setFlowTags(environment, TimelineUtils.FLOW_RUN_ID_TAG_PREFIX,
parts[1]);
break;
default:
break;
}
}
}
}
private static void setFlowTags(
Map<String, String> environment, String tagPrefix, String value) {
if (!value.isEmpty()) {
environment.put(tagPrefix, value);
}
}
@VisibleForTesting
protected Token<AMRMTokenIdentifier> createAndSetAMRMToken() {
Token<AMRMTokenIdentifier> amrmToken =
this.rmContext.getAMRMTokenSecretManager().createAndGetAMRMToken(
application.getAppAttemptId());
((RMAppAttemptImpl)application).setAMRMToken(amrmToken);
return amrmToken;
}
@SuppressWarnings("unchecked")
public void run() {
switch (eventType) {
case LAUNCH:
try {
LOG.info("Launching master" + application.getAppAttemptId());
launch();
handler.handle(new RMAppAttemptEvent(application.getAppAttemptId(),
RMAppAttemptEventType.LAUNCHED, System.currentTimeMillis()));
} catch(Exception ie) {
onAMLaunchFailed(masterContainer.getId(), ie);
}
break;
case CLEANUP:
try {
LOG.info("Cleaning master " + application.getAppAttemptId());
cleanup();
} catch(IOException ie) {
LOG.info("Error cleaning master ", ie);
} catch (YarnException e) {
StringBuilder sb = new StringBuilder("Container ");
sb.append(masterContainer.getId().toString())
.append(" is not handled by this NodeManager");
if (!e.getMessage().contains(sb.toString())) {
// Ignoring if container is already killed by Node Manager.
LOG.info("Error cleaning master ", e);
}
}
break;
default:
LOG.warn("Received unknown event-type " + eventType + ". Ignoring.");
break;
}
}
private void parseAndThrowException(Throwable t) throws YarnException,
IOException {
if (t instanceof YarnException) {
throw (YarnException) t;
} else if (t instanceof InvalidToken) {
throw (InvalidToken) t;
} else {
throw (IOException) t;
}
}
@SuppressWarnings("unchecked")
protected void onAMLaunchFailed(ContainerId containerId, Exception ie) {
String message = "Error launching " + application.getAppAttemptId()
+ ". Got exception: " + StringUtils.stringifyException(ie);
LOG.info(message);
handler.handle(new RMAppAttemptEvent(application
.getAppAttemptId(), RMAppAttemptEventType.LAUNCH_FAILED, message));
}
}
|
AMLauncher
|
java
|
quarkusio__quarkus
|
extensions/tls-registry/deployment/src/test/java/io/quarkus/tls/P12KeyStoreWithSniTest.java
|
{
"start": 940,
"end": 1848
}
|
class ____ {
private static final String configuration = """
quarkus.tls.key-store.p12.path=target/certs/test-sni-p12-keystore.p12
quarkus.tls.key-store.p12.password=sni
quarkus.tls.key-store.p12.alias-password=sni
quarkus.tls.key-store.sni=true
""";
@RegisterExtension
static final QuarkusUnitTest config = new QuarkusUnitTest().setArchiveProducer(
() -> ShrinkWrap.create(JavaArchive.class)
.add(new StringAsset(configuration), "application.properties"));
@Inject
TlsConfigurationRegistry registry;
@Test
void test() throws KeyStoreException {
TlsConfiguration tlsConfiguration = registry.getDefault().orElseThrow();
assertThat(tlsConfiguration.usesSni()).isTrue();
assertThat(tlsConfiguration.getKeyStore().size()).isEqualTo(3);
}
}
|
P12KeyStoreWithSniTest
|
java
|
apache__flink
|
flink-table/flink-table-common/src/main/java/org/apache/flink/table/functions/PredictFunction.java
|
{
"start": 1273,
"end": 2157
}
|
class ____ extends TableFunction<RowData> {
/**
* Synchronously predict result based on input row.
*
* @param inputRow - A {@link RowData} that wraps input for predict function.
* @return A collection of predicted results.
*/
public abstract Collection<RowData> predict(RowData inputRow);
/** Invoke {@link #predict} and handle exceptions. */
public final void eval(Object... args) {
GenericRowData argsData = GenericRowData.of(args);
try {
Collection<RowData> results = predict(argsData);
if (results == null) {
return;
}
results.forEach(this::collect);
} catch (Exception e) {
throw new FlinkRuntimeException(
String.format("Failed to execute prediction with input row %s.", argsData), e);
}
}
}
|
PredictFunction
|
java
|
quarkusio__quarkus
|
extensions/smallrye-jwt/deployment/src/test/java/io/quarkus/jwt/test/RolesEndpoint.java
|
{
"start": 831,
"end": 5133
}
|
class ____ {
@Inject
JsonWebToken jwtPrincipal;
@Inject
@Claim("raw_token")
ClaimValue<String> rawToken;
@GET
@Path("/echo")
@RolesAllowed("Echoer")
public String echoInput(@Context SecurityContext sec, @QueryParam("input") String input) {
Principal user = sec.getUserPrincipal();
return input + ", user=" + user.getName();
}
@GET
@Path("/echo2")
@RolesAllowed("NoSuchUser")
public String echoInput2(@Context SecurityContext sec, @QueryParam("input") String input) {
Principal user = sec.getUserPrincipal();
String name = user != null ? user.getName() : "<null>";
return input + ", user=" + name;
}
@GET
@Path("/echoNeedsToken2Role")
@RolesAllowed("Token2Role")
public String echoNeedsToken2Role(@Context SecurityContext sec, @QueryParam("input") String input) {
Principal user = sec.getUserPrincipal();
return input + ", user=" + user.getName();
}
/**
* Validate that the SecurityContext#getUserPrincipal is a JsonWebToken
*
* @param sec
* @return
*/
@GET
@Path("/getPrincipalClass")
@RolesAllowed("Tester")
public String getPrincipalClass(@Context SecurityContext sec) {
Principal user = sec.getUserPrincipal();
boolean isJsonWebToken = user instanceof JsonWebToken;
return "isJsonWebToken:" + isJsonWebToken;
}
/**
* This endpoint requires a role that is mapped to the group1 role
*
* @return principal name
*/
@GET
@Path("/needsGroup1Mapping")
@RolesAllowed("Group1MappedRole")
public String needsGroup1Mapping(@Context SecurityContext sec) {
Principal user = sec.getUserPrincipal();
if (sec.isUserInRole("group1")) {
return user.getName();
} else {
return "User not in role group1";
}
}
/**
* This endpoint requires a Tester role, and also validates that the caller has the role Echoer by calling
* {@linkplain SecurityContext#isUserInRole(String)}.
*
* @return principal name or FORBIDDEN error
*/
@GET
@Path("/checkIsUserInRole")
@RolesAllowed("Tester")
public Response checkIsUserInRole(@Context SecurityContext sec) {
Principal user = sec.getUserPrincipal();
Response response;
if (!sec.isUserInRole("Echoer")) {
response = Response.status(new Response.StatusType() {
@Override
public int getStatusCode() {
return Response.Status.FORBIDDEN.getStatusCode();
}
@Override
public Response.Status.Family getFamily() {
return Response.Status.FORBIDDEN.getFamily();
}
@Override
public String getReasonPhrase() {
return "SecurityContext.isUserInRole(Echoer) was false";
}
}).build();
} else {
response = Response.ok(user.getName(), MediaType.TEXT_PLAIN).build();
}
return response;
}
@GET
@Path("/authenticated")
@Authenticated
public String checkAuthenticated(@Context SecurityContext sec) {
if (sec.getUserPrincipal() != null) {
return sec.getUserPrincipal().getName();
}
return "FAILED";
}
@GET
@Path("/getInjectedPrincipal")
@RolesAllowed("Tester")
public String getInjectedPrincipal(@Context SecurityContext sec) {
boolean isJsonWebToken = this.jwtPrincipal instanceof JsonWebToken;
return "isJsonWebToken:" + isJsonWebToken;
}
@POST
@Path("/postInjectedPrincipal")
@RolesAllowed("Tester")
public String postInjectedPrincipal(String body) {
return body + jwtPrincipal.getName();
}
@POST
@Path("/postInjectedPrincipalJson")
@RolesAllowed("Tester")
@Consumes("application/json")
public String postInjectedPrincipalJson(User user) {
return "name:" + user.getName() + ",principal:" + jwtPrincipal.getName();
}
@GET
@Path("/heartbeat")
@PermitAll
public String heartbeat() {
return "Heartbeat: " + new Date(System.currentTimeMillis()).toString();
}
}
|
RolesEndpoint
|
java
|
apache__camel
|
core/camel-core/src/test/java/org/apache/camel/issues/SplitterUsingBeanReturningCloseableIteratorTest.java
|
{
"start": 2420,
"end": 3147
}
|
class ____ implements Iterator<String>, Closeable {
private static MyCloseableIterator singleton;
private boolean closed;
private MyCloseableIterator() {
}
public static MyCloseableIterator getInstance() {
if (singleton == null) {
singleton = new MyCloseableIterator();
}
return singleton;
}
@Override
public void close() {
closed = true;
}
public boolean isClosed() {
return closed;
}
@Override
public boolean hasNext() {
return true;
}
@Override
public String next() {
throw new RuntimeException("will be closed");
}
@Override
public void remove() {
}
}
|
MyCloseableIterator
|
java
|
assertj__assertj-core
|
assertj-tests/assertj-integration-tests/assertj-core-tests/src/test/java/org/assertj/tests/core/api/junit/jupiter/SoftAssertionsExtension_InjectionSanityChecking_Test.java
|
{
"start": 3843,
"end": 4227
}
|
class ____ extends TestBase {
@InjectSoftAssertions
String usp;
@Override
@Test
void myTest() {}
}
@Test
void wrong_type_throws_exception() {
assertThatTest(WrongType.class).isInstanceOf(ExtensionConfigurationException.class)
.hasMessage("[usp] field is not a SoftAssertionsProvider (java.lang.String).");
}
}
|
WrongType
|
java
|
spring-projects__spring-security
|
web/src/main/java/org/springframework/security/web/server/authentication/ott/DefaultServerGenerateOneTimeTokenRequestResolver.java
|
{
"start": 1158,
"end": 2106
}
|
class ____
implements ServerGenerateOneTimeTokenRequestResolver {
private static final String USERNAME = "username";
private static final Duration DEFAULT_EXPIRES_IN = Duration.ofMinutes(5);
private Duration expiresIn = DEFAULT_EXPIRES_IN;
@Override
@SuppressWarnings("NullAway") // https://github.com/uber/NullAway/issues/1290
public Mono<GenerateOneTimeTokenRequest> resolve(ServerWebExchange exchange) {
// @formatter:off
return exchange.getFormData()
.mapNotNull((data) -> data.getFirst(USERNAME))
.switchIfEmpty(Mono.empty())
.map((username) -> new GenerateOneTimeTokenRequest(username, this.expiresIn));
// @formatter:on
}
/**
* Sets one-time token expiration time
* @param expiresIn one-time token expiration time
*/
public void setExpiresIn(Duration expiresIn) {
Assert.notNull(expiresIn, "expiresIn cannot be null");
this.expiresIn = expiresIn;
}
}
|
DefaultServerGenerateOneTimeTokenRequestResolver
|
java
|
elastic__elasticsearch
|
x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/autodetect/output/RetryableUpdateModelSnapshotActionTests.java
|
{
"start": 1419,
"end": 6963
}
|
class ____ extends ESTestCase {
private static final String JOB_ID = "valid_id";
private Client client;
private ThreadPool threadPool;
@Before
public void setUpMocks() {
client = mock(Client.class);
threadPool = mock(ThreadPool.class);
when(threadPool.executor(any())).thenReturn(DIRECT_EXECUTOR_SERVICE);
doAnswer(invocationOnMock -> {
Runnable runnable = (Runnable) invocationOnMock.getArguments()[0];
runnable.run();
return null;
}).when(threadPool).schedule(any(), any(), any());
when(client.threadPool()).thenReturn(threadPool);
when(threadPool.getThreadContext()).thenReturn(new ThreadContext(Settings.EMPTY));
}
public void testFirstTimeSuccess() {
PutJobAction.Response response = mock(PutJobAction.Response.class);
doAnswer(invocationOnMock -> {
@SuppressWarnings("unchecked")
ActionListener<PutJobAction.Response> listener = (ActionListener<PutJobAction.Response>) invocationOnMock.getArguments()[2];
listener.onResponse(response);
return null;
}).when(client).execute(any(), any(), any());
AtomicReference<PutJobAction.Response> storedResponse = new AtomicReference<>();
UpdateJobAction.Request updateRequest = new UpdateJobAction.Request(JOB_ID, new JobUpdate.Builder(JOB_ID).build());
RetryableUpdateModelSnapshotAction updateModelSnapshotAction = new RetryableUpdateModelSnapshotAction(
client,
updateRequest,
new ActionListener<>() {
@Override
public void onResponse(PutJobAction.Response response) {
storedResponse.set(response);
}
@Override
public void onFailure(Exception e) {
fail(e);
}
}
);
updateModelSnapshotAction.run();
verify(threadPool, never()).schedule(any(), any(), any());
assertSame(response, storedResponse.get());
}
public void testRetriesNeeded() {
int numRetries = randomIntBetween(1, 5);
PutJobAction.Response response = mock(PutJobAction.Response.class);
AtomicInteger callCount = new AtomicInteger(0);
doAnswer(invocationOnMock -> {
@SuppressWarnings("unchecked")
ActionListener<PutJobAction.Response> listener = (ActionListener<PutJobAction.Response>) invocationOnMock.getArguments()[2];
if (callCount.incrementAndGet() > numRetries) {
listener.onResponse(response);
} else {
listener.onFailure(new Exception());
}
return null;
}).when(client).execute(any(), any(), any());
AtomicReference<PutJobAction.Response> storedResponse = new AtomicReference<>();
UpdateJobAction.Request updateRequest = new UpdateJobAction.Request(JOB_ID, new JobUpdate.Builder(JOB_ID).build());
RetryableUpdateModelSnapshotAction updateModelSnapshotAction = new RetryableUpdateModelSnapshotAction(
client,
updateRequest,
new ActionListener<>() {
@Override
public void onResponse(PutJobAction.Response response) {
storedResponse.set(response);
}
@Override
public void onFailure(Exception e) {
fail(e);
}
}
);
updateModelSnapshotAction.run();
verify(threadPool, times(numRetries)).schedule(any(), any(), any());
assertSame(response, storedResponse.get());
}
public void testCompleteFailure() {
int numRetries = randomIntBetween(1, 5);
AtomicInteger callCount = new AtomicInteger(0);
AtomicLong relativeTimeMs = new AtomicLong(0);
doAnswer(invocationOnMock -> {
@SuppressWarnings("unchecked")
ActionListener<PutJobAction.Response> listener = (ActionListener<PutJobAction.Response>) invocationOnMock.getArguments()[2];
if (callCount.incrementAndGet() > numRetries) {
relativeTimeMs.set(TimeValue.timeValueMinutes(5).millis() + 1);
}
listener.onFailure(new Exception(Long.toString(relativeTimeMs.get())));
return null;
}).when(client).execute(any(), any(), any());
doAnswer(invocationOnMock -> relativeTimeMs.get()).when(threadPool).relativeTimeInMillis();
AtomicReference<Exception> storedFailure = new AtomicReference<>();
UpdateJobAction.Request updateRequest = new UpdateJobAction.Request(JOB_ID, new JobUpdate.Builder(JOB_ID).build());
RetryableUpdateModelSnapshotAction updateModelSnapshotAction = new RetryableUpdateModelSnapshotAction(
client,
updateRequest,
new ActionListener<>() {
@Override
public void onResponse(PutJobAction.Response response) {
fail("this should not be called");
}
@Override
public void onFailure(Exception e) {
storedFailure.set(e);
}
}
);
updateModelSnapshotAction.run();
verify(threadPool, times(numRetries)).schedule(any(), any(), any());
assertEquals(Long.toString(relativeTimeMs.get()), storedFailure.get().getMessage());
}
}
|
RetryableUpdateModelSnapshotActionTests
|
java
|
apache__hadoop
|
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/AuthorizationException.java
|
{
"start": 1109,
"end": 1298
}
|
class ____ authorization-related issues.
*
* This class <em>does not</em> provide the stack trace for security purposes.
*/
@InterfaceAudience.Public
@InterfaceStability.Evolving
public
|
for
|
java
|
FasterXML__jackson-databind
|
src/test/java/tools/jackson/databind/interop/JDKProxyTest.java
|
{
"start": 607,
"end": 1019
}
|
class ____ implements IPlanet {
private String name;
public Planet() { }
public Planet(String s) { name = s; }
@Override
public String getName(){return name;}
@Override
public String setName(String iName) {name = iName;
return name;
}
}
// [databind#5416]
// IMPORTANT! Must be "public" for problem to occur
public
|
Planet
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/mapping/inheritance/joined/JoinedInheritanceTest.java
|
{
"start": 5439,
"end": 5926
}
|
class ____ {
private Integer id;
private String name;
public Customer() {
}
public Customer(Integer id, String name) {
this.id = id;
this.name = name;
}
@Id
public Integer getId() {
return id;
}
public void setId(Integer id) {
this.id = id;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
}
@Entity(name = "DomesticCustomer")
@Table(name = "DomesticCustomer")
public static
|
Customer
|
java
|
spring-projects__spring-boot
|
module/spring-boot-jackson2/src/main/java/org/springframework/boot/jackson2/JsonMixinModuleEntriesBeanRegistrationAotProcessor.java
|
{
"start": 2002,
"end": 2465
}
|
class ____ implements BeanRegistrationAotProcessor {
@Override
public @Nullable BeanRegistrationAotContribution processAheadOfTime(RegisteredBean registeredBean) {
if (registeredBean.getBeanClass().equals(JsonMixinModuleEntries.class)) {
return BeanRegistrationAotContribution
.withCustomCodeFragments((codeFragments) -> new AotContribution(codeFragments, registeredBean));
}
return null;
}
static
|
JsonMixinModuleEntriesBeanRegistrationAotProcessor
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/common/time/LocalDateTimeUtils.java
|
{
"start": 605,
"end": 1278
}
|
class ____ {
public static LocalDateTime truncateToMonths(LocalDateTime dateTime, int months) {
int totalMonths = (dateTime.getYear() - 1) * 12 + dateTime.getMonthValue() - 1;
int truncatedMonths = Math.floorDiv(totalMonths, months) * months;
return LocalDateTime.of(LocalDate.of(truncatedMonths / 12 + 1, truncatedMonths % 12 + 1, 1), LocalTime.MIDNIGHT);
}
public static LocalDateTime truncateToYears(LocalDateTime dateTime, int years) {
int truncatedYear = Math.floorDiv(dateTime.getYear() - 1, years) * years + 1;
return LocalDateTime.of(LocalDate.of(truncatedYear, 1, 1), LocalTime.MIDNIGHT);
}
}
|
LocalDateTimeUtils
|
java
|
ReactiveX__RxJava
|
src/test/java/io/reactivex/rxjava3/internal/operators/flowable/FlowablePublishTest.java
|
{
"start": 1675,
"end": 53100
}
|
class ____ extends RxJavaTest {
@Test
public void publish() throws InterruptedException {
final AtomicInteger counter = new AtomicInteger();
ConnectableFlowable<String> f = Flowable.unsafeCreate(new Publisher<String>() {
@Override
public void subscribe(final Subscriber<? super String> subscriber) {
subscriber.onSubscribe(new BooleanSubscription());
new Thread(new Runnable() {
@Override
public void run() {
counter.incrementAndGet();
subscriber.onNext("one");
subscriber.onComplete();
}
}).start();
}
}).publish();
final CountDownLatch latch = new CountDownLatch(2);
// subscribe once
f.subscribe(new Consumer<String>() {
@Override
public void accept(String v) {
assertEquals("one", v);
latch.countDown();
}
});
// subscribe again
f.subscribe(new Consumer<String>() {
@Override
public void accept(String v) {
assertEquals("one", v);
latch.countDown();
}
});
Disposable connection = f.connect();
try {
if (!latch.await(1000, TimeUnit.MILLISECONDS)) {
fail("subscriptions did not receive values");
}
assertEquals(1, counter.get());
} finally {
connection.dispose();
}
}
@Test
public void backpressureFastSlow() {
ConnectableFlowable<Integer> is = Flowable.range(1, Flowable.bufferSize() * 2).publish();
Flowable<Integer> fast = is.observeOn(Schedulers.computation())
.doOnComplete(new Action() {
@Override
public void run() {
System.out.println("^^^^^^^^^^^^^ completed FAST");
}
});
Flowable<Integer> slow = is.observeOn(Schedulers.computation()).map(new Function<Integer, Integer>() {
int c;
@Override
public Integer apply(Integer i) {
if (c == 0) {
try {
Thread.sleep(500);
} catch (InterruptedException e) {
}
}
c++;
return i;
}
}).doOnComplete(new Action() {
@Override
public void run() {
System.out.println("^^^^^^^^^^^^^ completed SLOW");
}
});
TestSubscriber<Integer> ts = new TestSubscriber<>();
Flowable.merge(fast, slow).subscribe(ts);
is.connect();
ts.awaitDone(5, TimeUnit.SECONDS);
ts.assertNoErrors();
assertEquals(Flowable.bufferSize() * 4, ts.values().size());
}
// use case from https://github.com/ReactiveX/RxJava/issues/1732
@Test
public void takeUntilWithPublishedStreamUsingSelector() {
final AtomicInteger emitted = new AtomicInteger();
Flowable<Integer> xs = Flowable.range(0, Flowable.bufferSize() * 2).doOnNext(new Consumer<Integer>() {
@Override
public void accept(Integer t1) {
emitted.incrementAndGet();
}
});
TestSubscriber<Integer> ts = new TestSubscriber<>();
xs.publish(new Function<Flowable<Integer>, Flowable<Integer>>() {
@Override
public Flowable<Integer> apply(Flowable<Integer> xs) {
return xs.takeUntil(xs.skipWhile(new Predicate<Integer>() {
@Override
public boolean test(Integer i) {
return i <= 3;
}
}));
}
}).subscribe(ts);
ts.awaitDone(5, TimeUnit.SECONDS);
ts.assertNoErrors();
ts.assertValues(0, 1, 2, 3);
assertEquals(5, emitted.get());
System.out.println(ts.values());
}
// use case from https://github.com/ReactiveX/RxJava/issues/1732
@Test
public void takeUntilWithPublishedStream() {
Flowable<Integer> xs = Flowable.range(0, Flowable.bufferSize() * 2);
TestSubscriber<Integer> ts = new TestSubscriber<>();
ConnectableFlowable<Integer> xsp = xs.publish();
xsp.takeUntil(xsp.skipWhile(new Predicate<Integer>() {
@Override
public boolean test(Integer i) {
return i <= 3;
}
})).subscribe(ts);
xsp.connect();
System.out.println(ts.values());
}
@Test
public void backpressureTwoConsumers() {
final AtomicInteger sourceEmission = new AtomicInteger();
final AtomicBoolean sourceUnsubscribed = new AtomicBoolean();
final Flowable<Integer> source = Flowable.range(1, 100)
.doOnNext(new Consumer<Integer>() {
@Override
public void accept(Integer t1) {
sourceEmission.incrementAndGet();
}
})
.doOnCancel(new Action() {
@Override
public void run() {
sourceUnsubscribed.set(true);
}
}).share();
;
final AtomicBoolean child1Unsubscribed = new AtomicBoolean();
final AtomicBoolean child2Unsubscribed = new AtomicBoolean();
final TestSubscriber<Integer> ts2 = new TestSubscriber<>();
final TestSubscriber<Integer> ts1 = new TestSubscriber<Integer>() {
@Override
public void onNext(Integer t) {
if (values().size() == 2) {
source.doOnCancel(new Action() {
@Override
public void run() {
child2Unsubscribed.set(true);
}
}).take(5).subscribe(ts2);
}
super.onNext(t);
}
};
source.doOnCancel(new Action() {
@Override
public void run() {
child1Unsubscribed.set(true);
}
}).take(5)
.subscribe(ts1);
ts1.awaitDone(5, TimeUnit.SECONDS);
ts2.awaitDone(5, TimeUnit.SECONDS);
ts1.assertNoErrors();
ts2.assertNoErrors();
assertTrue(sourceUnsubscribed.get());
assertTrue(child1Unsubscribed.get());
assertTrue(child2Unsubscribed.get());
ts1.assertValues(1, 2, 3, 4, 5);
ts2.assertValues(4, 5, 6, 7, 8);
assertEquals(8, sourceEmission.get());
}
@Test
public void connectWithNoSubscriber() {
TestScheduler scheduler = new TestScheduler();
ConnectableFlowable<Long> cf = Flowable.interval(10, 10, TimeUnit.MILLISECONDS, scheduler).take(3).publish();
cf.connect();
// Emit 0
scheduler.advanceTimeBy(15, TimeUnit.MILLISECONDS);
TestSubscriber<Long> subscriber = new TestSubscriber<>();
cf.subscribe(subscriber);
// Emit 1 and 2
scheduler.advanceTimeBy(50, TimeUnit.MILLISECONDS);
// 3.x: Flowable.publish no longer drains the input buffer if there are no subscribers
subscriber.assertResult(0L, 1L, 2L);
}
@Test
public void subscribeAfterDisconnectThenConnect() {
ConnectableFlowable<Integer> source = Flowable.just(1).publish();
TestSubscriberEx<Integer> ts1 = new TestSubscriberEx<>();
source.subscribe(ts1);
Disposable connection = source.connect();
ts1.assertValue(1);
ts1.assertNoErrors();
ts1.assertTerminated();
source.reset();
TestSubscriberEx<Integer> ts2 = new TestSubscriberEx<>();
source.subscribe(ts2);
Disposable connection2 = source.connect();
ts2.assertValue(1);
ts2.assertNoErrors();
ts2.assertTerminated();
System.out.println(connection);
System.out.println(connection2);
}
@Test
public void noSubscriberRetentionOnCompleted() {
FlowablePublish<Integer> source = (FlowablePublish<Integer>)Flowable.just(1).publish();
TestSubscriberEx<Integer> ts1 = new TestSubscriberEx<>();
source.subscribe(ts1);
ts1.assertNoValues();
ts1.assertNoErrors();
ts1.assertNotComplete();
source.connect();
ts1.assertValue(1);
ts1.assertNoErrors();
ts1.assertTerminated();
assertEquals(0, source.current.get().subscribers.get().length);
}
@Test
public void nonNullConnection() {
ConnectableFlowable<Object> source = Flowable.never().publish();
assertNotNull(source.connect());
assertNotNull(source.connect());
}
@Test
public void noDisconnectSomeoneElse() {
ConnectableFlowable<Object> source = Flowable.never().publish();
Disposable connection1 = source.connect();
Disposable connection2 = source.connect();
connection1.dispose();
Disposable connection3 = source.connect();
connection2.dispose();
assertTrue(checkPublishDisposed(connection1));
assertTrue(checkPublishDisposed(connection2));
assertFalse(checkPublishDisposed(connection3));
}
@SuppressWarnings("unchecked")
static boolean checkPublishDisposed(Disposable d) {
return ((FlowablePublish.PublishConnection<Object>)d).isDisposed();
}
@Test
public void zeroRequested() {
ConnectableFlowable<Integer> source = Flowable.just(1).publish();
TestSubscriberEx<Integer> ts = new TestSubscriberEx<>(0L);
source.subscribe(ts);
ts.assertNoValues();
ts.assertNoErrors();
ts.assertNotComplete();
source.connect();
ts.assertNoValues();
ts.assertNoErrors();
ts.assertNotComplete();
ts.request(5);
ts.assertValue(1);
ts.assertNoErrors();
ts.assertTerminated();
}
@Test
public void connectIsIdempotent() {
final AtomicInteger calls = new AtomicInteger();
Flowable<Integer> source = Flowable.unsafeCreate(new Publisher<Integer>() {
@Override
public void subscribe(Subscriber<? super Integer> t) {
t.onSubscribe(new BooleanSubscription());
calls.getAndIncrement();
}
});
ConnectableFlowable<Integer> conn = source.publish();
assertEquals(0, calls.get());
conn.connect();
conn.connect();
assertEquals(1, calls.get());
conn.connect().dispose();
conn.connect();
conn.connect();
assertEquals(2, calls.get());
}
@Test
public void syncFusedObserveOn() {
ConnectableFlowable<Integer> cf = Flowable.range(0, 1000).publish();
Flowable<Integer> obs = cf.observeOn(Schedulers.computation());
for (int i = 0; i < 1000; i++) {
for (int j = 1; j < 6; j++) {
List<TestSubscriberEx<Integer>> tss = new ArrayList<>();
for (int k = 1; k < j; k++) {
TestSubscriberEx<Integer> ts = new TestSubscriberEx<>();
tss.add(ts);
obs.subscribe(ts);
}
Disposable connection = cf.connect();
for (TestSubscriberEx<Integer> ts : tss) {
ts.awaitDone(5, TimeUnit.SECONDS)
.assertSubscribed()
.assertValueCount(1000)
.assertNoErrors()
.assertComplete();
}
connection.dispose();
}
}
}
@Test
public void syncFusedObserveOn2() {
ConnectableFlowable<Integer> cf = Flowable.range(0, 1000).publish();
Flowable<Integer> obs = cf.observeOn(ImmediateThinScheduler.INSTANCE);
for (int i = 0; i < 1000; i++) {
for (int j = 1; j < 6; j++) {
List<TestSubscriberEx<Integer>> tss = new ArrayList<>();
for (int k = 1; k < j; k++) {
TestSubscriberEx<Integer> ts = new TestSubscriberEx<>();
tss.add(ts);
obs.subscribe(ts);
}
Disposable connection = cf.connect();
for (TestSubscriberEx<Integer> ts : tss) {
ts.awaitDone(5, TimeUnit.SECONDS)
.assertSubscribed()
.assertValueCount(1000)
.assertNoErrors()
.assertComplete();
}
connection.dispose();
}
}
}
@Test
public void asyncFusedObserveOn() {
ConnectableFlowable<Integer> cf = Flowable.range(0, 1000).observeOn(ImmediateThinScheduler.INSTANCE).publish();
for (int i = 0; i < 1000; i++) {
for (int j = 1; j < 6; j++) {
List<TestSubscriberEx<Integer>> tss = new ArrayList<>();
for (int k = 1; k < j; k++) {
TestSubscriberEx<Integer> ts = new TestSubscriberEx<>();
tss.add(ts);
cf.subscribe(ts);
}
Disposable connection = cf.connect();
for (TestSubscriberEx<Integer> ts : tss) {
ts.awaitDone(5, TimeUnit.SECONDS)
.assertSubscribed()
.assertValueCount(1000)
.assertNoErrors()
.assertComplete();
}
connection.dispose();
}
}
}
@Test
public void observeOn() {
ConnectableFlowable<Integer> cf = Flowable.range(0, 1000).hide().publish();
Flowable<Integer> obs = cf.observeOn(Schedulers.computation());
for (int i = 0; i < 1000; i++) {
for (int j = 1; j < 6; j++) {
List<TestSubscriberEx<Integer>> tss = new ArrayList<>();
for (int k = 1; k < j; k++) {
TestSubscriberEx<Integer> ts = new TestSubscriberEx<>();
tss.add(ts);
obs.subscribe(ts);
}
Disposable connection = cf.connect();
for (TestSubscriberEx<Integer> ts : tss) {
ts.awaitDone(5, TimeUnit.SECONDS)
.assertSubscribed()
.assertValueCount(1000)
.assertNoErrors()
.assertComplete();
}
connection.dispose();
}
}
}
@Test
public void source() {
Flowable<Integer> f = Flowable.never();
assertSame(f, (((HasUpstreamPublisher<?>)f.publish()).source()));
}
@Test
public void connectThrows() {
ConnectableFlowable<Integer> cf = Flowable.<Integer>empty().publish();
try {
cf.connect(new Consumer<Disposable>() {
@Override
public void accept(Disposable d) throws Exception {
throw new TestException();
}
});
} catch (TestException ex) {
// expected
}
}
@Test
public void addRemoveRace() {
for (int i = 0; i < TestHelper.RACE_DEFAULT_LOOPS; i++) {
final ConnectableFlowable<Integer> cf = Flowable.<Integer>empty().publish();
final TestSubscriber<Integer> ts = cf.test();
final TestSubscriber<Integer> ts2 = new TestSubscriber<>();
Runnable r1 = new Runnable() {
@Override
public void run() {
cf.subscribe(ts2);
}
};
Runnable r2 = new Runnable() {
@Override
public void run() {
ts.cancel();
}
};
TestHelper.race(r1, r2);
}
}
@Test
public void disposeOnArrival() {
ConnectableFlowable<Integer> cf = Flowable.<Integer>empty().publish();
cf.test(Long.MAX_VALUE, true).assertEmpty();
}
@Test
public void disposeOnArrival2() {
Flowable<Integer> co = Flowable.<Integer>never().publish().autoConnect();
co.test(Long.MAX_VALUE, true).assertEmpty();
}
@Test
public void dispose() {
TestHelper.checkDisposed(Flowable.never().publish());
TestHelper.checkDisposed(Flowable.never().publish(Functions.<Flowable<Object>>identity()));
}
@Test
public void empty() {
ConnectableFlowable<Integer> cf = Flowable.<Integer>empty().publish();
cf.connect();
}
@Test
public void take() {
ConnectableFlowable<Integer> cf = Flowable.range(1, 2).publish();
TestSubscriber<Integer> ts = cf.take(1).test();
cf.connect();
ts.assertResult(1);
}
@Test
public void just() {
final PublishProcessor<Integer> pp = PublishProcessor.create();
ConnectableFlowable<Integer> cf = pp.publish();
TestSubscriber<Integer> ts = new TestSubscriber<Integer>() {
@Override
public void onNext(Integer t) {
super.onNext(t);
pp.onComplete();
}
};
cf.subscribe(ts);
cf.connect();
pp.onNext(1);
ts.assertResult(1);
}
@Test
public void nextCancelRace() {
for (int i = 0; i < TestHelper.RACE_DEFAULT_LOOPS; i++) {
final PublishProcessor<Integer> pp = PublishProcessor.create();
final ConnectableFlowable<Integer> cf = pp.publish();
final TestSubscriber<Integer> ts = cf.test();
Runnable r1 = new Runnable() {
@Override
public void run() {
pp.onNext(1);
}
};
Runnable r2 = new Runnable() {
@Override
public void run() {
ts.cancel();
}
};
TestHelper.race(r1, r2);
}
}
@Test
public void badSource() {
List<Throwable> errors = TestHelper.trackPluginErrors();
try {
new Flowable<Integer>() {
@Override
protected void subscribeActual(Subscriber<? super Integer> subscriber) {
subscriber.onSubscribe(new BooleanSubscription());
subscriber.onNext(1);
subscriber.onComplete();
subscriber.onNext(2);
subscriber.onError(new TestException());
subscriber.onComplete();
}
}
.publish()
.autoConnect()
.test()
.assertResult(1);
TestHelper.assertUndeliverable(errors, 0, TestException.class);
} finally {
RxJavaPlugins.reset();
}
}
@Test
public void noErrorLoss() {
ConnectableFlowable<Object> cf = Flowable.error(new TestException()).publish();
cf.connect();
// 3.x: terminal events are always kept until reset.
cf.test()
.assertFailure(TestException.class);
}
@Test
public void subscribeDisconnectRace() {
for (int i = 0; i < TestHelper.RACE_DEFAULT_LOOPS; i++) {
final PublishProcessor<Integer> pp = PublishProcessor.create();
final ConnectableFlowable<Integer> cf = pp.publish();
final Disposable d = cf.connect();
final TestSubscriber<Integer> ts = new TestSubscriber<>();
Runnable r1 = new Runnable() {
@Override
public void run() {
d.dispose();
}
};
Runnable r2 = new Runnable() {
@Override
public void run() {
cf.subscribe(ts);
}
};
TestHelper.race(r1, r2);
}
}
@Test
public void selectorDisconnectsIndependentSource() {
PublishProcessor<Integer> pp = PublishProcessor.create();
pp.publish(new Function<Flowable<Integer>, Flowable<Integer>>() {
@Override
public Flowable<Integer> apply(Flowable<Integer> v) throws Exception {
return Flowable.range(1, 2);
}
})
.test()
.assertResult(1, 2);
assertFalse(pp.hasSubscribers());
}
@Test
public void selectorLatecommer() {
Flowable.range(1, 5)
.publish(new Function<Flowable<Integer>, Flowable<Integer>>() {
@Override
public Flowable<Integer> apply(Flowable<Integer> v) throws Exception {
return v.concatWith(v);
}
})
.test()
.assertResult(1, 2, 3, 4, 5);
}
@Test
public void mainError() {
Flowable.error(new TestException())
.publish(Functions.<Flowable<Object>>identity())
.test()
.assertFailure(TestException.class);
}
@Test
public void selectorInnerError() {
PublishProcessor<Integer> pp = PublishProcessor.create();
pp.publish(new Function<Flowable<Integer>, Flowable<Integer>>() {
@Override
public Flowable<Integer> apply(Flowable<Integer> v) throws Exception {
return Flowable.error(new TestException());
}
})
.test()
.assertFailure(TestException.class);
assertFalse(pp.hasSubscribers());
}
@Test
public void preNextConnect() {
for (int i = 0; i < TestHelper.RACE_DEFAULT_LOOPS; i++) {
final ConnectableFlowable<Integer> cf = Flowable.<Integer>empty().publish();
cf.connect();
Runnable r1 = new Runnable() {
@Override
public void run() {
cf.test();
}
};
TestHelper.race(r1, r1);
}
}
@Test
public void connectRace() {
for (int i = 0; i < TestHelper.RACE_DEFAULT_LOOPS; i++) {
final ConnectableFlowable<Integer> cf = Flowable.<Integer>empty().publish();
Runnable r1 = new Runnable() {
@Override
public void run() {
cf.connect();
}
};
TestHelper.race(r1, r1);
}
}
@Test
public void selectorCrash() {
Flowable.just(1).publish(new Function<Flowable<Integer>, Flowable<Object>>() {
@Override
public Flowable<Object> apply(Flowable<Integer> v) throws Exception {
throw new TestException();
}
})
.test()
.assertFailure(TestException.class);
}
@Test
public void pollThrows() {
Flowable.just(1)
.map(new Function<Integer, Object>() {
@Override
public Object apply(Integer v) throws Exception {
throw new TestException();
}
})
.compose(TestHelper.flowableStripBoundary())
.publish()
.autoConnect()
.test()
.assertFailure(TestException.class);
}
@Test
public void pollThrowsNoSubscribers() {
ConnectableFlowable<Integer> cf = Flowable.just(1, 2)
.map(new Function<Integer, Integer>() {
@Override
public Integer apply(Integer v) throws Exception {
if (v == 2) {
throw new TestException();
}
return v;
}
})
.compose(TestHelper.<Integer>flowableStripBoundary())
.publish();
TestSubscriber<Integer> ts = cf.take(1)
.test();
cf.connect();
ts.assertResult(1);
}
@Test
public void dryRunCrash() {
final TestSubscriber<Object> ts = new TestSubscriber<Object>(1L) {
@Override
public void onNext(Object t) {
super.onNext(t);
onComplete();
cancel();
}
};
Flowable<Object> source = Flowable.range(1, 10)
.map(new Function<Integer, Object>() {
@Override
public Object apply(Integer v) throws Exception {
if (v == 2) {
throw new TestException();
}
return v;
}
})
.publish()
.autoConnect();
source.subscribe(ts);
ts
.assertResult(1);
// 3.x: terminal events remain observable until reset
source.test()
.assertFailure(TestException.class);
}
@Test
public void overflowQueue() {
List<Throwable> errors = TestHelper.trackPluginErrors();
try {
Flowable.create(new FlowableOnSubscribe<Object>() {
@Override
public void subscribe(FlowableEmitter<Object> s) throws Exception {
for (int i = 0; i < 10; i++) {
s.onNext(i);
}
}
}, BackpressureStrategy.MISSING)
.publish(8)
.autoConnect()
.test(0L)
// 3.x emits errors last, even the full queue errors
.requestMore(10)
.assertFailure(QueueOverflowException.class, 0, 1, 2, 3, 4, 5, 6, 7);
TestHelper.assertError(errors, 0, QueueOverflowException.class);
} finally {
RxJavaPlugins.reset();
}
}
@Test
public void delayedUpstreamOnSubscribe() {
final Subscriber<?>[] sub = { null };
new Flowable<Integer>() {
@Override
protected void subscribeActual(Subscriber<? super Integer> s) {
sub[0] = s;
}
}
.publish()
.connect()
.dispose();
BooleanSubscription bs = new BooleanSubscription();
sub[0].onSubscribe(bs);
assertTrue(bs.isCancelled());
}
@Test
public void disposeRace() {
for (int i = 0; i < TestHelper.RACE_DEFAULT_LOOPS; i++) {
final AtomicReference<Disposable> ref = new AtomicReference<>();
final ConnectableFlowable<Integer> cf = new Flowable<Integer>() {
@Override
protected void subscribeActual(Subscriber<? super Integer> s) {
s.onSubscribe(new BooleanSubscription());
ref.set((Disposable)s);
}
}.publish();
cf.connect();
Runnable r1 = new Runnable() {
@Override
public void run() {
ref.get().dispose();
}
};
TestHelper.race(r1, r1);
}
}
@Test
public void removeNotPresent() {
final AtomicReference<PublishConnection<Integer>> ref = new AtomicReference<>();
final ConnectableFlowable<Integer> cf = new Flowable<Integer>() {
@Override
@SuppressWarnings("unchecked")
protected void subscribeActual(Subscriber<? super Integer> s) {
s.onSubscribe(new BooleanSubscription());
ref.set((PublishConnection<Integer>)s);
}
}.publish();
cf.connect();
ref.get().add(new InnerSubscription<>(new TestSubscriber<>(), ref.get()));
ref.get().remove(null);
}
@Test
public void subscriberSwap() {
final ConnectableFlowable<Integer> cf = Flowable.range(1, 5).publish();
cf.connect();
TestSubscriber<Integer> ts1 = new TestSubscriber<Integer>() {
@Override
public void onNext(Integer t) {
super.onNext(t);
cancel();
onComplete();
}
};
cf.subscribe(ts1);
ts1.assertResult(1);
TestSubscriber<Integer> ts2 = new TestSubscriber<>(0);
cf.subscribe(ts2);
ts2
.assertEmpty()
.requestMore(4)
.assertResult(2, 3, 4, 5);
}
@Test
public void subscriberLiveSwap() {
final ConnectableFlowable<Integer> cf = Flowable.range(1, 5).publish();
final TestSubscriber<Integer> ts2 = new TestSubscriber<>(0);
TestSubscriber<Integer> ts1 = new TestSubscriber<Integer>() {
@Override
public void onNext(Integer t) {
super.onNext(t);
cancel();
onComplete();
cf.subscribe(ts2);
}
};
cf.subscribe(ts1);
cf.connect();
ts1.assertResult(1);
ts2
.assertEmpty()
.requestMore(4)
.assertResult(2, 3, 4, 5);
}
@Test
public void selectorSubscriberSwap() {
final AtomicReference<Flowable<Integer>> ref = new AtomicReference<>();
Flowable.range(1, 5).publish(new Function<Flowable<Integer>, Publisher<Integer>>() {
@Override
public Publisher<Integer> apply(Flowable<Integer> f) throws Exception {
ref.set(f);
return Flowable.never();
}
}).test();
ref.get().take(2).test().assertResult(1, 2);
ref.get()
.test(0)
.assertEmpty()
.requestMore(2)
.assertValuesOnly(3, 4)
.requestMore(1)
.assertResult(3, 4, 5);
}
@Test
public void leavingSubscriberOverrequests() {
final AtomicReference<Flowable<Integer>> ref = new AtomicReference<>();
PublishProcessor<Integer> pp = PublishProcessor.create();
pp.publish(new Function<Flowable<Integer>, Publisher<Integer>>() {
@Override
public Publisher<Integer> apply(Flowable<Integer> f) throws Exception {
ref.set(f);
return Flowable.never();
}
}).test();
TestSubscriber<Integer> ts1 = ref.get().take(2).test();
pp.onNext(1);
pp.onNext(2);
ts1.assertResult(1, 2);
pp.onNext(3);
pp.onNext(4);
TestSubscriber<Integer> ts2 = ref.get().test(0L);
ts2.assertEmpty();
ts2.requestMore(2);
ts2.assertValuesOnly(3, 4);
}
// call a transformer only if the input is non-empty
@Test
public void composeIfNotEmpty() {
final FlowableTransformer<Integer, Integer> transformer = new FlowableTransformer<Integer, Integer>() {
@Override
public Publisher<Integer> apply(Flowable<Integer> g) {
return g.map(new Function<Integer, Integer>() {
@Override
public Integer apply(Integer v) throws Exception {
return v + 1;
}
});
}
};
final AtomicInteger calls = new AtomicInteger();
Flowable.range(1, 5)
.publish(new Function<Flowable<Integer>, Publisher<Integer>>() {
@Override
public Publisher<Integer> apply(final Flowable<Integer> shared)
throws Exception {
return shared.take(1).concatMap(new Function<Integer, Publisher<? extends Integer>>() {
@Override
public Publisher<? extends Integer> apply(Integer first)
throws Exception {
calls.incrementAndGet();
return transformer.apply(Flowable.just(first).concatWith(shared));
}
});
}
})
.test()
.assertResult(2, 3, 4, 5, 6);
assertEquals(1, calls.get());
}
// call a transformer only if the input is non-empty
@Test
public void composeIfNotEmptyNotFused() {
final FlowableTransformer<Integer, Integer> transformer = new FlowableTransformer<Integer, Integer>() {
@Override
public Publisher<Integer> apply(Flowable<Integer> g) {
return g.map(new Function<Integer, Integer>() {
@Override
public Integer apply(Integer v) throws Exception {
return v + 1;
}
});
}
};
final AtomicInteger calls = new AtomicInteger();
Flowable.range(1, 5).hide()
.publish(new Function<Flowable<Integer>, Publisher<Integer>>() {
@Override
public Publisher<Integer> apply(final Flowable<Integer> shared)
throws Exception {
return shared.take(1).concatMap(new Function<Integer, Publisher<? extends Integer>>() {
@Override
public Publisher<? extends Integer> apply(Integer first)
throws Exception {
calls.incrementAndGet();
return transformer.apply(Flowable.just(first).concatWith(shared));
}
});
}
})
.test()
.assertResult(2, 3, 4, 5, 6);
assertEquals(1, calls.get());
}
// call a transformer only if the input is non-empty
@Test
public void composeIfNotEmptyIsEmpty() {
final FlowableTransformer<Integer, Integer> transformer = new FlowableTransformer<Integer, Integer>() {
@Override
public Publisher<Integer> apply(Flowable<Integer> g) {
return g.map(new Function<Integer, Integer>() {
@Override
public Integer apply(Integer v) throws Exception {
return v + 1;
}
});
}
};
final AtomicInteger calls = new AtomicInteger();
Flowable.<Integer>empty().hide()
.publish(new Function<Flowable<Integer>, Publisher<Integer>>() {
@Override
public Publisher<Integer> apply(final Flowable<Integer> shared)
throws Exception {
return shared.take(1).concatMap(new Function<Integer, Publisher<? extends Integer>>() {
@Override
public Publisher<? extends Integer> apply(Integer first)
throws Exception {
calls.incrementAndGet();
return transformer.apply(Flowable.just(first).concatWith(shared));
}
});
}
})
.test()
.assertResult();
assertEquals(0, calls.get());
}
@Test
public void publishFunctionCancelOuterAfterOneInner() {
final AtomicReference<Flowable<Integer>> ref = new AtomicReference<>();
PublishProcessor<Integer> pp = PublishProcessor.create();
final TestSubscriber<Integer> ts = pp.publish(new Function<Flowable<Integer>, Publisher<Integer>>() {
@Override
public Publisher<Integer> apply(Flowable<Integer> f) throws Exception {
ref.set(f);
return Flowable.never();
}
}).test();
ref.get().subscribe(new TestSubscriber<Integer>() {
@Override
public void onNext(Integer t) {
super.onNext(t);
onComplete();
ts.cancel();
}
});
pp.onNext(1);
}
@Test
public void publishFunctionCancelOuterAfterOneInnerBackpressured() {
final AtomicReference<Flowable<Integer>> ref = new AtomicReference<>();
PublishProcessor<Integer> pp = PublishProcessor.create();
final TestSubscriber<Integer> ts = pp.publish(new Function<Flowable<Integer>, Publisher<Integer>>() {
@Override
public Publisher<Integer> apply(Flowable<Integer> f) throws Exception {
ref.set(f);
return Flowable.never();
}
}).test();
ref.get().subscribe(new TestSubscriber<Integer>(1L) {
@Override
public void onNext(Integer t) {
super.onNext(t);
onComplete();
ts.cancel();
}
});
pp.onNext(1);
}
@Test
public void publishCancelOneAsync() {
for (int i = 0; i < TestHelper.RACE_LONG_LOOPS; i++) {
final PublishProcessor<Integer> pp = PublishProcessor.create();
final AtomicReference<Flowable<Integer>> ref = new AtomicReference<>();
pp.publish(new Function<Flowable<Integer>, Publisher<Integer>>() {
@Override
public Publisher<Integer> apply(Flowable<Integer> f) throws Exception {
ref.set(f);
return Flowable.never();
}
}).test();
final TestSubscriber<Integer> ts1 = ref.get().test();
TestSubscriber<Integer> ts2 = ref.get().test();
Runnable r1 = new Runnable() {
@Override
public void run() {
pp.onNext(1);
}
};
Runnable r2 = new Runnable() {
@Override
public void run() {
ts1.cancel();
}
};
TestHelper.race(r1, r2);
ts2.assertValuesOnly(1);
}
}
    @Test
    public void publishCancelOneAsync2() {
        // Marks one subscriber's InnerSubscription as cancelled (Long.MIN_VALUE
        // sentinel) without removing it from the subscriber list; it must then
        // receive nothing while the other subscriber still gets the value.
        final PublishProcessor<Integer> pp = PublishProcessor.create();
        ConnectableFlowable<Integer> cf = pp.publish();
        final TestSubscriber<Integer> ts1 = new TestSubscriber<>();
        final AtomicReference<InnerSubscription<Integer>> ref = new AtomicReference<>();
        cf.subscribe(new FlowableSubscriber<Integer>() {
            @SuppressWarnings("unchecked")
            @Override
            public void onSubscribe(Subscription s) {
                ts1.onSubscribe(new BooleanSubscription());
                // pretend to be cancelled without removing it from the subscriber list
                ref.set((InnerSubscription<Integer>)s);
            }
            @Override
            public void onNext(Integer t) {
                ts1.onNext(t);
            }
            @Override
            public void onError(Throwable t) {
                ts1.onError(t);
            }
            @Override
            public void onComplete() {
                ts1.onComplete();
            }
        });
        TestSubscriber<Integer> ts2 = cf.test();
        cf.connect();
        // Long.MIN_VALUE is FlowablePublish's internal "cancelled" request marker.
        ref.get().set(Long.MIN_VALUE);
        pp.onNext(1);
        ts1.assertEmpty();
        ts2.assertValuesOnly(1);
    }
@Test
public void boundaryFusion() {
Flowable.range(1, 10000)
.observeOn(Schedulers.single())
.map(new Function<Integer, String>() {
@Override
public String apply(Integer t) throws Exception {
String name = Thread.currentThread().getName();
if (name.contains("RxSingleScheduler")) {
return "RxSingleScheduler";
}
return name;
}
})
.share()
.observeOn(Schedulers.computation())
.distinct()
.test()
.awaitDone(5, TimeUnit.SECONDS)
.assertResult("RxSingleScheduler");
}
@Test
public void badRequest() {
TestHelper.assertBadRequestReported(Flowable.range(1, 5).publish());
}
@Test
public void splitCombineSubscriberChangeAfterOnNext() {
Flowable<Integer> source = Flowable.range(0, 20)
.doOnSubscribe(new Consumer<Subscription>() {
@Override
public void accept(Subscription v) throws Exception {
System.out.println("Subscribed");
}
})
.publish(10)
.refCount()
;
Flowable<Integer> evenNumbers = source.filter(new Predicate<Integer>() {
@Override
public boolean test(Integer v) throws Exception {
return v % 2 == 0;
}
});
Flowable<Integer> oddNumbers = source.filter(new Predicate<Integer>() {
@Override
public boolean test(Integer v) throws Exception {
return v % 2 != 0;
}
});
final Single<Integer> getNextOdd = oddNumbers.first(0);
TestSubscriber<List<Integer>> ts = evenNumbers.concatMap(new Function<Integer, Publisher<List<Integer>>>() {
@Override
public Publisher<List<Integer>> apply(Integer v) throws Exception {
return Single.zip(
Single.just(v), getNextOdd,
new BiFunction<Integer, Integer, List<Integer>>() {
@Override
public List<Integer> apply(Integer a, Integer b) throws Exception {
return Arrays.asList( a, b );
}
}
)
.toFlowable();
}
})
.takeWhile(new Predicate<List<Integer>>() {
@Override
public boolean test(List<Integer> v) throws Exception {
return v.get(0) < 20;
}
})
.test();
ts
.assertResult(
Arrays.asList(0, 1),
Arrays.asList(2, 3),
Arrays.asList(4, 5),
Arrays.asList(6, 7),
Arrays.asList(8, 9),
Arrays.asList(10, 11),
Arrays.asList(12, 13),
Arrays.asList(14, 15),
Arrays.asList(16, 17),
Arrays.asList(18, 19)
);
}
@Test
public void splitCombineSubscriberChangeAfterOnNextFused() {
Flowable<Integer> source = Flowable.range(0, 20)
.publish(10)
.refCount()
;
Flowable<Integer> evenNumbers = source.filter(new Predicate<Integer>() {
@Override
public boolean test(Integer v) throws Exception {
return v % 2 == 0;
}
});
Flowable<Integer> oddNumbers = source.filter(new Predicate<Integer>() {
@Override
public boolean test(Integer v) throws Exception {
return v % 2 != 0;
}
});
final Single<Integer> getNextOdd = oddNumbers.first(0);
TestSubscriber<List<Integer>> ts = evenNumbers.concatMap(new Function<Integer, Publisher<List<Integer>>>() {
@Override
public Publisher<List<Integer>> apply(Integer v) throws Exception {
return Single.zip(
Single.just(v), getNextOdd,
new BiFunction<Integer, Integer, List<Integer>>() {
@Override
public List<Integer> apply(Integer a, Integer b) throws Exception {
return Arrays.asList( a, b );
}
}
)
.toFlowable();
}
})
.takeWhile(new Predicate<List<Integer>>() {
@Override
public boolean test(List<Integer> v) throws Exception {
return v.get(0) < 20;
}
})
.test();
ts
.assertResult(
Arrays.asList(0, 1),
Arrays.asList(2, 3),
Arrays.asList(4, 5),
Arrays.asList(6, 7),
Arrays.asList(8, 9),
Arrays.asList(10, 11),
Arrays.asList(12, 13),
Arrays.asList(14, 15),
Arrays.asList(16, 17),
Arrays.asList(18, 19)
);
}
@Test
public void altConnectCrash() {
try {
new FlowablePublish<>(Flowable.<Integer>empty(), 128)
.connect(new Consumer<Disposable>() {
@Override
public void accept(Disposable t) throws Exception {
throw new TestException();
}
});
fail("Should have thrown");
} catch (TestException expected) {
// expected
}
}
@Test
public void altConnectRace() {
for (int i = 0; i < TestHelper.RACE_LONG_LOOPS; i++) {
final ConnectableFlowable<Integer> cf =
new FlowablePublish<>(Flowable.<Integer>never(), 128);
Runnable r = new Runnable() {
@Override
public void run() {
cf.connect();
}
};
TestHelper.race(r, r);
}
}
@Test
public void fusedPollCrash() {
Flowable.range(1, 5)
.map(new Function<Integer, Object>() {
@Override
public Object apply(Integer v) throws Exception {
throw new TestException();
}
})
.compose(TestHelper.flowableStripBoundary())
.publish()
.refCount()
.test()
.assertFailure(TestException.class);
}
@Test
public void syncFusedNoRequest() {
Flowable.range(1, 5)
.publish(1)
.refCount()
.test()
.assertResult(1, 2, 3, 4, 5);
}
@Test
public void normalBackpressuredPolls() {
Flowable.range(1, 5)
.hide()
.publish(1)
.refCount()
.test()
.assertResult(1, 2, 3, 4, 5);
}
@Test
public void emptyHidden() {
Flowable.empty()
.hide()
.publish(1)
.refCount()
.test()
.assertResult();
}
@Test
public void emptyFused() {
Flowable.empty()
.publish(1)
.refCount()
.test()
.assertResult();
}
    @Test
    public void overflowQueueRefCount() {
        // The raw source ignores backpressure and emits two items into a
        // capacity-1 publish buffer; the overflow must be signaled as
        // QueueOverflowException after the first (requested) item.
        new Flowable<Integer>() {
            @Override
            protected void subscribeActual(Subscriber<? super Integer> s) {
                s.onSubscribe(new BooleanSubscription());
                s.onNext(1);
                s.onNext(2);
            }
        }
        .publish(1)
        .refCount()
        .test(0)
        .requestMore(1)
        .assertFailure(QueueOverflowException.class, 1);
    }
    @Test
    public void doubleErrorRefCount() {
        // A second onError after termination violates the protocol: the first
        // error reaches the subscriber, the second must go to RxJavaPlugins as
        // an undeliverable.
        List<Throwable> errors = TestHelper.trackPluginErrors();
        try {
            new Flowable<Integer>() {
                @Override
                protected void subscribeActual(Subscriber<? super Integer> s) {
                    s.onSubscribe(new BooleanSubscription());
                    s.onError(new TestException("one"));
                    s.onError(new TestException("two"));
                }
            }
            .publish(1)
            .refCount()
            .to(TestHelper.<Integer>testSubscriber(0L))
            .assertFailureAndMessage(TestException.class, "one");
            TestHelper.assertUndeliverable(errors, 0, TestException.class, "two");
            assertEquals(1, errors.size());
        } finally {
            RxJavaPlugins.reset();
        }
    }
    @Test
    public void onCompleteAvailableUntilReset() {
        // A terminated connection replays its completion to late subscribers
        // until reset() is called, after which a fresh cycle starts.
        ConnectableFlowable<Integer> cf = Flowable.just(1).publish();
        TestSubscriber<Integer> ts = cf.test();
        ts.assertEmpty();
        cf.connect();
        ts.assertResult(1);
        // late subscriber after termination: sees only the completion
        cf.test().assertResult();
        cf.reset();
        ts = cf.test();
        ts.assertEmpty();
        cf.connect();
        ts.assertResult(1);
    }
    @Test
    public void onErrorAvailableUntilReset() {
        // A terminal error is replayed to late subscribers until reset(),
        // after which a fresh connection re-runs the source.
        ConnectableFlowable<Integer> cf = Flowable.just(1)
                .concatWith(Flowable.<Integer>error(new TestException()))
                .publish();
        TestSubscriber<Integer> ts = cf.test();
        ts.assertEmpty();
        cf.connect();
        ts.assertFailure(TestException.class, 1);
        // late subscriber after termination: sees only the error
        cf.test().assertFailure(TestException.class);
        cf.reset();
        ts = cf.test();
        ts.assertEmpty();
        cf.connect();
        ts.assertFailure(TestException.class, 1);
    }
    @Test
    public void disposeResets() {
        // Disposing the connection detaches from the upstream and implicitly
        // resets the connectable so it can be connected again.
        PublishProcessor<Integer> pp = PublishProcessor.create();
        ConnectableFlowable<Integer> cf = pp.publish();
        assertFalse(pp.hasSubscribers());
        Disposable d = cf.connect();
        assertTrue(pp.hasSubscribers());
        d.dispose();
        assertFalse(pp.hasSubscribers());
        TestSubscriber<Integer> ts = cf.test();
        cf.connect();
        assertTrue(pp.hasSubscribers());
        pp.onNext(1);
        ts.assertValuesOnly(1);
    }
    // A crashing connection consumer on an already-connected flowable must
    // propagate its exception to the connect(Consumer) caller.
    @Test(expected = TestException.class)
    public void connectDisposeCrash() {
        ConnectableFlowable<Object> cf = Flowable.never().publish();
        cf.connect();
        cf.connect(d -> { throw new TestException(); });
    }
    // reset() before any connection has been made must be a harmless no-op.
    @Test
    public void resetWhileNotConnectedIsNoOp() {
        ConnectableFlowable<Object> cf = Flowable.never().publish();
        cf.reset();
    }
    // reset() while a connection is still active must also be a no-op
    // (only a terminated connection can be reset).
    @Test
    public void resetWhileActiveIsNoOp() {
        ConnectableFlowable<Object> cf = Flowable.never().publish();
        cf.connect();
        cf.reset();
    }
    @Test
    public void crossCancelOnComplete() {
        // ts2 cancels ts1 from within its own onComplete; ts1 (subscribed
        // after ts2) must then observe nothing at all.
        TestSubscriber<Integer> ts1 = new TestSubscriber<>();
        TestSubscriber<Integer> ts2 = new TestSubscriber<Integer>() {
            @Override
            public void onComplete() {
                super.onComplete();
                ts1.cancel();
            }
        };
        PublishProcessor<Integer> pp = PublishProcessor.create();
        ConnectableFlowable<Integer> cf = pp.publish();
        cf.subscribe(ts2);
        cf.subscribe(ts1);
        cf.connect();
        pp.onComplete();
        ts2.assertResult();
        ts1.assertEmpty();
    }
    @Test
    public void crossCancelOnError() {
        // ts2 cancels ts1 from within its own onError; ts1 (subscribed after
        // ts2) must then observe nothing at all.
        TestSubscriber<Integer> ts1 = new TestSubscriber<>();
        TestSubscriber<Integer> ts2 = new TestSubscriber<Integer>() {
            @Override
            public void onError(Throwable t) {
                super.onError(t);
                ts1.cancel();
            }
        };
        PublishProcessor<Integer> pp = PublishProcessor.create();
        ConnectableFlowable<Integer> cf = pp.publish();
        cf.subscribe(ts2);
        cf.subscribe(ts1);
        cf.connect();
        pp.onError(new TestException());
        ts2.assertFailure(TestException.class);
        ts1.assertEmpty();
    }
    @Test
    public void disposeNoNeedForReset() {
        // After disposing the active connection, a fresh connect() must start
        // a new cycle without requiring an explicit reset() first.
        PublishProcessor<Integer> pp = PublishProcessor.create();
        ConnectableFlowable<Integer> cf = pp.publish();
        TestSubscriber<Integer> ts = cf.test();
        Disposable d = cf.connect();
        pp.onNext(1);
        d.dispose();
        ts = cf.test();
        ts.assertEmpty();
        cf.connect();
        ts.assertEmpty();
        pp.onNext(2);
        ts.assertValuesOnly(2);
    }
}
|
FlowablePublishTest
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/property/access/spi/EnhancedSetterMethodImpl.java
|
{
"start": 950,
"end": 1793
}
|
class ____ extends SetterMethodImpl {
	// Name of the property this setter writes; reused for enhanced-injection handling.
	private final String propertyName;
	// Enhancement-state flags for the container class, computed once at construction.
	private final int enhancementState;
	/**
	 * Creates a setter for a bytecode-enhanced entity property.
	 *
	 * @param containerClass the class declaring the setter
	 * @param propertyName the name of the property being set
	 * @param setterMethod the reflective setter to delegate to
	 */
	public EnhancedSetterMethodImpl(Class<?> containerClass, String propertyName, Method setterMethod) {
		super( containerClass, propertyName, setterMethod );
		this.propertyName = propertyName;
		// NOTE(review): passes the setter's *return* type to determineEnhancementState —
		// confirm this is intended rather than the setter's parameter type.
		this.enhancementState = determineEnhancementState( containerClass, setterMethod.getReturnType() );
	}
	// Delegates the actual write, then performs enhancement-specific
	// post-injection bookkeeping for the property.
	@Override
	public void set(Object target, @Nullable Object value) {
		super.set( target, value );
		handleEnhancedInjection( target, value, enhancementState, propertyName );
	}
	// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
	// serialization
	// Serialize via a compact proxy instead of this instance.
	@Serial
	private Object writeReplace() {
		return new SerialForm( getContainerClass(), propertyName, getMethod() );
	}
private static
|
EnhancedSetterMethodImpl
|
java
|
quarkusio__quarkus
|
extensions/vertx-http/deployment-spi/src/main/java/io/quarkus/vertx/http/deployment/spi/RouteBuildItem.java
|
{
"start": 722,
"end": 1399
}
|
/**
 * How a route's handler is registered on the Vert.x route and on which
 * thread it is invoked.
 */
enum HandlerType {
    /**
     * A regular route handler invoked on the event loop.
     *
     * @see io.vertx.ext.web.Route#handler(Handler)
     */
    NORMAL,
    /**
     * A blocking route handler, invoked on a worker thread.
     *
     * @see io.vertx.ext.web.Route#blockingHandler(Handler)
     */
    BLOCKING,
    /**
     * A failure handler, invoked when an exception is thrown from a route handler.
     * This is invoked on the event loop.
     *
     * @see io.vertx.ext.web.Route#failureHandler(Handler)
     */
    FAILURE
}
/**
* Type of routes.
*/
public
|
HandlerType
|
java
|
spring-projects__spring-boot
|
core/spring-boot/src/main/java/org/springframework/boot/availability/AvailabilityState.java
|
{
"start": 752,
"end": 912
}
|
interface ____
* usually implemented on an {@code enum} type.
*
* @author Phillip Webb
* @since 2.3.0
* @see LivenessState
* @see ReadinessState
*/
public
|
is
|
java
|
apache__dubbo
|
dubbo-registry/dubbo-registry-zookeeper/src/main/java/org/apache/dubbo/registry/zookeeper/ZookeeperInstance.java
|
{
"start": 1047,
"end": 2072
}
|
/**
 * Plain data holder describing a service instance registered in Zookeeper:
 * an instance id, a service name and a free-form metadata map. The class is
 * mutable because it is populated by serialization frameworks through the
 * setters and the hidden no-arg constructor.
 */
class ZookeeperInstance {
    private String id;
    private String name;
    private Map<String, String> metadata = new HashMap<>();

    /** Reserved for deserialization frameworks. */
    @SuppressWarnings("unused")
    private ZookeeperInstance() {}

    public ZookeeperInstance(String id, String name, Map<String, String> metadata) {
        this.id = id;
        this.name = name;
        this.metadata = metadata;
    }

    public String getId() {
        return this.id;
    }

    public String getName() {
        return this.name;
    }

    public void setId(String id) {
        this.id = id;
    }

    public void setName(String name) {
        this.name = name;
    }

    public Map<String, String> getMetadata() {
        return this.metadata;
    }

    public void setMetadata(Map<String, String> metadata) {
        this.metadata = metadata;
    }

    @Override
    public String toString() {
        return "ZookeeperInstance{" + "id='" + this.id + '\'' + ", name='" + this.name + '\'' + ", metadata="
                + this.metadata + '}';
    }
}
|
ZookeeperInstance
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/TransportNodesSnapshotsStatus.java
|
{
"start": 5409,
"end": 5666
}
|
class ____ extends BaseNodesRequest {
private final Snapshot[] snapshots;
public Request(String[] nodesIds, Snapshot[] snapshots) {
super(nodesIds);
this.snapshots = snapshots;
}
}
public static
|
Request
|
java
|
alibaba__fastjson
|
src/test/java/com/alibaba/json/bvt/jdk8/DurationTest.java
|
{
"start": 166,
"end": 873
}
|
class ____ extends TestCase {
    // Round-trips a Duration property through JSON serialization and back,
    // asserting the value is preserved.
    public void test_for_issue() throws Exception {
        VO vo = new VO();
        vo.setDate(Duration.ofHours(3));
        String text = JSON.toJSONString(vo);
        System.out.println(text);
        VO vo1 = JSON.parseObject(text, VO.class);
        Assert.assertEquals(vo.getDate(), vo1.getDate());
    }
    // Parses a Duration from a field-style JSON object (seconds/nano/units)
    // rather than from an ISO-8601 string; 5,184,000 seconds == 1440 hours.
    public void test_for_issue_1() throws Exception {
        String text = "{\"zero\":false,\"seconds\":5184000,\"negative\":false,\"nano\":0,\"units\":[\"SECONDS\",\"NANOS\"]}";
        Duration duration = JSON.parseObject(text, Duration.class);
        assertEquals("PT1440H", duration.toString());
    }
public static
|
DurationTest
|
java
|
ReactiveX__RxJava
|
src/test/java/io/reactivex/rxjava3/internal/operators/observable/ObservableConcatMapEagerTest.java
|
{
"start": 1312,
"end": 33709
}
|
class ____ extends RxJavaTest {
@Test
public void normal() {
Observable.range(1, 5)
.concatMapEager(new Function<Integer, ObservableSource<Integer>>() {
@Override
public ObservableSource<Integer> apply(Integer t) {
return Observable.range(t, 2);
}
})
.test()
.assertResult(1, 2, 2, 3, 3, 4, 4, 5, 5, 6);
}
@Test
public void normalDelayBoundary() {
Observable.range(1, 5)
.concatMapEagerDelayError(new Function<Integer, ObservableSource<Integer>>() {
@Override
public ObservableSource<Integer> apply(Integer t) {
return Observable.range(t, 2);
}
}, false)
.test()
.assertResult(1, 2, 2, 3, 3, 4, 4, 5, 5, 6);
}
@Test
public void normalDelayEnd() {
Observable.range(1, 5)
.concatMapEagerDelayError(new Function<Integer, ObservableSource<Integer>>() {
@Override
public ObservableSource<Integer> apply(Integer t) {
return Observable.range(t, 2);
}
}, true)
.test()
.assertResult(1, 2, 2, 3, 3, 4, 4, 5, 5, 6);
}
@Test
public void mainErrorsDelayBoundary() {
PublishSubject<Integer> main = PublishSubject.create();
final PublishSubject<Integer> inner = PublishSubject.create();
TestObserverEx<Integer> to = main.concatMapEagerDelayError(
new Function<Integer, ObservableSource<Integer>>() {
@Override
public ObservableSource<Integer> apply(Integer t) {
return inner;
}
}, false).to(TestHelper.<Integer>testConsumer());
main.onNext(1);
inner.onNext(2);
to.assertValue(2);
main.onError(new TestException("Forced failure"));
to.assertNoErrors();
inner.onNext(3);
inner.onComplete();
to.assertFailureAndMessage(TestException.class, "Forced failure", 2, 3);
}
@Test
public void mainErrorsDelayEnd() {
PublishSubject<Integer> main = PublishSubject.create();
final PublishSubject<Integer> inner = PublishSubject.create();
TestObserverEx<Integer> to = main.concatMapEagerDelayError(
new Function<Integer, ObservableSource<Integer>>() {
@Override
public ObservableSource<Integer> apply(Integer t) {
return inner;
}
}, true).to(TestHelper.<Integer>testConsumer());
main.onNext(1);
main.onNext(2);
inner.onNext(2);
to.assertValue(2);
main.onError(new TestException("Forced failure"));
to.assertNoErrors();
inner.onNext(3);
inner.onComplete();
to.assertFailureAndMessage(TestException.class, "Forced failure", 2, 3, 2, 3);
}
@Test
public void mainErrorsImmediate() {
PublishSubject<Integer> main = PublishSubject.create();
final PublishSubject<Integer> inner = PublishSubject.create();
TestObserverEx<Integer> to = main.concatMapEager(
new Function<Integer, ObservableSource<Integer>>() {
@Override
public ObservableSource<Integer> apply(Integer t) {
return inner;
}
}).to(TestHelper.<Integer>testConsumer());
main.onNext(1);
main.onNext(2);
inner.onNext(2);
to.assertValue(2);
main.onError(new TestException("Forced failure"));
assertFalse("inner has subscribers?", inner.hasObservers());
inner.onNext(3);
inner.onComplete();
to.assertFailureAndMessage(TestException.class, "Forced failure", 2);
}
@Test
public void longEager() {
Observable.range(1, 2 * Observable.bufferSize())
.concatMapEager(new Function<Integer, ObservableSource<Integer>>() {
@Override
public ObservableSource<Integer> apply(Integer v) {
return Observable.just(1);
}
})
.test()
.assertValueCount(2 * Observable.bufferSize())
.assertNoErrors()
.assertComplete();
}
TestObserver<Object> to;
Function<Integer, Observable<Integer>> toJust = new Function<Integer, Observable<Integer>>() {
@Override
public Observable<Integer> apply(Integer t) {
return Observable.just(t);
}
};
Function<Integer, Observable<Integer>> toRange = new Function<Integer, Observable<Integer>>() {
@Override
public Observable<Integer> apply(Integer t) {
return Observable.range(t, 2);
}
};
@Before
public void before() {
to = new TestObserver<>();
}
    @Test
    public void simple() {
        // 1:1 mapping: 100 upstream values produce 100 downstream values.
        Observable.range(1, 100).concatMapEager(toJust).subscribe(to);
        to.assertNoErrors();
        to.assertValueCount(100);
        to.assertComplete();
    }
    @Test
    public void simple2() {
        // 1:2 mapping: 100 upstream values produce 200 downstream values.
        Observable.range(1, 100).concatMapEager(toRange).subscribe(to);
        to.assertNoErrors();
        to.assertValueCount(200);
        to.assertComplete();
    }
@Test
public void eagerness2() {
final AtomicInteger count = new AtomicInteger();
Observable<Integer> source = Observable.just(1).doOnNext(new Consumer<Integer>() {
@Override
public void accept(Integer t) {
count.getAndIncrement();
}
}).hide();
Observable.concatArrayEager(source, source).subscribe(to);
Assert.assertEquals(2, count.get());
to.assertValueCount(count.get());
to.assertNoErrors();
to.assertComplete();
}
@Test
public void eagerness3() {
final AtomicInteger count = new AtomicInteger();
Observable<Integer> source = Observable.just(1).doOnNext(new Consumer<Integer>() {
@Override
public void accept(Integer t) {
count.getAndIncrement();
}
}).hide();
Observable.concatArrayEager(source, source, source).subscribe(to);
Assert.assertEquals(3, count.get());
to.assertValueCount(count.get());
to.assertNoErrors();
to.assertComplete();
}
@Test
public void eagerness4() {
final AtomicInteger count = new AtomicInteger();
Observable<Integer> source = Observable.just(1).doOnNext(new Consumer<Integer>() {
@Override
public void accept(Integer t) {
count.getAndIncrement();
}
}).hide();
Observable.concatArrayEager(source, source, source, source).subscribe(to);
Assert.assertEquals(4, count.get());
to.assertValueCount(count.get());
to.assertNoErrors();
to.assertComplete();
}
@Test
public void eagerness5() {
final AtomicInteger count = new AtomicInteger();
Observable<Integer> source = Observable.just(1).doOnNext(new Consumer<Integer>() {
@Override
public void accept(Integer t) {
count.getAndIncrement();
}
}).hide();
Observable.concatArrayEager(source, source, source, source, source).subscribe(to);
Assert.assertEquals(5, count.get());
to.assertValueCount(count.get());
to.assertNoErrors();
to.assertComplete();
}
@Test
public void eagerness6() {
final AtomicInteger count = new AtomicInteger();
Observable<Integer> source = Observable.just(1).doOnNext(new Consumer<Integer>() {
@Override
public void accept(Integer t) {
count.getAndIncrement();
}
}).hide();
Observable.concatArrayEager(source, source, source, source, source, source).subscribe(to);
Assert.assertEquals(6, count.get());
to.assertValueCount(count.get());
to.assertNoErrors();
to.assertComplete();
}
@Test
public void eagerness7() {
final AtomicInteger count = new AtomicInteger();
Observable<Integer> source = Observable.just(1).doOnNext(new Consumer<Integer>() {
@Override
public void accept(Integer t) {
count.getAndIncrement();
}
}).hide();
Observable.concatArrayEager(source, source, source, source, source, source, source).subscribe(to);
Assert.assertEquals(7, count.get());
to.assertValueCount(count.get());
to.assertNoErrors();
to.assertComplete();
}
@Test
public void eagerness8() {
final AtomicInteger count = new AtomicInteger();
Observable<Integer> source = Observable.just(1).doOnNext(new Consumer<Integer>() {
@Override
public void accept(Integer t) {
count.getAndIncrement();
}
}).hide();
Observable.concatArrayEager(source, source, source, source, source, source, source, source).subscribe(to);
Assert.assertEquals(8, count.get());
to.assertValueCount(count.get());
to.assertNoErrors();
to.assertComplete();
}
@Test
public void eagerness9() {
final AtomicInteger count = new AtomicInteger();
Observable<Integer> source = Observable.just(1).doOnNext(new Consumer<Integer>() {
@Override
public void accept(Integer t) {
count.getAndIncrement();
}
}).hide();
Observable.concatArrayEager(source, source, source, source, source, source, source, source, source).subscribe(to);
Assert.assertEquals(9, count.get());
to.assertValueCount(count.get());
to.assertNoErrors();
to.assertComplete();
}
    @Test
    public void mainError() {
        // A failing main source errors without invoking the mapper output.
        Observable.<Integer>error(new TestException()).concatMapEager(toJust).subscribe(to);
        to.assertNoValues();
        to.assertError(TestException.class);
        to.assertNotComplete();
    }
    @Test
    public void innerError() {
        // TODO verify: concatMapEager subscribes first then consumes the sources is okay
        // An inner error terminates the whole sequence after already-delivered values.
        PublishSubject<Integer> ps = PublishSubject.create();
        Observable.concatArrayEager(Observable.just(1), ps)
        .subscribe(to);
        ps.onError(new TestException());
        to.assertValue(1);
        to.assertError(TestException.class);
        to.assertNotComplete();
    }
    @Test
    public void innerEmpty() {
        // All-empty inners produce an empty, completed sequence.
        Observable.concatArrayEager(Observable.empty(), Observable.empty()).subscribe(to);
        to.assertNoValues();
        to.assertNoErrors();
        to.assertComplete();
    }
@Test
public void mapperThrows() {
Observable.just(1).concatMapEager(new Function<Integer, Observable<Integer>>() {
@Override
public Observable<Integer> apply(Integer t) {
throw new TestException();
}
}).subscribe(to);
to.assertNoValues();
to.assertNotComplete();
to.assertError(TestException.class);
}
    // maxConcurrency must be strictly positive.
    @Test(expected = IllegalArgumentException.class)
    public void invalidMaxConcurrent() {
        Observable.just(1).concatMapEager(toJust, 0, Observable.bufferSize());
    }
    // prefetch (capacity hint) must be strictly positive.
    @Test(expected = IllegalArgumentException.class)
    public void invalidCapacityHint() {
        Observable.just(1).concatMapEager(toJust, Observable.bufferSize(), 0);
    }
@Test
public void asynchronousRun() {
Observable.range(1, 2).concatMapEager(new Function<Integer, Observable<Integer>>() {
@Override
public Observable<Integer> apply(Integer t) {
return Observable.range(1, 1000).subscribeOn(Schedulers.computation());
}
}).observeOn(Schedulers.newThread()).subscribe(to);
to.awaitDone(5, TimeUnit.SECONDS);
to.assertNoErrors();
to.assertValueCount(2000);
}
@Test
public void reentrantWork() {
final PublishSubject<Integer> subject = PublishSubject.create();
final AtomicBoolean once = new AtomicBoolean();
subject.concatMapEager(new Function<Integer, Observable<Integer>>() {
@Override
public Observable<Integer> apply(Integer t) {
return Observable.just(t);
}
})
.doOnNext(new Consumer<Integer>() {
@Override
public void accept(Integer t) {
if (once.compareAndSet(false, true)) {
subject.onNext(2);
}
}
})
.subscribe(to);
subject.onNext(1);
to.assertNoErrors();
to.assertNotComplete();
to.assertValues(1, 2);
}
    @SuppressWarnings("unchecked")
    @Test
    public void concatArrayEager() throws Exception {
        // Invokes the varargs concatArrayEager overload reflectively for 2..9
        // identical single-element sources and checks one value per source.
        for (int i = 2; i < 10; i++) {
            Observable<Integer>[] obs = new Observable[i];
            Arrays.fill(obs, Observable.just(1));
            Integer[] expected = new Integer[i];
            Arrays.fill(expected, 1);
            Method m = Observable.class.getMethod("concatArrayEager", ObservableSource[].class);
            TestObserver<Integer> to = TestObserver.create();
            ((Observable<Integer>)m.invoke(null, new Object[]{obs})).subscribe(to);
            to.assertValues(expected);
            to.assertNoErrors();
            to.assertComplete();
        }
    }
    @Test
    public void capacityHint() {
        // Iterable overload with minimal maxConcurrency/prefetch still delivers everything.
        Observable<Integer> source = Observable.just(1);
        TestObserver<Integer> to = TestObserver.create();
        Observable.concatEager(Arrays.asList(source, source, source), 1, 1).subscribe(to);
        to.assertValues(1, 1, 1);
        to.assertNoErrors();
        to.assertComplete();
    }
    @Test
    public void Observable() {
        // concatEager over an Observable-of-Observables.
        Observable<Integer> source = Observable.just(1);
        TestObserver<Integer> to = TestObserver.create();
        Observable.concatEager(Observable.just(source, source, source)).subscribe(to);
        to.assertValues(1, 1, 1);
        to.assertNoErrors();
        to.assertComplete();
    }
    @Test
    public void ObservableCapacityHint() {
        // Observable-of-Observables overload with minimal maxConcurrency/prefetch.
        Observable<Integer> source = Observable.just(1);
        TestObserver<Integer> to = TestObserver.create();
        Observable.concatEager(Observable.just(source, source, source), 1, 1).subscribe(to);
        to.assertValues(1, 1, 1);
        to.assertNoErrors();
        to.assertComplete();
    }
    @Test
    public void badCapacityHint() throws Exception {
        // A negative prefetch on concatEager must fail fast with a descriptive message.
        Observable<Integer> source = Observable.just(1);
        try {
            Observable.concatEager(Arrays.asList(source, source, source), 1, -99);
        } catch (IllegalArgumentException ex) {
            assertEquals("bufferSize > 0 required but it was -99", ex.getMessage());
        }
    }
    @SuppressWarnings({ "unchecked", "rawtypes" })
    @Test
    public void mappingBadCapacityHint() throws Exception {
        // Same validation through the concatMapEager mapping overload.
        Observable<Integer> source = Observable.just(1);
        try {
            Observable.just(source, source, source).concatMapEager((Function)Functions.identity(), 10, -99);
        } catch (IllegalArgumentException ex) {
            assertEquals("bufferSize > 0 required but it was -99", ex.getMessage());
        }
    }
    @Test
    public void concatEagerIterable() {
        // Basic Iterable overload: sources concatenated in order.
        Observable.concatEager(Arrays.asList(Observable.just(1), Observable.just(2)))
        .test()
        .assertResult(1, 2);
    }
@Test
public void dispose() {
TestHelper.checkDisposed(Observable.just(1).hide().concatMapEager(new Function<Integer, ObservableSource<Integer>>() {
@Override
public ObservableSource<Integer> apply(Integer v) throws Exception {
return Observable.range(1, 2);
}
}));
}
@Test
public void empty() {
Observable.<Integer>empty().hide().concatMapEager(new Function<Integer, ObservableSource<Integer>>() {
@Override
public ObservableSource<Integer> apply(Integer v) throws Exception {
return Observable.range(1, 2);
}
})
.test()
.assertResult();
}
@Test
public void innerError2() {
Observable.<Integer>just(1).hide().concatMapEager(new Function<Integer, ObservableSource<Integer>>() {
@Override
public ObservableSource<Integer> apply(Integer v) throws Exception {
return Observable.error(new TestException());
}
})
.test()
.assertFailure(TestException.class);
}
@Test
public void innerErrorMaxConcurrency() {
Observable.<Integer>just(1).hide().concatMapEager(new Function<Integer, ObservableSource<Integer>>() {
@Override
public ObservableSource<Integer> apply(Integer v) throws Exception {
return Observable.error(new TestException());
}
}, 1, 128)
.test()
.assertFailure(TestException.class);
}
@Test
public void innerCallableThrows() {
Observable.<Integer>just(1).hide().concatMapEager(new Function<Integer, ObservableSource<Integer>>() {
@Override
public ObservableSource<Integer> apply(Integer v) throws Exception {
return Observable.fromCallable(new Callable<Integer>() {
@Override
public Integer call() throws Exception {
throw new TestException();
}
});
}
})
.test()
.assertFailure(TestException.class);
}
    @Test
    public void innerOuterRace() {
        // Races a main-source error against an inner-source error; whichever
        // loses must end up either inside a CompositeException or routed to
        // RxJavaPlugins as an undeliverable.
        for (int i = 0; i < TestHelper.RACE_DEFAULT_LOOPS; i++) {
            List<Throwable> errors = TestHelper.trackPluginErrors();
            try {
                final PublishSubject<Integer> ps1 = PublishSubject.create();
                final PublishSubject<Integer> ps2 = PublishSubject.create();
                TestObserverEx<Integer> to = ps1.concatMapEager(new Function<Integer, ObservableSource<Integer>>() {
                    @Override
                    public ObservableSource<Integer> apply(Integer v) throws Exception {
                        return ps2;
                    }
                }).to(TestHelper.<Integer>testConsumer());
                final TestException ex1 = new TestException();
                final TestException ex2 = new TestException();
                ps1.onNext(1);
                Runnable r1 = new Runnable() {
                    @Override
                    public void run() {
                        ps1.onError(ex1);
                    }
                };
                Runnable r2 = new Runnable() {
                    @Override
                    public void run() {
                        ps2.onError(ex2);
                    }
                };
                TestHelper.race(r1, r2);
                to.assertSubscribed().assertNoValues().assertNotComplete();
                // Either both errors are aggregated, or the loser went to the plugin hook.
                Throwable ex = to.errors().get(0);
                if (ex instanceof CompositeException) {
                    List<Throwable> es = TestHelper.errorList(to);
                    TestHelper.assertError(es, 0, TestException.class);
                    TestHelper.assertError(es, 1, TestException.class);
                } else {
                    to.assertError(TestException.class);
                    if (!errors.isEmpty()) {
                        TestHelper.assertUndeliverable(errors, 0, TestException.class);
                    }
                }
            } finally {
                RxJavaPlugins.reset();
            }
        }
    }
@Test
public void nextCancelRace() {
for (int i = 0; i < TestHelper.RACE_DEFAULT_LOOPS; i++) {
final PublishSubject<Integer> ps1 = PublishSubject.create();
final TestObserver<Integer> to = ps1.concatMapEager(new Function<Integer, ObservableSource<Integer>>() {
@Override
public ObservableSource<Integer> apply(Integer v) throws Exception {
return Observable.never();
}
}).test();
Runnable r1 = new Runnable() {
@Override
public void run() {
ps1.onNext(1);
}
};
Runnable r2 = new Runnable() {
@Override
public void run() {
to.dispose();
}
};
TestHelper.race(r1, r2);
to.assertEmpty();
}
}
@Test
public void mapperCancels() {
final TestObserver<Integer> to = new TestObserver<>();
Observable.just(1).hide()
.concatMapEager(new Function<Integer, ObservableSource<Integer>>() {
@Override
public ObservableSource<Integer> apply(Integer v) throws Exception {
to.dispose();
return Observable.never();
}
}, 1, 128)
.subscribe(to);
to.assertEmpty();
}
@Test
public void innerErrorFused() {
Observable.<Integer>just(1).hide().concatMapEager(new Function<Integer, ObservableSource<Integer>>() {
@Override
public ObservableSource<Integer> apply(Integer v) throws Exception {
return Observable.range(1, 2).map(new Function<Integer, Integer>() {
@Override
public Integer apply(Integer v) throws Exception {
throw new TestException();
}
});
}
})
.test()
.assertFailure(TestException.class);
}
    @Test
    public void innerErrorAfterPoll() {
        // The inner errors re-entrantly from within the downstream onNext,
        // i.e. right after a fused poll delivered a value; the error must
        // still terminate the sequence after that value.
        final UnicastSubject<Integer> us = UnicastSubject.create();
        us.onNext(1);
        TestObserver<Integer> to = new TestObserver<Integer>() {
            @Override
            public void onNext(Integer t) {
                super.onNext(t);
                us.onError(new TestException());
            }
        };
        Observable.<Integer>just(1).hide()
        .concatMapEager(new Function<Integer, ObservableSource<Integer>>() {
            @Override
            public ObservableSource<Integer> apply(Integer v) throws Exception {
                return us;
            }
        }, 1, 128)
        .subscribe(to);
        to
        .assertFailure(TestException.class, 1);
    }
@Test
public void fuseAndTake() {
UnicastSubject<Integer> us = UnicastSubject.create();
us.onNext(1);
us.onComplete();
us.concatMapEager(new Function<Integer, ObservableSource<Integer>>() {
@Override
public ObservableSource<Integer> apply(Integer v) throws Exception {
return Observable.just(1);
}
})
.take(1)
.test()
.assertResult(1);
}
@Test
public void doubleOnSubscribe() {
TestHelper.checkDoubleOnSubscribeObservable(new Function<Observable<Object>, ObservableSource<Object>>() {
@Override
public ObservableSource<Object> apply(Observable<Object> o) throws Exception {
return o.concatMapEager(new Function<Object, ObservableSource<Object>>() {
@Override
public ObservableSource<Object> apply(Object v) throws Exception {
return Observable.just(v);
}
});
}
});
}
@Test
public void oneDelayed() {
Observable.just(1, 2, 3, 4, 5)
.concatMapEager(new Function<Integer, ObservableSource<Integer>>() {
@Override
public ObservableSource<Integer> apply(Integer i) throws Exception {
return i == 3 ? Observable.just(i) : Observable
.just(i)
.delay(1, TimeUnit.MILLISECONDS, Schedulers.io());
}
})
.observeOn(Schedulers.io())
.test()
.awaitDone(5, TimeUnit.SECONDS)
.assertResult(1, 2, 3, 4, 5)
;
}
@Test
@SuppressWarnings("unchecked")
public void maxConcurrencyOf2() {
List<Integer>[] list = new ArrayList[100];
for (int i = 0; i < 100; i++) {
List<Integer> lst = new ArrayList<>();
list[i] = lst;
for (int k = 1; k <= 10; k++) {
lst.add((i) * 10 + k);
}
}
Observable.range(1, 1000)
.buffer(10)
.concatMapEager(new Function<List<Integer>, ObservableSource<List<Integer>>>() {
@Override
public ObservableSource<List<Integer>> apply(List<Integer> v)
throws Exception {
return Observable.just(v)
.subscribeOn(Schedulers.io())
.doOnNext(new Consumer<List<Integer>>() {
@Override
public void accept(List<Integer> v)
throws Exception {
Thread.sleep(new Random().nextInt(20));
}
});
}
}
, 2, 3)
.test()
.awaitDone(5, TimeUnit.SECONDS)
.assertResult(list);
}
@Test
public void arrayDelayErrorDefault() {
PublishSubject<Integer> ps1 = PublishSubject.create();
PublishSubject<Integer> ps2 = PublishSubject.create();
PublishSubject<Integer> ps3 = PublishSubject.create();
TestObserver<Integer> to = Observable.concatArrayEagerDelayError(ps1, ps2, ps3)
.test();
to.assertEmpty();
assertTrue(ps1.hasObservers());
assertTrue(ps2.hasObservers());
assertTrue(ps3.hasObservers());
ps2.onNext(2);
ps2.onComplete();
to.assertEmpty();
ps1.onNext(1);
to.assertValuesOnly(1);
ps1.onComplete();
to.assertValuesOnly(1, 2);
ps3.onComplete();
to.assertResult(1, 2);
}
@Test
public void arrayDelayErrorMaxConcurrency() {
PublishSubject<Integer> ps1 = PublishSubject.create();
PublishSubject<Integer> ps2 = PublishSubject.create();
PublishSubject<Integer> ps3 = PublishSubject.create();
TestObserver<Integer> to = Observable.concatArrayEagerDelayError(2, 2, ps1, ps2, ps3)
.test();
to.assertEmpty();
assertTrue(ps1.hasObservers());
assertTrue(ps2.hasObservers());
assertFalse(ps3.hasObservers());
ps2.onNext(2);
ps2.onComplete();
to.assertEmpty();
ps1.onNext(1);
to.assertValuesOnly(1);
ps1.onComplete();
assertTrue(ps3.hasObservers());
to.assertValuesOnly(1, 2);
ps3.onComplete();
to.assertResult(1, 2);
}
@Test
public void arrayDelayErrorMaxConcurrencyErrorDelayed() {
PublishSubject<Integer> ps1 = PublishSubject.create();
PublishSubject<Integer> ps2 = PublishSubject.create();
PublishSubject<Integer> ps3 = PublishSubject.create();
TestObserver<Integer> to = Observable.concatArrayEagerDelayError(2, 2, ps1, ps2, ps3)
.test();
to.assertEmpty();
assertTrue(ps1.hasObservers());
assertTrue(ps2.hasObservers());
assertFalse(ps3.hasObservers());
ps2.onNext(2);
ps2.onError(new TestException());
to.assertEmpty();
ps1.onNext(1);
to.assertValuesOnly(1);
ps1.onComplete();
assertTrue(ps3.hasObservers());
to.assertValuesOnly(1, 2);
ps3.onComplete();
to.assertFailure(TestException.class, 1, 2);
}
@Test
public void cancelActive() {
PublishSubject<Integer> ps1 = PublishSubject.create();
PublishSubject<Integer> ps2 = PublishSubject.create();
TestObserver<Integer> to = Observable
.concatEager(Observable.just(ps1, ps2))
.test();
assertTrue(ps1.hasObservers());
assertTrue(ps2.hasObservers());
to.dispose();
assertFalse(ps1.hasObservers());
assertFalse(ps2.hasObservers());
}
@Test
public void cancelNoInnerYet() {
PublishSubject<Observable<Integer>> ps1 = PublishSubject.create();
TestObserver<Integer> to = Observable
.concatEager(ps1)
.test();
assertTrue(ps1.hasObservers());
to.dispose();
assertFalse(ps1.hasObservers());
}
@Test
public void undeliverableUponCancel() {
TestHelper.checkUndeliverableUponCancel(new ObservableConverter<Integer, Observable<Integer>>() {
@Override
public Observable<Integer> apply(Observable<Integer> upstream) {
return upstream.concatMapEager(new Function<Integer, Observable<Integer>>() {
@Override
public Observable<Integer> apply(Integer v) throws Throwable {
return Observable.just(v).hide();
}
});
}
});
}
@Test
public void undeliverableUponCancelDelayError() {
TestHelper.checkUndeliverableUponCancel(new ObservableConverter<Integer, Observable<Integer>>() {
@Override
public Observable<Integer> apply(Observable<Integer> upstream) {
return upstream.concatMapEagerDelayError(new Function<Integer, Observable<Integer>>() {
@Override
public Observable<Integer> apply(Integer v) throws Throwable {
return Observable.just(v).hide();
}
}, false);
}
});
}
@Test
public void undeliverableUponCancelDelayErrorTillEnd() {
TestHelper.checkUndeliverableUponCancel(new ObservableConverter<Integer, Observable<Integer>>() {
@Override
public Observable<Integer> apply(Observable<Integer> upstream) {
return upstream.concatMapEagerDelayError(new Function<Integer, Observable<Integer>>() {
@Override
public Observable<Integer> apply(Integer v) throws Throwable {
return Observable.just(v).hide();
}
}, true);
}
});
}
@Test
public void iterableDelayError() {
Observable.concatEagerDelayError(Arrays.asList(
Observable.range(1, 2),
Observable.error(new TestException()),
Observable.range(3, 3)
))
.test()
.assertFailure(TestException.class, 1, 2, 3, 4, 5);
}
@Test
public void iterableDelayErrorMaxConcurrency() {
Observable.concatEagerDelayError(Arrays.asList(
Observable.range(1, 2),
Observable.error(new TestException()),
Observable.range(3, 3)
), 1, 1)
.test()
.assertFailure(TestException.class, 1, 2, 3, 4, 5);
}
@Test
public void observerDelayError() {
Observable.concatEagerDelayError(Observable.fromArray(
Observable.range(1, 2),
Observable.error(new TestException()),
Observable.range(3, 3)
))
.test()
.assertFailure(TestException.class, 1, 2, 3, 4, 5);
}
@Test
public void observerDelayErrorMaxConcurrency() {
Observable.concatEagerDelayError(Observable.fromArray(
Observable.range(1, 2),
Observable.error(new TestException()),
Observable.range(3, 3)
), 1, 1)
.test()
.assertFailure(TestException.class, 1, 2, 3, 4, 5);
}
@Test
public void innerFusionRejected() {
Observable.just(1)
.hide()
.concatMapEager(v -> TestHelper.rejectObservableFusion())
.test()
.assertEmpty();
}
}
|
ObservableConcatMapEagerTest
|
java
|
apache__hadoop
|
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/store/BlockUploadStatistics.java
|
{
"start": 851,
"end": 1028
}
|
interface ____ {
/**
* A block has been allocated.
*/
void blockAllocated();
/**
* A block has been released.
*/
void blockReleased();
}
|
BlockUploadStatistics
|
java
|
hibernate__hibernate-orm
|
hibernate-envers/src/main/java/org/hibernate/envers/internal/tools/query/QueryBuilder.java
|
{
"start": 1446,
"end": 1554
}
|
class ____ incrementally building a HQL query.
*
* @author Adam Warski (adam at warski dot org)
*/
public
|
for
|
java
|
elastic__elasticsearch
|
x-pack/plugin/security/cli/src/test/java/org/elasticsearch/xpack/security/cli/CertificateGenerateToolTests.java
|
{
"start": 3430,
"end": 27627
}
|
class ____ extends ESTestCase {
private FileSystem jimfs;
private static final String CN_OID = "2.5.4.3";
private Path initTempDir() throws Exception {
Configuration conf = Configuration.unix().toBuilder().setAttributeViews("posix").build();
jimfs = Jimfs.newFileSystem(conf);
Path tempDir = jimfs.getPath("temp");
IOUtils.rm(tempDir);
Files.createDirectories(tempDir);
return tempDir;
}
@BeforeClass
public static void checkFipsJvm() {
assumeFalse("Can't run in a FIPS JVM, depends on Non FIPS BouncyCastle", inFipsJvm());
}
@After
public void tearDown() throws Exception {
IOUtils.close(jimfs);
super.tearDown();
}
public void testOutputDirectory() throws Exception {
Path outputDir = createTempDir();
Path outputFile = outputDir.resolve("certs.zip");
MockTerminal terminal = MockTerminal.create();
// test with a user provided dir
Path resolvedOutputFile = CertificateGenerateTool.getOutputFile(terminal, outputFile.toString(), null);
assertEquals(outputFile, resolvedOutputFile);
assertTrue(terminal.getOutput().isEmpty());
// test without a user provided directory
Path userPromptedOutputFile = outputDir.resolve("csr");
assertFalse(Files.exists(userPromptedOutputFile));
terminal.addTextInput(userPromptedOutputFile.toString());
resolvedOutputFile = CertificateGenerateTool.getOutputFile(terminal, null, "out.zip");
assertEquals(userPromptedOutputFile, resolvedOutputFile);
assertTrue(terminal.getOutput().isEmpty());
// test with empty user input
String defaultFilename = randomAlphaOfLengthBetween(1, 10);
Path expectedDefaultPath = resolvePath(defaultFilename);
terminal.addTextInput("");
resolvedOutputFile = CertificateGenerateTool.getOutputFile(terminal, null, defaultFilename);
assertEquals(expectedDefaultPath, resolvedOutputFile);
assertTrue(terminal.getOutput().isEmpty());
}
public void testPromptingForInstanceInformation() throws Exception {
final int numberOfInstances = scaledRandomIntBetween(1, 12);
Map<String, Map<String, String>> instanceInput = Maps.newMapWithExpectedSize(numberOfInstances);
for (int i = 0; i < numberOfInstances; i++) {
final String name;
while (true) {
String randomName = getValidRandomInstanceName();
if (instanceInput.containsKey(randomName) == false) {
name = randomName;
break;
}
}
Map<String, String> instanceInfo = new HashMap<>();
instanceInput.put(name, instanceInfo);
instanceInfo.put("ip", randomFrom("127.0.0.1", "::1", "192.168.1.1,::1", ""));
instanceInfo.put("dns", randomFrom("localhost", "localhost.localdomain", "localhost,myhost", ""));
logger.info("instance [{}] name [{}] [{}]", i, name, instanceInfo);
}
int count = 0;
MockTerminal terminal = MockTerminal.create();
for (Entry<String, Map<String, String>> entry : instanceInput.entrySet()) {
terminal.addTextInput(entry.getKey());
terminal.addTextInput("");
terminal.addTextInput(entry.getValue().get("ip"));
terminal.addTextInput(entry.getValue().get("dns"));
count++;
if (count == numberOfInstances) {
terminal.addTextInput("n");
} else {
terminal.addTextInput("y");
}
}
Collection<CertificateInformation> certInfos = CertificateGenerateTool.getCertificateInformationList(terminal, null);
logger.info("certificate tool output:\n{}", terminal.getOutput());
assertEquals(numberOfInstances, certInfos.size());
for (CertificateInformation certInfo : certInfos) {
String name = certInfo.name.originalName;
Map<String, String> instanceInfo = instanceInput.get(name);
assertNotNull("did not find map for " + name, instanceInfo);
List<String> expectedIps = Arrays.asList(Strings.commaDelimitedListToStringArray(instanceInfo.get("ip")));
List<String> expectedDns = Arrays.asList(Strings.commaDelimitedListToStringArray(instanceInfo.get("dns")));
assertEquals(expectedIps, certInfo.ipAddresses);
assertEquals(expectedDns, certInfo.dnsNames);
instanceInput.remove(name);
}
assertEquals(0, instanceInput.size());
final String output = terminal.getOutput();
assertTrue("Output: " + output, output.isEmpty());
}
public void testParsingFile() throws Exception {
Path tempDir = initTempDir();
Path instanceFile = writeInstancesTo(tempDir.resolve("instances.yml"));
Collection<CertificateInformation> certInfos = CertificateGenerateTool.parseFile(instanceFile);
assertEquals(4, certInfos.size());
Map<String, CertificateInformation> certInfosMap = certInfos.stream()
.collect(Collectors.toMap((c) -> c.name.originalName, Function.identity()));
CertificateInformation certInfo = certInfosMap.get("node1");
assertEquals(Collections.singletonList("127.0.0.1"), certInfo.ipAddresses);
assertEquals(Collections.singletonList("localhost"), certInfo.dnsNames);
assertEquals(Collections.emptyList(), certInfo.commonNames);
assertEquals("node1", certInfo.name.filename);
certInfo = certInfosMap.get("node2");
assertEquals(Collections.singletonList("::1"), certInfo.ipAddresses);
assertEquals(Collections.emptyList(), certInfo.dnsNames);
assertEquals(Collections.singletonList("node2.elasticsearch"), certInfo.commonNames);
assertEquals("node2", certInfo.name.filename);
certInfo = certInfosMap.get("node3");
assertEquals(Collections.emptyList(), certInfo.ipAddresses);
assertEquals(Collections.emptyList(), certInfo.dnsNames);
assertEquals(Collections.emptyList(), certInfo.commonNames);
assertEquals("node3", certInfo.name.filename);
certInfo = certInfosMap.get("CN=different value");
assertEquals(Collections.emptyList(), certInfo.ipAddresses);
assertEquals(Collections.singletonList("node4.mydomain.com"), certInfo.dnsNames);
assertEquals(Collections.emptyList(), certInfo.commonNames);
assertEquals("different file", certInfo.name.filename);
}
public void testGeneratingCsr() throws Exception {
Path tempDir = initTempDir();
Path outputFile = tempDir.resolve("out.zip");
Path instanceFile = writeInstancesTo(tempDir.resolve("instances.yml"));
Collection<CertificateInformation> certInfos = CertificateGenerateTool.parseFile(instanceFile);
assertEquals(4, certInfos.size());
assertFalse(Files.exists(outputFile));
CertificateGenerateTool.generateAndWriteCsrs(outputFile, certInfos, randomFrom(1024, 2048));
assertTrue(Files.exists(outputFile));
Set<PosixFilePermission> perms = Files.getPosixFilePermissions(outputFile);
assertTrue(perms.toString(), perms.contains(PosixFilePermission.OWNER_READ));
assertTrue(perms.toString(), perms.contains(PosixFilePermission.OWNER_WRITE));
assertEquals(perms.toString(), 2, perms.size());
FileSystem fileSystem = FileSystems.newFileSystem(new URI("jar:" + outputFile.toUri()), Collections.emptyMap());
Path zipRoot = fileSystem.getPath("/");
assertFalse(Files.exists(zipRoot.resolve("ca")));
for (CertificateInformation certInfo : certInfos) {
String filename = certInfo.name.filename;
assertTrue(Files.exists(zipRoot.resolve(filename)));
final Path csr = zipRoot.resolve(filename + "/" + filename + ".csr");
assertTrue(Files.exists(csr));
assertTrue(Files.exists(zipRoot.resolve(filename + "/" + filename + ".key")));
PKCS10CertificationRequest request = readCertificateRequest(csr);
assertEquals(certInfo.name.x500Principal.getName(), request.getSubject().toString());
Attribute[] extensionsReq = request.getAttributes(PKCSObjectIdentifiers.pkcs_9_at_extensionRequest);
if (certInfo.ipAddresses.size() > 0 || certInfo.dnsNames.size() > 0) {
assertEquals(1, extensionsReq.length);
Extensions extensions = Extensions.getInstance(extensionsReq[0].getAttributeValues()[0]);
GeneralNames subjAltNames = GeneralNames.fromExtensions(extensions, Extension.subjectAlternativeName);
assertSubjAltNames(subjAltNames, certInfo);
} else {
assertEquals(0, extensionsReq.length);
}
}
}
public void testGeneratingSignedCertificates() throws Exception {
Path tempDir = initTempDir();
Path outputFile = tempDir.resolve("out.zip");
Path instanceFile = writeInstancesTo(tempDir.resolve("instances.yml"));
Collection<CertificateInformation> certInfos = CertificateGenerateTool.parseFile(instanceFile);
assertEquals(4, certInfos.size());
final int keysize = randomFrom(1024, 2048);
final int days = randomIntBetween(1, 1024);
KeyPair keyPair = CertGenUtils.generateKeyPair(keysize);
X509Certificate caCert = CertGenUtils.generateCACertificate(new X500Principal("CN=test ca"), keyPair, days, null);
final boolean generatedCa = randomBoolean();
final char[] keyPassword = randomBoolean() ? SecuritySettingsSourceField.TEST_PASSWORD.toCharArray() : null;
final char[] pkcs12Password = randomBoolean() ? randomAlphaOfLengthBetween(1, 12).toCharArray() : null;
assertFalse(Files.exists(outputFile));
CAInfo caInfo = new CAInfo(caCert, keyPair.getPrivate(), generatedCa, keyPassword);
CertificateGenerateTool.generateAndWriteSignedCertificates(outputFile, certInfos, caInfo, keysize, days, pkcs12Password);
assertTrue(Files.exists(outputFile));
Set<PosixFilePermission> perms = Files.getPosixFilePermissions(outputFile);
assertTrue(perms.toString(), perms.contains(PosixFilePermission.OWNER_READ));
assertTrue(perms.toString(), perms.contains(PosixFilePermission.OWNER_WRITE));
assertEquals(perms.toString(), 2, perms.size());
FileSystem fileSystem = FileSystems.newFileSystem(new URI("jar:" + outputFile.toUri()), Collections.emptyMap());
Path zipRoot = fileSystem.getPath("/");
if (generatedCa) {
assertTrue(Files.exists(zipRoot.resolve("ca")));
assertTrue(Files.exists(zipRoot.resolve("ca").resolve("ca.crt")));
assertTrue(Files.exists(zipRoot.resolve("ca").resolve("ca.key")));
// check the CA cert
try (InputStream input = Files.newInputStream(zipRoot.resolve("ca").resolve("ca.crt"))) {
X509Certificate parsedCaCert = readX509Certificate(input);
assertThat(parsedCaCert.getSubjectX500Principal().getName(), containsString("test ca"));
assertEquals(caCert, parsedCaCert);
long daysBetween = ChronoUnit.DAYS.between(caCert.getNotBefore().toInstant(), caCert.getNotAfter().toInstant());
assertEquals(days, (int) daysBetween);
}
// check the CA key
if (keyPassword != null) {
try (Reader reader = Files.newBufferedReader(zipRoot.resolve("ca").resolve("ca.key"))) {
PEMParser pemParser = new PEMParser(reader);
Object parsed = pemParser.readObject();
assertThat(parsed, instanceOf(PEMEncryptedKeyPair.class));
char[] zeroChars = new char[keyPassword.length];
Arrays.fill(zeroChars, (char) 0);
assertArrayEquals(zeroChars, keyPassword);
}
}
PrivateKey privateKey = PemUtils.readPrivateKey(
zipRoot.resolve("ca").resolve("ca.key"),
() -> keyPassword != null ? SecuritySettingsSourceField.TEST_PASSWORD.toCharArray() : null
);
assertEquals(caInfo.privateKey, privateKey);
} else {
assertFalse(Files.exists(zipRoot.resolve("ca")));
}
for (CertificateInformation certInfo : certInfos) {
String filename = certInfo.name.filename;
assertTrue(Files.exists(zipRoot.resolve(filename)));
final Path cert = zipRoot.resolve(filename + "/" + filename + ".crt");
assertTrue(Files.exists(cert));
assertTrue(Files.exists(zipRoot.resolve(filename + "/" + filename + ".key")));
final Path p12 = zipRoot.resolve(filename + "/" + filename + ".p12");
try (InputStream input = Files.newInputStream(cert)) {
X509Certificate certificate = readX509Certificate(input);
assertEquals(certInfo.name.x500Principal.toString(), certificate.getSubjectX500Principal().getName());
final int sanCount = certInfo.ipAddresses.size() + certInfo.dnsNames.size() + certInfo.commonNames.size();
if (sanCount == 0) {
assertNull(certificate.getSubjectAlternativeNames());
} else {
X509CertificateHolder x509CertHolder = new X509CertificateHolder(certificate.getEncoded());
GeneralNames subjAltNames = GeneralNames.fromExtensions(
x509CertHolder.getExtensions(),
Extension.subjectAlternativeName
);
assertSubjAltNames(subjAltNames, certInfo);
}
if (pkcs12Password != null) {
assertThat(p12, pathExists());
try (InputStream in = Files.newInputStream(p12)) {
final KeyStore ks = KeyStore.getInstance("PKCS12");
ks.load(in, pkcs12Password);
final Certificate p12Certificate = ks.getCertificate(certInfo.name.originalName);
assertThat("Certificate " + certInfo.name, p12Certificate, notNullValue());
assertThat(p12Certificate, equalTo(certificate));
final Key key = ks.getKey(certInfo.name.originalName, pkcs12Password);
assertThat(key, notNullValue());
}
} else {
assertThat(p12, not(pathExists()));
}
}
}
}
public void testGetCAInfo() throws Exception {
Environment env = TestEnvironment.newEnvironment(Settings.builder().put("path.home", createTempDir()).build());
Path testNodeCertPath = getDataPath("/org/elasticsearch/xpack/security/cli/testnode.crt");
Path testNodeKeyPath = getDataPath("/org/elasticsearch/xpack/security/cli/testnode.pem");
final boolean passwordPrompt = randomBoolean();
MockTerminal terminal = MockTerminal.create();
if (passwordPrompt) {
terminal.addSecretInput("testnode");
}
final int days = randomIntBetween(1, 1024);
CAInfo caInfo = CertificateGenerateTool.getCAInfo(
terminal,
"CN=foo",
testNodeCertPath.toString(),
testNodeKeyPath.toString(),
passwordPrompt ? null : "testnode".toCharArray(),
passwordPrompt,
env,
randomFrom(1024, 2048),
days
);
assertTrue(terminal.getOutput().isEmpty());
assertEquals(caInfo.caCert.getSubjectX500Principal().getName(), "CN=Elasticsearch Test Node,OU=elasticsearch,O=org");
assertThat(caInfo.privateKey.getAlgorithm(), containsString("RSA"));
assertEquals(2048, ((RSAKey) caInfo.privateKey).getModulus().bitLength());
assertFalse(caInfo.generated);
long daysBetween = ChronoUnit.DAYS.between(caInfo.caCert.getNotBefore().toInstant(), caInfo.caCert.getNotAfter().toInstant());
assertEquals(1460L, daysBetween);
// test generation
final boolean passwordProtected = randomBoolean();
final char[] password;
if (passwordPrompt && passwordProtected) {
password = null;
terminal.addSecretInput("testnode");
} else {
password = "testnode".toCharArray();
}
final int keysize = randomFrom(1024, 2048);
caInfo = CertificateGenerateTool.getCAInfo(
terminal,
"CN=foo bar",
null,
null,
password,
passwordProtected && passwordPrompt,
env,
keysize,
days
);
assertTrue(terminal.getOutput().isEmpty());
assertThat(caInfo.caCert, instanceOf(X509Certificate.class));
assertEquals(caInfo.caCert.getSubjectX500Principal().getName(), "CN=foo bar");
assertThat(caInfo.privateKey.getAlgorithm(), containsString("RSA"));
assertTrue(caInfo.generated);
assertEquals(keysize, ((RSAKey) caInfo.privateKey).getModulus().bitLength());
daysBetween = ChronoUnit.DAYS.between(caInfo.caCert.getNotBefore().toInstant(), caInfo.caCert.getNotAfter().toInstant());
assertEquals(days, (int) daysBetween);
}
public void testNameValues() throws Exception {
// good name
Name name = Name.fromUserProvidedName("my instance", "my instance");
assertEquals("my instance", name.originalName);
assertNull(name.error);
assertEquals("CN=my instance", name.x500Principal.getName());
assertEquals("my instance", name.filename);
// too long
String userProvidedName = randomAlphaOfLength(CertificateGenerateTool.MAX_FILENAME_LENGTH + 1);
name = Name.fromUserProvidedName(userProvidedName, userProvidedName);
assertEquals(userProvidedName, name.originalName);
assertThat(name.error, containsString("valid filename"));
// too short
name = Name.fromUserProvidedName("", "");
assertEquals("", name.originalName);
assertThat(name.error, containsString("valid filename"));
assertEquals("CN=", name.x500Principal.getName());
assertNull(name.filename);
// invalid characters only
userProvidedName = "<>|<>*|?\"\\";
name = Name.fromUserProvidedName(userProvidedName, userProvidedName);
assertEquals(userProvidedName, name.originalName);
assertThat(name.error, containsString("valid DN"));
assertNull(name.x500Principal);
assertNull(name.filename);
// invalid for file but DN ok
userProvidedName = "*";
name = Name.fromUserProvidedName(userProvidedName, userProvidedName);
assertEquals(userProvidedName, name.originalName);
assertThat(name.error, containsString("valid filename"));
assertEquals("CN=" + userProvidedName, name.x500Principal.getName());
assertNull(name.filename);
// invalid with valid chars for filename
userProvidedName = "*.mydomain.com";
name = Name.fromUserProvidedName(userProvidedName, userProvidedName);
assertEquals(userProvidedName, name.originalName);
assertThat(name.error, containsString("valid filename"));
assertEquals("CN=" + userProvidedName, name.x500Principal.getName());
// valid but could create hidden file/dir so it is not allowed
userProvidedName = ".mydomain.com";
name = Name.fromUserProvidedName(userProvidedName, userProvidedName);
assertEquals(userProvidedName, name.originalName);
assertThat(name.error, containsString("valid filename"));
assertEquals("CN=" + userProvidedName, name.x500Principal.getName());
}
private PKCS10CertificationRequest readCertificateRequest(Path path) throws Exception {
try (Reader reader = Files.newBufferedReader(path); PEMParser pemParser = new PEMParser(reader)) {
Object object = pemParser.readObject();
assertThat(object, instanceOf(PKCS10CertificationRequest.class));
return (PKCS10CertificationRequest) object;
}
}
private X509Certificate readX509Certificate(InputStream input) throws Exception {
List<Certificate> list = CertParsingUtils.readCertificates(input);
assertEquals(1, list.size());
assertThat(list.get(0), instanceOf(X509Certificate.class));
return (X509Certificate) list.get(0);
}
private void assertSubjAltNames(GeneralNames subjAltNames, CertificateInformation certInfo) throws Exception {
final int expectedCount = certInfo.ipAddresses.size() + certInfo.dnsNames.size() + certInfo.commonNames.size();
assertEquals(expectedCount, subjAltNames.getNames().length);
Collections.sort(certInfo.dnsNames);
Collections.sort(certInfo.ipAddresses);
for (GeneralName generalName : subjAltNames.getNames()) {
if (generalName.getTagNo() == GeneralName.dNSName) {
String dns = ((ASN1String) generalName.getName()).getString();
assertTrue(certInfo.dnsNames.stream().anyMatch(dns::equals));
} else if (generalName.getTagNo() == GeneralName.iPAddress) {
byte[] ipBytes = DEROctetString.getInstance(generalName.getName()).getOctets();
String ip = NetworkAddress.format(InetAddress.getByAddress(ipBytes));
assertTrue(certInfo.ipAddresses.stream().anyMatch(ip::equals));
} else if (generalName.getTagNo() == GeneralName.otherName) {
ASN1Sequence seq = ASN1Sequence.getInstance(generalName.getName());
assertThat(seq.size(), equalTo(2));
assertThat(seq.getObjectAt(0), instanceOf(ASN1ObjectIdentifier.class));
assertThat(seq.getObjectAt(0).toString(), equalTo(CN_OID));
assertThat(seq.getObjectAt(1), instanceOf(DLTaggedObject.class));
DLTaggedObject taggedName = (DLTaggedObject) seq.getObjectAt(1);
assertThat(taggedName.getTagNo(), equalTo(0));
assertThat(taggedName.getBaseObject(), instanceOf(ASN1String.class));
assertThat(taggedName.getBaseObject().toString(), is(in(certInfo.commonNames)));
} else {
fail("unknown general name with tag " + generalName.getTagNo());
}
}
}
/**
* Gets a random name that is valid for certificate generation. There are some cases where the random value could match one of the
* reserved names like ca, so this method allows us to avoid these issues.
*/
private String getValidRandomInstanceName() {
String name;
boolean valid;
do {
name = randomAlphaOfLengthBetween(1, 32);
valid = Name.fromUserProvidedName(name, name).error == null;
} while (valid == false);
return name;
}
/**
* Writes the description of instances to a given {@link Path}
*/
private Path writeInstancesTo(Path path) throws IOException {
String instances = """
instances:
- name: "node1"
ip:
- "127.0.0.1"
dns: "localhost"
- name: "node2"
filename: "node2"
ip: "::1"
cn:
- "node2.elasticsearch"
- name: "node3"
filename: "node3"
- name: "CN=different value"
filename: "different file"
dns:
- "node4.mydomain.com"
""";
return Files.writeString(path, instances);
}
@SuppressForbidden(reason = "resolve paths against CWD for a CLI tool")
private static Path resolvePath(String path) {
return PathUtils.get(path).toAbsolutePath();
}
}
|
CertificateGenerateToolTests
|
java
|
elastic__elasticsearch
|
x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/process/AnalyticsProcess.java
|
{
"start": 478,
"end": 1301
}
|
interface ____<ProcessResult> extends NativeProcess {
/**
* Writes a control message that informs the process
* all data has been sent
* @throws IOException If an error occurs writing to the process
*/
void writeEndOfDataMessage() throws IOException;
/**
* @return stream of data frame analytics results.
*/
Iterator<ProcessResult> readAnalyticsResults();
/**
*
* @return the process config
*/
AnalyticsProcessConfig getConfig();
/**
* Restores the model state from a previously persisted one
* @param client the client to use for fetching the state documents
* @param stateDocIdPrefix the prefix of ids of the state documents
*/
void restoreState(Client client, String stateDocIdPrefix) throws IOException;
}
|
AnalyticsProcess
|
java
|
quarkusio__quarkus
|
extensions/resteasy-reactive/rest/spi-deployment/src/main/java/io/quarkus/resteasy/reactive/server/spi/HandlerConfigurationProviderBuildItem.java
|
{
"start": 818,
"end": 1501
}
|
class ____ extends MultiBuildItem {
/**
* The runtime configuration class
*/
private final Class configClass;
/**
* A supplier of the runtime value of the configuration class.
* This supplier is meant to be provided by a recorder
*/
private final Supplier valueSupplier;
public HandlerConfigurationProviderBuildItem(Class configClass, Supplier valueSupplier) {
this.configClass = configClass;
this.valueSupplier = valueSupplier;
}
public Class getConfigClass() {
return configClass;
}
public Supplier getValueSupplier() {
return valueSupplier;
}
}
|
HandlerConfigurationProviderBuildItem
|
java
|
assertj__assertj-core
|
assertj-core/src/main/java/org/assertj/core/api/ThrowableAssertAlternative.java
|
{
"start": 1382,
"end": 31816
}
|
class ____<ACTUAL extends Throwable>
extends AbstractObjectAssert<ThrowableAssertAlternative<ACTUAL>, ACTUAL> {
private final ThrowableAssert<ACTUAL> delegate;
public ThrowableAssertAlternative(final ACTUAL actual) {
super(actual, ThrowableAssertAlternative.class);
delegate = new ThrowableAssert<>(actual);
}
protected ThrowableAssert<ACTUAL> getDelegate() {
return delegate;
}
@Override
public ThrowableAssertAlternative<ACTUAL> as(Description description) {
return super.as(description);
}
/**
* Verifies that the message of the actual {@code Throwable} is equal to the given one.
* <p>
* Examples:
* <pre><code class='java'> Throwable illegalArgumentException = new IllegalArgumentException("wrong amount 123");
*
* // assertion succeeds:
* assertThatExceptionOfType(Throwable.class)
* .isThrownBy(() -> {throw illegalArgumentException;})
* .withMessage("wrong amount 123");
*
* // assertion fails:
* assertThatExceptionOfType(Throwable.class)
* .isThrownBy(() -> {throw illegalArgumentException;})
* .withMessage("wrong amount 123 euros");</code></pre>
*
* @param message the expected message.
* @return this assertion object.
* @throws AssertionError if the actual {@code Throwable} is {@code null}.
* @throws AssertionError if the message of the actual {@code Throwable} is not equal to the given one.
* @see AbstractThrowableAssert#hasMessage(String)
*/
public ThrowableAssertAlternative<ACTUAL> withMessage(String message) {
getDelegate().hasMessage(message);
return myself;
}
  /**
   * Verifies that the message of the actual {@code Throwable} is equal to the given one built using {@link String#format(String, Object...)} syntax.
   * <p>
   * Examples:
   * <pre><code class='java'> Throwable illegalArgumentException = new IllegalArgumentException("wrong amount 123");
   *
   * // assertion succeeds:
   * assertThatExceptionOfType(Throwable.class)
   *           .isThrownBy(() -> {throw illegalArgumentException;})
   *           .withMessage("wrong amount %s", 123);
   *
   * // assertion fails:
   * assertThatExceptionOfType(Throwable.class)
   *           .isThrownBy(() -> {throw illegalArgumentException;})
   *           .withMessage("wrong amount 123 euros");</code></pre>
   *
   * @param message a format string representing the expected message
   * @param parameters argument referenced by the format specifiers in the format string
   * @return this assertion object.
   * @throws AssertionError if the actual {@code Throwable} is {@code null}.
   * @throws AssertionError if the message of the actual {@code Throwable} is not equal to the given one.
   * @see AbstractThrowableAssert#hasMessage(String)
   */
  public ThrowableAssertAlternative<ACTUAL> withMessage(String message, Object... parameters) {
    // Delegate the formatted-message check to the wrapped ThrowableAssert, then return this for chaining.
    getDelegate().hasMessage(message, parameters);
    return myself;
  }
/**
* Verifies that the actual {@code Throwable} has a cause similar to the given one, that is with same type and message
* (it does not use {@link Throwable#equals(Object) equals} method for comparison).
* <p>
* Example:
* <pre><code class='java'> Throwable illegalArgumentException = new IllegalArgumentException("invalid arg");
* Throwable wrappingException = new Throwable(illegalArgumentException);
*
* // This assertion succeeds:
*
* assertThatExceptionOfType(Throwable.class)
* .isThrownBy(() -> {throw wrappingException;})
* .withCause(illegalArgumentException);
*
* // These assertions fail:
*
* assertThatExceptionOfType(Throwable.class)
* .isThrownBy(() -> {throw wrappingException;})
* .withCause(new IllegalArgumentException("bad arg"));
*
* assertThatExceptionOfType(Throwable.class)
* .isThrownBy(() -> {throw wrappingException;})
* .withCause(new NullPointerException());
*
* assertThatExceptionOfType(Throwable.class)
* .isThrownBy(() -> {throw wrappingException;})
* .withCause(null);</code></pre>
*
* @param cause the expected cause.
* @return this assertion object.
* @throws AssertionError if the actual {@code Throwable} is {@code null}.
* @throws AssertionError if the actual {@code Throwable} has not the given cause.
* @see AbstractThrowableAssert#hasCause(Throwable)
*/
public ThrowableAssertAlternative<ACTUAL> withCause(Throwable cause) {
getDelegate().hasCause(cause);
return myself;
}
/**
* Verifies that the actual {@code Throwable} does not have a cause.
* <p>
* Example:
* <pre><code class='java'> IllegalArgumentException exception = new IllegalArgumentException();
*
* // This assertion succeeds:
* assertThatExceptionOfType(IllegalArgumentException.class)
* .isThrownBy(() -> {throw exception;})
* .withNoCause();
*
* // These assertion fails:
* Throwable illegalArgumentException = new Throwable(exception);
* assertThatExceptionOfType(Throwable.class)
* .isThrownBy(() -> {throw illegalArgumentException;})
* .withNoCause();</code></pre>
*
* @return this assertion object.
* @throws AssertionError if the actual {@code Throwable} is {@code null}.
* @throws AssertionError if the actual {@code Throwable} has a cause.
* @see AbstractThrowableAssert#hasNoCause()
*/
public ThrowableAssertAlternative<ACTUAL> withNoCause() {
getDelegate().hasNoCause();
return myself;
}
/**
* Verifies that the message of the actual {@code Throwable} starts with the given description.
* <p>
* Examples:
* <pre><code class='java'> Throwable illegalArgumentException = new IllegalArgumentException("wrong amount 123");
*
* // assertion succeeds:
* assertThatExceptionOfType(Throwable.class)
* .isThrownBy(() -> {throw illegalArgumentException;})
* .withMessageStartingWith("wrong amount");
*
* // assertion fails:
* assertThatExceptionOfType(Throwable.class)
* .isThrownBy(() -> {throw illegalArgumentException;})
* .withMessageStartingWith("right amount");</code></pre>
*
* @param description the description expected to start the actual {@code Throwable}'s message.
* @return this assertion object.
* @throws AssertionError if the actual {@code Throwable} is {@code null}.
* @throws AssertionError if the message of the actual {@code Throwable} does not start with the given description.
* @see AbstractThrowableAssert#hasMessageStartingWith(String)
*/
public ThrowableAssertAlternative<ACTUAL> withMessageStartingWith(String description) {
getDelegate().hasMessageStartingWith(description);
return myself;
}
/**
* Verifies that the message of the actual {@code Throwable} starts with the given description, after being formatted using
* the {@link String#format} method.
* <p>
* Examples:
* <pre><code class='java'> Throwable illegalArgumentException = new IllegalArgumentException("wrong amount 123");
*
* // assertion succeeds:
* assertThatExceptionOfType(Throwable.class)
* .isThrownBy(() -> {throw illegalArgumentException;})
* .withMessageStartingWith("%s amount", "wrong");
*
* // assertion fails:
* assertThatExceptionOfType(Throwable.class)
* .isThrownBy(() -> {throw illegalArgumentException;})
* .withMessageStartingWith("%s amount", "right");</code></pre>
*
* @param description the description expected to start the actual {@code Throwable}'s message.
* @param parameters argument referenced by the format specifiers in the format string
* @return this assertion object.
* @throws AssertionError if the actual {@code Throwable} is {@code null}.
* @throws AssertionError if the message of the actual {@code Throwable} does not start with the given description.
* @throws IllegalFormatException if the message contains an illegal syntax according to {@link String#format(String, Object...)}.
* @see AbstractThrowableAssert#hasMessageStartingWith(String, Object...)
*/
public ThrowableAssertAlternative<ACTUAL> withMessageStartingWith(String description, Object... parameters) {
getDelegate().hasMessageStartingWith(description, parameters);
return myself;
}
/**
* Verifies that the message of the actual {@code Throwable} contains the given description.
* <p>
* Examples:
* <pre><code class='java'> Throwable illegalArgumentException = new IllegalArgumentException("wrong amount 123");
*
* // assertion succeeds:
* assertThatExceptionOfType(Throwable.class)
* .isThrownBy(() -> {throw illegalArgumentException;})
* .withMessageContaining("amount");
*
* // assertion fails:
* assertThatExceptionOfType(Throwable.class)
* .isThrownBy(() -> {throw illegalArgumentException;})
* .withMessageContaining("456");</code></pre>
*
* @param description the description expected to be contained in the actual {@code Throwable}'s message.
* @return this assertion object.
* @throws AssertionError if the actual {@code Throwable} is {@code null}.
* @throws AssertionError if the message of the actual {@code Throwable} does not contain the given description.
* @see AbstractThrowableAssert#hasMessageContaining(String)
*/
public ThrowableAssertAlternative<ACTUAL> withMessageContaining(String description) {
getDelegate().hasMessageContaining(description);
return myself;
}
/**
* Verifies that the message of the actual {@code Throwable} contains all the given values.
* <p>
* Examples:
* <pre><code class='java'> Throwable illegalArgumentException = new IllegalArgumentException("wrong amount 123");
*
* // assertion succeeds:
* assertThatExceptionOfType(Throwable.class)
* .isThrownBy(() -> {throw illegalArgumentException;})
* .withMessageContainingAll("amount", "123");
*
* // assertion fails:
* assertThatExceptionOfType(Throwable.class)
* .isThrownBy(() -> {throw illegalArgumentException;})
* .withMessageContainingAll("456");</code></pre>
*
* @param values the Strings expected to be contained in the actual {@code Throwable}'s message.
* @return this assertion object.
* @throws AssertionError if the actual {@code Throwable} is {@code null}.
* @throws AssertionError if the message of the actual {@code Throwable} does not contain all the given values.
* @see AbstractThrowableAssert#hasMessageContainingAll(CharSequence...)
*/
public ThrowableAssertAlternative<ACTUAL> withMessageContainingAll(CharSequence... values) {
getDelegate().hasMessageContainingAll(values);
return myself;
}
/**
* Verifies that the message of the actual {@code Throwable} does not contain the given content or is null.
* <p>
* Examples:
* <pre><code class='java'> //assertions will pass
* assertThatExceptionOfType(Exception.class)
* .isThrownBy(codeThrowing(new Exception("boom")))
* .withMessageNotContaining("bam");
*
* assertThatExceptionOfType(Exception.class)
* .isThrownBy(codeThrowing(new Exception()))
* .withMessageNotContaining("bam");
*
* //assertion fails:
* assertThatExceptionOfType(Exception.class)
* .isThrownBy(codeThrowing(new Exception("boom")))
* .withMessageNotContaining("boom");</code></pre>
*
* @param content the content expected to not be contained in the actual {@code Throwable}'s message.
* @return this assertion object
* @throws AssertionError if the actual {@code Throwable} is {@code null}.
* @throws AssertionError if the message of the actual {@code Throwable} contains the given content.
* @see AbstractThrowableAssert#hasMessageNotContaining(String)
*/
public ThrowableAssertAlternative<ACTUAL> withMessageNotContaining(String content) {
getDelegate().hasMessageNotContaining(content);
return myself;
}
/**
* Verifies that the message of the actual {@code Throwable} does not contain any of the given values or is {@code null}.
* <p>
* Examples:
* <pre><code class='java'> //assertions will pass
* assertThatExceptionOfType(Exception.class)
* .isThrownBy(codeThrowing(new Exception("boom")))
* .withMessageNotContainingAny("bam");
*
* assertThatExceptionOfType(Exception.class)
* .isThrownBy(codeThrowing(new Exception()))
* .withMessageNotContainingAny("bam");
*
* // assertion fails:
* assertThatExceptionOfType(Exception.class)
* .isThrownBy(codeThrowing(new Exception("boom")))
* .withMessageNotContainingAny("bam", "boom");</code></pre>
*
* @param values the contents expected to not be contained in the actual {@code Throwable}'s message.
* @return this assertion object
* @throws AssertionError if the actual {@code Throwable} is {@code null}.
* @throws AssertionError if the message of the actual {@code Throwable} contains any of the given values.
* @see AbstractThrowableAssert#hasMessageNotContainingAny(CharSequence...)
*/
public ThrowableAssertAlternative<ACTUAL> withMessageNotContainingAny(CharSequence... values) {
getDelegate().hasMessageNotContainingAny(values);
return myself;
}
/**
* Verifies that the stack trace of the actual {@code Throwable} contains with the given description.
* <p>
* Examples:
* <pre><code class='java'> Throwable illegalArgumentException = new IllegalArgumentException("wrong amount 123");
*
* // assertion succeeds:
* assertThatExceptionOfType(Throwable.class)
* .isThrownBy(() -> {throw illegalArgumentException;})
* .withStackTraceContaining("amount");
*
* // assertion fails:
* assertThatExceptionOfType(Throwable.class)
* .isThrownBy(() -> {throw illegalArgumentException;})
* .withStackTraceContaining("456");</code></pre>
*
* @param description the description expected to be contained in the actual {@code Throwable}'s stack trace.
* @return this assertion object.
* @throws AssertionError if the actual {@code Throwable} is {@code null}.
* @throws AssertionError if the stack trace of the actual {@code Throwable} does not contain the given description.
* @see AbstractThrowableAssert#hasStackTraceContaining(String)
*/
public ThrowableAssertAlternative<ACTUAL> withStackTraceContaining(String description) {
getDelegate().hasStackTraceContaining(description);
return myself;
}
  /**
   * Verifies that the stack trace of the actual {@code Throwable} contains with the given description, after being formatted using
   * the {@link String#format} method.
   * <p>
   * Examples:
   * <pre><code class='java'> Throwable illegalArgumentException = new IllegalArgumentException("wrong amount 123");
   *
   * // assertion succeeds:
   * assertThatExceptionOfType(Throwable.class)
   *           .isThrownBy(() -> {throw illegalArgumentException;})
   *           .withStackTraceContaining("%s", "amount");
   *
   * // assertion fails:
   * assertThatExceptionOfType(Throwable.class)
   *           .isThrownBy(() -> {throw illegalArgumentException;})
   *           .withStackTraceContaining("%d", 456);</code></pre>
   *
   * @param description the description expected to be contained in the actual {@code Throwable}'s stack trace.
   * @param parameters argument referenced by the format specifiers in the format string
   * @return this assertion object.
   * @throws AssertionError if the actual {@code Throwable} is {@code null}.
   * @throws AssertionError if the stack trace of the actual {@code Throwable} does not contain the given description.
   * @throws IllegalFormatException if the message contains an illegal syntax according to {@link String#format(String, Object...)}.
   * @see AbstractThrowableAssert#hasStackTraceContaining(String, Object...)
   */
  public ThrowableAssertAlternative<ACTUAL> withStackTraceContaining(String description, Object... parameters) {
    // Delegate the formatted stack-trace check to the wrapped ThrowableAssert, then return this for chaining.
    getDelegate().hasStackTraceContaining(description, parameters);
    return myself;
  }
/**
* Verifies that the message of the actual {@code Throwable} matches with the given regular expression.
* <p>
* Examples:
* <pre><code class='java'> Throwable illegalArgumentException = new IllegalArgumentException("wrong amount 123");
*
* // assertion succeeds:
* assertThatExceptionOfType(Throwable.class)
* .isThrownBy(() -> {throw illegalArgumentException;})
* .withMessageMatching("wrong amount [0-9]*");
*
* // assertion fails:
* assertThatExceptionOfType(Throwable.class)
* .isThrownBy(() -> {throw illegalArgumentException;})
* .withMessageMatching("wrong amount [0-9]* euros");</code></pre>
*
* @param regex the regular expression of value expected to be matched the actual {@code Throwable}'s message.
* @return this assertion object.
* @throws AssertionError if the actual {@code Throwable} is {@code null}.
* @throws AssertionError if the message of the actual {@code Throwable} does not match the given regular expression.
* @throws NullPointerException if the regex is null
* @see AbstractThrowableAssert#hasMessageMatching(String)
*/
public ThrowableAssertAlternative<ACTUAL> withMessageMatching(String regex) {
getDelegate().hasMessageMatching(regex);
return myself;
}
/**
* Verifies that the message of the actual {@code Throwable} ends with the given description.
* <p>
* Examples:
* <pre><code class='java'> Throwable illegalArgumentException = new IllegalArgumentException("wrong amount 123");
*
* // assertion succeeds:
* assertThatExceptionOfType(Throwable.class)
* .isThrownBy(() -> {throw illegalArgumentException;})
* .withMessageEndingWith("123");
*
* // assertion fails:
* assertThatExceptionOfType(Throwable.class)
* .isThrownBy(() -> {throw illegalArgumentException;})
* .withMessageEndingWith("456");</code></pre>
*
* @param description the description expected to end the actual {@code Throwable}'s message.
* @return this assertion object.
* @throws AssertionError if the actual {@code Throwable} is {@code null}.
* @throws AssertionError if the message of the actual {@code Throwable} does not end with the given description.
* @see AbstractThrowableAssert#hasMessageEndingWith(String)
*/
public ThrowableAssertAlternative<ACTUAL> withMessageEndingWith(String description) {
getDelegate().hasMessageEndingWith(description);
return myself;
}
/**
* Verifies that the message of the actual {@code Throwable} ends with the given description, after being formatted using
* the {@link String#format} method.
* <p>
* Examples:
* <pre><code class='java'> Throwable illegalArgumentException = new IllegalArgumentException("wrong amount 123");
*
* // assertion succeeds:
* assertThatExceptionOfType(Throwable.class)
* .isThrownBy(() -> {throw illegalArgumentException;})
* .withMessageEndingWith("%d", 123);
*
* // assertion fails:
* assertThatExceptionOfType(Throwable.class)
* .isThrownBy(() -> {throw illegalArgumentException;})
* .withMessageEndingWith("%d", 456);</code></pre>
*
* @param description the description expected to end the actual {@code Throwable}'s message.
* @param parameters argument referenced by the format specifiers in the format string
* @return this assertion object.
* @throws AssertionError if the actual {@code Throwable} is {@code null}.
* @throws AssertionError if the message of the actual {@code Throwable} does not end with the given description.
* @throws IllegalFormatException if the message contains an illegal syntax according to {@link String#format(String, Object...)}.
* @see AbstractThrowableAssert#hasMessageEndingWith(String, Object...)
*/
public ThrowableAssertAlternative<ACTUAL> withMessageEndingWith(String description, Object... parameters) {
getDelegate().hasMessageEndingWith(description, parameters);
return myself;
}
/**
* Verifies that the cause of the actual {@code Throwable} is an instance of the given type.
* <p>
* Example:
* <pre><code class='java'> Throwable throwable = new Throwable(new NullPointerException());
*
* // assertion succeeds:
* assertThatExceptionOfType(Throwable.class)
* .isThrownBy(() -> {throw throwable;})
* .withCauseInstanceOf(NullPointerException.class);
* assertThatExceptionOfType(Throwable.class)
* .isThrownBy(() -> {throw throwable;})
* .withCauseInstanceOf(RuntimeException.class);
*
* // assertion fails:
* assertThatExceptionOfType(Throwable.class)
* .isThrownBy(() -> {throw throwable;})
* .withCauseInstanceOf(IllegalArgumentException.class);</code></pre>
*
* @param type the expected cause type.
* @return this assertion object.
* @throws NullPointerException if given type is {@code null}.
* @throws AssertionError if the actual {@code Throwable} is {@code null}.
* @throws AssertionError if the actual {@code Throwable} has no cause.
* @throws AssertionError if the cause of the actual {@code Throwable} is not an instance of the given type.
* @see AbstractThrowableAssert#hasCauseInstanceOf(Class)
*/
public ThrowableAssertAlternative<ACTUAL> withCauseInstanceOf(Class<? extends Throwable> type) {
getDelegate().hasCauseInstanceOf(type);
return myself;
}
/**
* Verifies that the cause of the actual {@code Throwable} is <b>exactly</b> an instance of the given type.
* <p>
* Example:
* <pre><code class='java'> Throwable throwable = new Throwable(new NullPointerException());
*
* // assertion succeeds:
* assertThatExceptionOfType(Throwable.class)
* .isThrownBy(() -> {throw throwable;})
* .withCauseExactlyInstanceOf(NullPointerException.class);
*
* // assertions will fail (even if NullPointerException is a RuntimeException since we want an exact match)
* assertThatExceptionOfType(Throwable.class)
* .isThrownBy(() -> {throw throwable;})
* .withCauseExactlyInstanceOf(RuntimeException.class);
* assertThatExceptionOfType(Throwable.class)
* .isThrownBy(() -> {throw throwable;})
* .withCauseExactlyInstanceOf(IllegalArgumentException.class);</code></pre>
*
* @param type the expected cause type.
* @return this assertion object.
* @throws NullPointerException if given type is {@code null}.
* @throws AssertionError if the actual {@code Throwable} is {@code null}.
* @throws AssertionError if the actual {@code Throwable} has no cause.
* @throws AssertionError if the cause of the actual {@code Throwable} is not <b>exactly</b> an instance of the given
* type.
* @see AbstractThrowableAssert#hasCauseExactlyInstanceOf(Class)
*/
public ThrowableAssertAlternative<ACTUAL> withCauseExactlyInstanceOf(Class<? extends Throwable> type) {
getDelegate().hasCauseExactlyInstanceOf(type);
return myself;
}
/**
* Verifies that the root cause of the actual {@code Throwable} is an instance of the given type.
* <p>
* Example:
* <pre><code class='java'> Throwable throwable = new Throwable(
* new IllegalStateException(
* new NullPointerException()));
*
* // assertion succeeds:
* assertThatExceptionOfType(Throwable.class)
* .isThrownBy(() -> {throw throwable;})
* .withRootCauseInstanceOf(NullPointerException.class);
* assertThatExceptionOfType(Throwable.class)
* .isThrownBy(() -> {throw throwable;})
* .withRootCauseInstanceOf(RuntimeException.class);
*
* // assertion fails:
* assertThatExceptionOfType(Throwable.class)
* .isThrownBy(() -> {throw throwable;})
* .withRootCauseInstanceOf(IllegalStateException.class);</code></pre>
*
* @param type the expected cause type.
* @return this assertion object.
* @throws NullPointerException if given type is {@code null}.
* @throws AssertionError if the actual {@code Throwable} is {@code null}.
* @throws AssertionError if the actual {@code Throwable} has no cause.
* @throws AssertionError if the cause of the actual {@code Throwable} is not an instance of the given type.
* @see AbstractThrowableAssert#hasRootCauseInstanceOf(Class)
*/
public ThrowableAssertAlternative<ACTUAL> withRootCauseInstanceOf(Class<? extends Throwable> type) {
getDelegate().hasRootCauseInstanceOf(type);
return myself;
}
/**
* Verifies that the root cause of the actual {@code Throwable} is <b>exactly</b> an instance of the given type.
* <p>
* Example:
* <pre><code class='java'> Throwable throwable = new Throwable(
* new IllegalStateException(
* new NullPointerException()));
*
* // assertion succeeds:
* assertThatExceptionOfType(Throwable.class)
* .isThrownBy(() -> {throw throwable;})
* .withRootCauseExactlyInstanceOf(NullPointerException.class);
*
* // assertion fails (even if NullPointerException is a RuntimeException since we want an exact match)
* assertThatExceptionOfType(Throwable.class)
* .isThrownBy(() -> {throw throwable;})
* .withRootCauseExactlyInstanceOf(RuntimeException.class);
* assertThatExceptionOfType(Throwable.class)
* .isThrownBy(() -> {throw throwable;})
* .withRootCauseExactlyInstanceOf(IllegalStateException.class);</code></pre>
*
* @param type the expected cause type.
* @return this assertion object.
* @throws NullPointerException if given type is {@code null}.
* @throws AssertionError if the actual {@code Throwable} is {@code null}.
* @throws AssertionError if the actual {@code Throwable} has no cause.
* @throws AssertionError if the root cause of the actual {@code Throwable} is not <b>exactly</b> an instance of the
* given type.
* @see AbstractThrowableAssert#hasRootCauseExactlyInstanceOf(Class)
*/
public ThrowableAssertAlternative<ACTUAL> withRootCauseExactlyInstanceOf(Class<? extends Throwable> type) {
getDelegate().hasRootCauseExactlyInstanceOf(type);
return myself;
}
  /** {@inheritDoc} */
  @Override
  @CheckReturnValue
  public ThrowableAssertAlternative<ACTUAL> describedAs(String description, Object... args) {
    // Propagate the description to the wrapped assert as well, so that failures raised by the
    // delegate carry the user-supplied description in their error message.
    getDelegate().describedAs(description, args);
    return super.describedAs(description, args);
  }
  /** {@inheritDoc} */
  @Override
  @CheckReturnValue
  public ThrowableAssertAlternative<ACTUAL> describedAs(Description description) {
    // Same propagation as the String overload, for Description-based descriptions.
    getDelegate().describedAs(description);
    return super.describedAs(description);
  }
  /**
   * Checks if the actual {@link Throwable} has a cause and returns a new assertion object where the
   * cause becomes the actual Throwable in order to further assert properties of the cause {@link Throwable}
   *
   * @return a new assertion object with the cause of the current actual becoming the new actual
   * @throws AssertionError if the actual {@code Throwable} is {@code null}.
   * @throws AssertionError if the actual {@code Throwable} has no cause.
   *
   * @since 3.16.0
   */
  public ThrowableAssertAlternative<?> havingCause() {
    // cause() fails if there is no cause; otherwise its 'actual' holds the cause Throwable,
    // which becomes the subject of the returned assertion.
    AbstractThrowableAssert<?, ?> causeAssert = getDelegate().cause();
    return new ThrowableAssertAlternative<>(causeAssert.actual);
  }
  /**
   * Checks if the actual {@link Throwable} has a root cause and returns a new assertion object where the
   * root cause becomes the actual Throwable in order to further assert properties of the cause {@link Throwable}
   *
   * @return a new assertion object with the root cause of the current actual becoming the new actual
   * @throws AssertionError if the actual {@code Throwable} is {@code null}.
   * @throws AssertionError if the actual {@code Throwable} has no root cause.
   *
   * @since 3.16.0
   */
  public ThrowableAssertAlternative<?> havingRootCause() {
    // rootCause() fails if there is no cause chain; otherwise its 'actual' holds the deepest cause,
    // which becomes the subject of the returned assertion.
    AbstractThrowableAssert<?, ?> rootCauseAssert = getDelegate().rootCause();
    return new ThrowableAssertAlternative<>(rootCauseAssert.actual);
  }
/**
* Returns a new assertion object that uses the suppressed exceptions of the current {@link Throwable} as the object under test.
* <p>
* As suppressed exceptions is a {@code Throwable[]}, you can chain any array assertions after {@code withSuppressedExceptionsThat()}.
* <p>
* You can navigate back to the current {@link Throwable} with {@link AbstractSuppressedExceptionsAssert#returnToInitialThrowable() returnToInitialThrowable()}.
* <p>
* Examples:
* <pre><code class='java'>var exception = new Exception("boom!");
* Throwable invalidArgException = new IllegalArgumentException("invalid argument");
* Throwable ioException = new IOException("IO error");
* exception.addSuppressed(invalidArgException);
* exception.addSuppressed(ioException);
*
* // these assertions succeed:
* assertThatException().isThrownBy(() -> { throw exception; })
* .withSuppressedExceptionsThat()
* .containsOnly(invalidArgException, ioException)
* .returnToInitialThrowable()
* .hasMessage("boom!");
*
* // this assertion fails:
* assertThatException().isThrownBy(() -> { throw exception; })
* .withSuppressedExceptionsThat()
* .isEmpty();</code></pre>
*
* @return a new assertion object
* @throws AssertionError if the actual {@code Throwable} is {@code null}.
* @since 4.0.0
*/
public AbstractSuppressedExceptionsAssert<ThrowableAssert<ACTUAL>, ACTUAL> withSuppressedExceptionsThat() {
return getDelegate().suppressedExceptions();
}
}
|
ThrowableAssertAlternative
|
java
|
apache__camel
|
components/camel-google/camel-google-pubsub-lite/src/test/java/org/apache/camel/component/google/pubsublite/GooglePubsubLiteProducerTest.java
|
{
"start": 1399,
"end": 4476
}
|
class ____ extends CamelTestSupport {
    // Mocked collaborators wired together in the tests; initialized in doPreSetup().
    @Mock
    private GooglePubsubLiteEndpoint mockEndpoint;
    @Mock
    private Exchange mockExchange;
    @Mock
    private Message mockMessage;
    @Mock
    private Publisher mockPublisher;
    @Override
    public void doPreSetup() {
        // NOTE(review): MockitoAnnotations.initMocks is deprecated since Mockito 2;
        // consider MockitoAnnotations.openMocks(this) — TODO confirm the Mockito version in use.
        MockitoAnnotations.initMocks(this);
    }
    // Happy path: a message body is published exactly once and the publish future resolves.
    @Test
    public void testProcess() throws Exception {
        GooglePubsubLiteProducer producer = new GooglePubsubLiteProducer(mockEndpoint);
        String testPayload = "Test Payload";
        when(mockExchange.getIn()).thenReturn(mockMessage);
        when(mockEndpoint.getProjectId()).thenReturn(123456789012L);
        when(mockEndpoint.getLocation()).thenReturn("europe-west3");
        when(mockEndpoint.getDestinationName()).thenReturn("testDestination");
        when(mockEndpoint.getComponent()).thenReturn(mock(GooglePubsubLiteComponent.class));
        when(mockEndpoint.getComponent().getPublisher(any(), any())).thenReturn(mockPublisher);
        when(mockExchange.getMessage()).thenReturn(mockMessage);
        when(mockMessage.getBody()).thenReturn(testPayload.getBytes());
        // No attributes and no ordering key on the exchange headers.
        when(mockExchange.getMessage().getHeader(GooglePubsubLiteConstants.ATTRIBUTES, Map.class)).thenReturn(null);
        when(mockExchange.getMessage().getHeader(GooglePubsubLiteConstants.ORDERING_KEY, String.class)).thenReturn(null);
        when(mockPublisher.publish(any())).thenReturn(ApiFutures.immediateFuture("messageId"));
        producer.process(mockExchange);
        verify(mockPublisher, times(1)).publish(any());
    }
    // Failure path: obtaining the publisher throws, and the exception propagates out of process().
    @Test
    public void testProcessException() throws ExecutionException {
        GooglePubsubLiteProducer producer = new GooglePubsubLiteProducer(mockEndpoint);
        String testPayload = "Test Payload";
        when(mockEndpoint.getProjectId()).thenReturn(123456789012L);
        when(mockEndpoint.getLocation()).thenReturn("europe-west3");
        when(mockEndpoint.getDestinationName()).thenReturn("testDestination");
        when(mockEndpoint.getComponent()).thenReturn(mock(GooglePubsubLiteComponent.class));
        // Make getPublisher() throw an ExecutionException
        when(mockEndpoint.getComponent().getPublisher(any(), any()))
                .thenThrow(new ExecutionException("Test exception", new Throwable()));
        when(mockExchange.getIn()).thenReturn(mockMessage);
        when(mockMessage.getBody()).thenReturn(testPayload.getBytes());
        when(mockExchange.getIn().getHeader(GooglePubsubLiteConstants.ATTRIBUTES, Map.class)).thenReturn(null);
        when(mockExchange.getIn().getHeader(GooglePubsubLiteConstants.ORDERING_KEY, String.class)).thenReturn(null);
        assertThrows(ExecutionException.class, () -> producer.process(mockExchange));
    }
    @Override
    protected RouteBuilder createRouteBuilder() {
        // Minimal route so the Camel test context can start.
        return new RouteBuilder() {
            public void configure() {
                from("direct:start")
                        .to("google-pubsub-lite:123456789012:europe-west3:test");
            }
        };
    }
}
|
GooglePubsubLiteProducerTest
|
java
|
FasterXML__jackson-databind
|
src/test/java/tools/jackson/databind/mixins/TestMixinDeserForClass.java
|
{
"start": 827,
"end": 965
}
|
class ____
extends BaseClass { }
@JsonAutoDetect(setterVisibility=Visibility.NONE, fieldVisibility=Visibility.NONE)
|
LeafClass
|
java
|
apache__flink
|
flink-table/flink-table-runtime/src/main/java/org/apache/flink/table/runtime/functions/scalar/UrlEncodeFunction.java
|
{
"start": 1360,
"end": 1940
}
|
class ____ extends BuiltInScalarFunction {
    public UrlEncodeFunction(SpecializedFunction.SpecializedContext context) {
        super(BuiltInFunctionDefinitions.URL_ENCODE, context);
    }
    /**
     * Percent-encodes the given string using UTF-8 (application/x-www-form-urlencoded rules).
     * Returns {@code null} for {@code null} input.
     */
    public @Nullable StringData eval(StringData url) {
        if (url == null) {
            return null;
        }
        final Charset charset = StandardCharsets.UTF_8;
        try {
            return StringData.fromString(URLEncoder.encode(url.toString(), charset.name()));
        } catch (UnsupportedEncodingException e) {
            // UTF-8 is guaranteed to be supported by every JVM, so this branch is effectively
            // unreachable; null is returned defensively rather than failing the query.
            return null;
        }
    }
}
|
UrlEncodeFunction
|
java
|
spring-projects__spring-boot
|
module/spring-boot-webmvc-test/src/test/java/org/springframework/boot/webmvc/test/autoconfigure/mockmvc/WebMvcTestJacksonComponentIntegrationTests.java
|
{
"start": 1245,
"end": 1530
}
|
class ____ {
    @Autowired
    private MockMvcTester mvc;
    // POSTing an id should be rendered by the registered Jackson component as {"identifier": ...}.
    @Test
    void shouldFindJacksonComponent() {
        assertThat(this.mvc.post().uri("/two/1234abcd")).hasStatusOk().bodyJson().isLenientlyEqualTo("""
                { "identifier": "1234abcd" }
                """);
    }
}
|
WebMvcTestJacksonComponentIntegrationTests
|
java
|
apache__flink
|
flink-table/flink-table-runtime/src/main/java/org/apache/flink/table/runtime/operators/rank/RetractableTopNFunction.java
|
{
"start": 2349,
"end": 19892
}
|
class ____ extends AbstractSyncStateTopNFunction {
private static final long serialVersionUID = 1365312180599454480L;
private static final Logger LOG = LoggerFactory.getLogger(RetractableTopNFunction.class);
// Message to indicate the state is cleared because of ttl restriction. The message could be
// used to output to log.
private static final String STATE_CLEARED_WARN_MSG =
"The state is cleared because of state ttl. "
+ "This will result in incorrect result. You can increase the state ttl to avoid this.";
private final InternalTypeInfo<RowData> sortKeyType;
// flag to skip records with non-exist error instead to fail, true by default.
private final boolean lenient = true;
// a map state stores mapping from sort key to records list
private transient MapState<RowData, List<RowData>> dataState;
// a sorted map stores mapping from sort key to records count
private transient ValueState<SortedMap<RowData, Long>> treeMap;
// The util to compare two RowData equals to each other.
private GeneratedRecordEqualiser generatedEqualiser;
private RecordEqualiser equaliser;
private final ComparableRecordComparator serializableComparator;
private final TypeSerializer<RowData> inputRowSer;
    public RetractableTopNFunction(
            StateTtlConfig ttlConfig,
            InternalTypeInfo<RowData> inputRowType,
            ComparableRecordComparator comparableRecordComparator,
            RowDataKeySelector sortKeySelector,
            RankType rankType,
            RankRange rankRange,
            GeneratedRecordEqualiser generatedEqualiser,
            boolean generateUpdateBefore,
            boolean outputRankNumber) {
        super(
                ttlConfig,
                inputRowType,
                comparableRecordComparator.getGeneratedRecordComparator(),
                sortKeySelector,
                rankType,
                rankRange,
                generateUpdateBefore,
                outputRankNumber);
        // Keep the serializable comparator and the generated equaliser; the equaliser is
        // compiled (and this reference released) in open().
        this.sortKeyType = sortKeySelector.getProducedType();
        this.serializableComparator = comparableRecordComparator;
        this.generatedEqualiser = generatedEqualiser;
        this.inputRowSer = inputRowType.createSerializer(new SerializerConfigImpl());
    }
    @Override
    public void open(OpenContext openContext) throws Exception {
        super.open(openContext);
        // compile equaliser; drop the generated form afterwards so it can be garbage collected
        equaliser = generatedEqualiser.newInstance(getRuntimeContext().getUserCodeClassLoader());
        generatedEqualiser = null;
        // "data-state": sort key -> list of input rows sharing that sort key
        ListTypeInfo<RowData> valueTypeInfo = new ListTypeInfo<>(inputRowType);
        MapStateDescriptor<RowData, List<RowData>> mapStateDescriptor =
                new MapStateDescriptor<>("data-state", sortKeyType, valueTypeInfo);
        if (ttlConfig.isEnabled()) {
            mapStateDescriptor.enableTimeToLive(ttlConfig);
        }
        dataState = getRuntimeContext().getMapState(mapStateDescriptor);
        // "sorted-map": sort key -> record count, ordered by the serializable comparator
        ValueStateDescriptor<SortedMap<RowData, Long>> valueStateDescriptor =
                new ValueStateDescriptor<>(
                        "sorted-map",
                        new SortedMapTypeInfo<>(
                                sortKeyType, BasicTypeInfo.LONG_TYPE_INFO, serializableComparator));
        if (ttlConfig.isEnabled()) {
            valueStateDescriptor.enableTimeToLive(ttlConfig);
        }
        treeMap = getRuntimeContext().getState(valueStateDescriptor);
    }
/**
 * Processes one input row. An accumulate message inserts the row into both states and emits
 * the incremental Top-N changes; a retract message emits the compensating changes first and
 * then removes the row from the states.
 *
 * @param input the input row; its row kind is erased to INSERT before state access
 * @param ctx the runtime context
 * @param out collector for output records
 */
@Override
public void processElement(RowData input, Context ctx, Collector<RowData> out)
throws Exception {
initRankEnd(input);
// lazily initialize the sorted view (sort key -> record count) from state
SortedMap<RowData, Long> sortedMap = treeMap.value();
if (sortedMap == null) {
sortedMap = new TreeMap<>(sortKeyComparator);
}
RowData sortKey = sortKeySelector.getKey(input);
boolean isAccumulate = RowDataUtil.isAccumulateMsg(input);
input.setRowKind(RowKind.INSERT); // erase row kind for further state accessing
if (isAccumulate) {
// update sortedMap
if (sortedMap.containsKey(sortKey)) {
sortedMap.put(sortKey, sortedMap.get(sortKey) + 1);
} else {
sortedMap.put(sortKey, 1L);
}
// emit
if (outputRankNumber || hasOffset()) {
// the without-number-algorithm can't handle topN with offset,
// so use the with-number-algorithm to handle offset
emitRecordsWithRowNumber(sortedMap, sortKey, input, out);
} else {
emitRecordsWithoutRowNumber(sortedMap, sortKey, input, out);
}
// update data state
List<RowData> inputs = dataState.get(sortKey);
if (inputs == null) {
// the sort key is never seen
inputs = new ArrayList<>();
}
inputs.add(input);
dataState.put(sortKey, inputs);
} else {
final boolean stateRemoved;
// emit updates first
if (outputRankNumber || hasOffset()) {
// the without-number-algorithm can't handle topN with offset,
// so use the with-number-algorithm to handle offset
stateRemoved = retractRecordWithRowNumber(sortedMap, sortKey, input, out);
} else {
stateRemoved = retractRecordWithoutRowNumber(sortedMap, sortKey, input, out);
}
// and then update sortedMap
if (sortedMap.containsKey(sortKey)) {
long count = sortedMap.get(sortKey) - 1;
if (count == 0) {
sortedMap.remove(sortKey);
} else {
sortedMap.put(sortKey, count);
}
} else {
// no counter for this sort key — state has likely been cleared (e.g. by TTL)
stateStaledErrorHandle();
}
if (!stateRemoved) {
// the input record has not been removed from state
// should update the data state
List<RowData> inputs = dataState.get(sortKey);
if (inputs != null) {
// comparing record by equaliser
Iterator<RowData> inputsIter = inputs.iterator();
while (inputsIter.hasNext()) {
if (equaliser.equals(inputsIter.next(), input)) {
inputsIter.remove();
break;
}
}
if (inputs.isEmpty()) {
dataState.remove(sortKey);
} else {
dataState.put(sortKey, inputs);
}
}
}
}
// persist the updated sorted view back into state
treeMap.update(sortedMap);
}
// ------------- ROW_NUMBER-------------------------------
/**
 * Removes the current (stale) entry from the sorted-map iterator so that it stays in sync
 * with {@code dataState}, then reports the staleness via {@link #stateStaledErrorHandle()}.
 *
 * @param sortedMapIterator iterator positioned on the stale sorted-map entry
 */
private void processStateStaled(Iterator<Map.Entry<RowData, Long>> sortedMapIterator)
throws RuntimeException {
// Sync with dataState first
sortedMapIterator.remove();
stateStaledErrorHandle();
}
/**
 * Handles a state-staled condition according to the configured lenient option. With lenient
 * enabled only a warning is logged; otherwise a {@link RuntimeException} is thrown.
 */
private void stateStaledErrorHandle() {
    // Fail fast unless the user opted into lenient handling.
    if (!lenient) {
        throw new RuntimeException(STATE_CLEARED_WARN_MSG);
    }
    // State was cleared (e.g. by state TTL); tolerate it and only log a warning.
    LOG.warn(STATE_CLEARED_WARN_MSG);
}
/**
 * Emits updates for a newly accumulated row when the rank number is part of the output.
 * Every row ranked at or below the new row's position is shifted down by one via an
 * UPDATE_BEFORE/UPDATE_AFTER pair; if the Top-N is not yet full, a trailing INSERT is emitted.
 *
 * @param sortedMap sort key -> record count, already updated with {@code sortKey}
 * @param sortKey sort key of the accumulated row
 * @param inputRow the accumulated row
 * @param out collector for output records
 */
private void emitRecordsWithRowNumber(
SortedMap<RowData, Long> sortedMap,
RowData sortKey,
RowData inputRow,
Collector<RowData> out)
throws Exception {
Iterator<Map.Entry<RowData, Long>> iterator = sortedMap.entrySet().iterator();
long currentRank = 0L;
RowData currentRow = null;
boolean findsSortKey = false;
while (iterator.hasNext() && isInRankEnd(currentRank)) {
Map.Entry<RowData, Long> entry = iterator.next();
RowData key = entry.getKey();
if (!findsSortKey && key.equals(sortKey)) {
// reached the new row's sort key: it takes this rank position
currentRank += entry.getValue();
currentRow = inputRow;
findsSortKey = true;
} else if (findsSortKey) {
// rows after the new key shift down by one rank each
List<RowData> inputs = dataState.get(key);
if (inputs == null) {
processStateStaled(iterator);
} else {
int i = 0;
while (i < inputs.size() && isInRankEnd(currentRank)) {
RowData prevRow = inputs.get(i);
collectUpdateBefore(out, prevRow, currentRank);
collectUpdateAfter(out, currentRow, currentRank);
currentRow = prevRow;
currentRank += 1;
i++;
}
}
} else {
// keys ranked above the new row are unaffected; just advance the rank
currentRank += entry.getValue();
}
}
if (isInRankEnd(currentRank)) {
// there is no enough elements in Top-N, emit INSERT message for the new record.
collectInsert(out, currentRow, currentRank);
}
}
/**
 * Emits updates for a newly accumulated row when the rank number is not part of the output.
 * At most one DELETE (for the row pushed out of the Top-N) and one INSERT (for the new row)
 * are emitted.
 *
 * @param sortedMap sort key -> record count, already updated with {@code sortKey}
 * @param sortKey sort key of the accumulated row
 * @param inputRow the accumulated row
 * @param out collector for output records
 */
private void emitRecordsWithoutRowNumber(
        SortedMap<RowData, Long> sortedMap,
        RowData sortKey,
        RowData inputRow,
        Collector<RowData> out)
        throws Exception {
    Iterator<Map.Entry<RowData, Long>> iterator = sortedMap.entrySet().iterator();
    long curRank = 0L;
    boolean findsSortKey = false;
    RowData toCollect = null;
    RowData toDelete = null;
    while (iterator.hasNext() && isInRankEnd(curRank)) {
        Map.Entry<RowData, Long> entry = iterator.next();
        RowData key = entry.getKey();
        if (!findsSortKey && key.equals(sortKey)) {
            curRank += entry.getValue();
            if (isInRankRange(curRank)) {
                toCollect = inputRow;
            }
            findsSortKey = true;
        } else if (findsSortKey) {
            List<RowData> inputs = dataState.get(key);
            if (inputs == null) {
                processStateStaled(iterator);
            } else {
                long count = entry.getValue();
                // gets the rank of last record with same sortKey
                long rankOfLastRecord = curRank + count;
                // deletes the record if there is a record recently downgrades to Top-(N+1)
                if (isInRankEnd(rankOfLastRecord)) {
                    curRank = rankOfLastRecord;
                } else {
                    // plain narrowing cast instead of Long.valueOf(..).intValue():
                    // same result, no needless boxing
                    int index = (int) (rankEnd - curRank);
                    toDelete = inputs.get(index);
                    break;
                }
            }
        } else {
            curRank += entry.getValue();
        }
    }
    if (toDelete != null) {
        // emit a copy so the state-owned row instance is not exposed downstream
        collectDelete(out, inputRowSer.copy(toDelete));
    }
    if (toCollect != null) {
        collectInsert(out, inputRow);
    }
}
/**
 * Retract the input record and emit updated records. This works for outputting with row_number.
 *
 * <p>Rows ranked below the retracted record shift up by one rank, each producing an
 * UPDATE_BEFORE/UPDATE_AFTER pair; the last affected row is emitted as a DELETE.
 *
 * @param sortedMap sort key -> record count (not yet decremented for this retraction)
 * @param sortKey sort key of the retracted row
 * @param inputRow the retracted row
 * @param out collector for output records
 * @return true if the input record has been removed from {@link #dataState}.
 */
private boolean retractRecordWithRowNumber(
SortedMap<RowData, Long> sortedMap,
RowData sortKey,
RowData inputRow,
Collector<RowData> out)
throws Exception {
Iterator<Map.Entry<RowData, Long>> iterator = sortedMap.entrySet().iterator();
long currentRank = 0L;
RowData prevRow = null;
boolean findsSortKey = false;
while (iterator.hasNext() && isInRankEnd(currentRank)) {
Map.Entry<RowData, Long> entry = iterator.next();
RowData key = entry.getKey();
if (!findsSortKey && key.equals(sortKey)) {
// locate the retracted row inside this key's row list
List<RowData> inputs = dataState.get(key);
if (inputs == null) {
processStateStaled(iterator);
} else {
Iterator<RowData> inputIter = inputs.iterator();
while (inputIter.hasNext() && isInRankEnd(currentRank)) {
RowData currentRow = inputIter.next();
if (!findsSortKey && equaliser.equals(currentRow, inputRow)) {
// found the retracted row: remove it and start shifting
prevRow = currentRow;
findsSortKey = true;
inputIter.remove();
} else if (findsSortKey) {
// each following row moves up one rank
collectUpdateBefore(out, prevRow, currentRank);
collectUpdateAfter(out, currentRow, currentRank);
prevRow = currentRow;
}
currentRank += 1;
}
if (inputs.isEmpty()) {
dataState.remove(key);
} else {
dataState.put(key, inputs);
}
}
} else if (findsSortKey) {
// keys after the retracted row: keep shifting their rows up by one rank
List<RowData> inputs = dataState.get(key);
if (inputs == null) {
processStateStaled(iterator);
} else {
int i = 0;
while (i < inputs.size() && isInRankEnd(currentRank)) {
RowData currentRow = inputs.get(i);
collectUpdateBefore(out, prevRow, currentRank);
collectUpdateAfter(out, currentRow, currentRank);
prevRow = currentRow;
currentRank += 1;
i++;
}
}
} else {
// keys ranked above the retracted row are unaffected; just advance the rank
currentRank += entry.getValue();
}
}
if (isInRankEnd(currentRank)) {
if (!findsSortKey && null == prevRow) {
// the retracted row was not found anywhere — state has likely been cleared
stateStaledErrorHandle();
} else {
// there is no enough elements in Top-N, emit DELETE message for the retract record.
collectDelete(out, prevRow, currentRank);
}
}
return findsSortKey;
}
/**
 * Retract the input record and emit updated records. This works for outputting without
 * row_number.
 *
 * <p>At most one DELETE (for the retracted row) and one INSERT (for the row promoted into the
 * Top-N) are emitted.
 *
 * @param sortedMap sort key -> record count (not yet decremented for this retraction)
 * @param sortKey sort key of the retracted row
 * @param inputRow the retracted row
 * @param out collector for output records
 * @return true if the input record has been removed from {@link #dataState}.
 */
private boolean retractRecordWithoutRowNumber(
        SortedMap<RowData, Long> sortedMap,
        RowData sortKey,
        RowData inputRow,
        Collector<RowData> out)
        throws Exception {
    Iterator<Map.Entry<RowData, Long>> iterator = sortedMap.entrySet().iterator();
    long nextRank = 1L; // the next rank number, should be in the rank range
    boolean findsSortKey = false;
    while (iterator.hasNext() && isInRankEnd(nextRank)) {
        Map.Entry<RowData, Long> entry = iterator.next();
        RowData key = entry.getKey();
        if (!findsSortKey && key.equals(sortKey)) {
            List<RowData> inputs = dataState.get(key);
            if (inputs == null) {
                processStateStaled(iterator);
            } else {
                Iterator<RowData> inputIter = inputs.iterator();
                while (inputIter.hasNext() && isInRankEnd(nextRank)) {
                    RowData prevRow = inputIter.next();
                    if (!findsSortKey && equaliser.equals(prevRow, inputRow)) {
                        // found the retracted row: delete it and compensate the rank counter
                        collectDelete(out, prevRow, nextRank);
                        nextRank -= 1;
                        findsSortKey = true;
                        inputIter.remove();
                    } else if (findsSortKey) {
                        if (nextRank == rankEnd) {
                            collectInsert(out, prevRow, nextRank);
                        }
                    }
                    nextRank += 1;
                }
                if (inputs.isEmpty()) {
                    dataState.remove(key);
                } else {
                    dataState.put(key, inputs);
                }
            }
        } else if (findsSortKey) {
            long count = entry.getValue();
            // gets the rank of last record with same sortKey
            long rankOfLastRecord = nextRank + count - 1;
            if (rankOfLastRecord < rankEnd) {
                nextRank = rankOfLastRecord + 1;
            } else {
                // sends the record if there is a record recently upgrades to Top-N.
                // Plain narrowing cast instead of Long.valueOf(..).intValue():
                // same result, no needless boxing (consistent with emitRecordsWithoutRowNumber)
                int index = (int) (rankEnd - nextRank);
                List<RowData> inputs = dataState.get(key);
                if (inputs == null) {
                    processStateStaled(iterator);
                } else {
                    RowData toAdd = inputs.get(index);
                    collectInsert(out, toAdd);
                    break;
                }
            }
        } else {
            nextRank += entry.getValue();
        }
    }
    return findsSortKey;
}
}
|
RetractableTopNFunction
|
java
|
apache__camel
|
tooling/maven/camel-package-maven-plugin/src/main/java/org/apache/camel/maven/packaging/GenerateTestConfigurerMojo.java
|
{
"start": 1751,
"end": 2363
}
|
class ____ extends AbstractGenerateConfigurerMojo {
/**
* The output directory for generated test java source code
*/
@Parameter(defaultValue = "${project.basedir}/src/test/java")
protected File sourcesOutputDir;
/**
* The output directory for generated test resource source code
*/
@Parameter(defaultValue = "${project.basedir}/src/test/resources")
protected File resourcesOutputDir;
/**
* To generate configurer for these test classes. The syntax is either <tt>fqn</tt> or </tt>fqn=targetFqn</tt>. This
* allows mapping source
|
GenerateTestConfigurerMojo
|
java
|
apache__flink
|
flink-table/flink-table-runtime/src/main/java/org/apache/flink/table/runtime/generated/WatermarkGenerator.java
|
{
"start": 1115,
"end": 1682
}
|
/**
 * Base class for code-generated watermark generators: extracts a timestamp from each row and
 * optionally produces a watermark for it.
 */
class ____ extends AbstractRichFunction {
private static final long serialVersionUID = 1L;
/** Returns the timestamp for the current row. */
public abstract long extractTimestamp(RowData row) throws Exception;
/**
 * Returns the watermark for the current row or null if no watermark should be generated.
 *
 * @param row The current row.
 * @return The watermark for this row or null if no watermark should be generated.
 */
@Nullable
public abstract Long currentWatermark(RowData row) throws Exception;
}
|
WatermarkGenerator
|
java
|
apache__kafka
|
storage/src/test/java/org/apache/kafka/server/log/remote/metadata/storage/ClassLoaderAwareRemoteLogMetadataManagerTest.java
|
{
"start": 2396,
"end": 2448
}
|
// Empty ClassLoader subclass with no overrides.
// NOTE(review): presumably a distinct-identity marker loader for context-class-loader
// assertions in the enclosing test — confirm against the test class.
class ____ extends ClassLoader {
}
}
|
DummyClassLoader
|
java
|
google__guava
|
android/guava-testlib/test/com/google/common/collect/testing/IteratorTesterTest.java
|
{
"start": 10088,
"end": 10580
}
|
/**
 * Iterator whose {@code next()} and {@code remove()} always throw the supplied exception.
 * {@code hasNext()} always reports true. The stored exception's stack trace is refreshed via
 * {@code fillInStackTrace()} before each throw, so it points at the throw site.
 */
class ____<E> implements Iterator<E> {
// the single exception instance rethrown by next()/remove()
private final RuntimeException ex;
private ThrowingIterator(RuntimeException ex) {
this.ex = ex;
}
@Override
public boolean hasNext() {
// IteratorTester doesn't expect exceptions for hasNext().
return true;
}
@Override
public E next() {
ex.fillInStackTrace();
throw ex;
}
@Override
public void remove() {
ex.fillInStackTrace();
throw ex;
}
}
}
|
ThrowingIterator
|
java
|
spring-projects__spring-security
|
config/src/test/java/org/springframework/security/config/websocket/WebSocketMessageBrokerConfigTests.java
|
{
"start": 30017,
"end": 30230
}
|
/**
 * ChannelInterceptor that rejects every message: {@code preSend} unconditionally throws
 * {@link UnsupportedOperationException}.
 */
class ____ implements ChannelInterceptor {
@Override
public Message<?> preSend(Message<?> message, MessageChannel channel) {
throw new UnsupportedOperationException("no");
}
}
static
|
ExceptingInterceptor
|
java
|
apache__avro
|
lang/java/protobuf/src/test/java/org/apache/avro/protobuf/multiplefiles/Foo.java
|
{
"start": 68146,
"end": 94129
}
|
enum ____ set.
* @return This builder for chaining.
*/
public Builder setEnum(org.apache.avro.protobuf.multiplefiles.A value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00008000;
enum_ = value.getNumber();
onChanged();
return this;
}
/**
* <code>optional .org.apache.avro.protobuf.multiplefiles.A enum = 16 [default = Z];</code>
*
* @return This builder for chaining.
*/
public Builder clearEnum() {
bitField0_ = (bitField0_ & ~0x00008000);
enum_ = 3;
onChanged();
return this;
}
private com.google.protobuf.Internal.IntList intArray_ = emptyIntList();
private void ensureIntArrayIsMutable() {
if (!intArray_.isModifiable()) {
intArray_ = makeMutableCopy(intArray_);
}
bitField0_ |= 0x00010000;
}
/**
* <pre>
* some repeated types
* </pre>
*
* <code>repeated int32 intArray = 17;</code>
*
* @return A list containing the intArray.
*/
public java.util.List<java.lang.Integer> getIntArrayList() {
intArray_.makeImmutable();
return intArray_;
}
/**
* <pre>
* some repeated types
* </pre>
*
* <code>repeated int32 intArray = 17;</code>
*
* @return The count of intArray.
*/
public int getIntArrayCount() {
return intArray_.size();
}
/**
* <pre>
* some repeated types
* </pre>
*
* <code>repeated int32 intArray = 17;</code>
*
* @param index The index of the element to return.
* @return The intArray at the given index.
*/
public int getIntArray(int index) {
return intArray_.getInt(index);
}
/**
* <pre>
* some repeated types
* </pre>
*
* <code>repeated int32 intArray = 17;</code>
*
* @param index The index to set the value at.
* @param value The intArray to set.
* @return This builder for chaining.
*/
public Builder setIntArray(int index, int value) {
ensureIntArrayIsMutable();
intArray_.setInt(index, value);
bitField0_ |= 0x00010000;
onChanged();
return this;
}
/**
* <pre>
* some repeated types
* </pre>
*
* <code>repeated int32 intArray = 17;</code>
*
* @param value The intArray to add.
* @return This builder for chaining.
*/
public Builder addIntArray(int value) {
ensureIntArrayIsMutable();
intArray_.addInt(value);
bitField0_ |= 0x00010000;
onChanged();
return this;
}
/**
* <pre>
* some repeated types
* </pre>
*
* <code>repeated int32 intArray = 17;</code>
*
* @param values The intArray to add.
* @return This builder for chaining.
*/
public Builder addAllIntArray(java.lang.Iterable<? extends java.lang.Integer> values) {
ensureIntArrayIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(values, intArray_);
bitField0_ |= 0x00010000;
onChanged();
return this;
}
/**
* <pre>
* some repeated types
* </pre>
*
* <code>repeated int32 intArray = 17;</code>
*
* @return This builder for chaining.
*/
public Builder clearIntArray() {
intArray_ = emptyIntList();
bitField0_ = (bitField0_ & ~0x00010000);
onChanged();
return this;
}
private java.util.List<org.apache.avro.protobuf.multiplefiles.Foo> fooArray_ = java.util.Collections.emptyList();
private void ensureFooArrayIsMutable() {
if (!((bitField0_ & 0x00020000) != 0)) {
fooArray_ = new java.util.ArrayList<org.apache.avro.protobuf.multiplefiles.Foo>(fooArray_);
bitField0_ |= 0x00020000;
}
}
private com.google.protobuf.RepeatedFieldBuilder<org.apache.avro.protobuf.multiplefiles.Foo, org.apache.avro.protobuf.multiplefiles.Foo.Builder, org.apache.avro.protobuf.multiplefiles.FooOrBuilder> fooArrayBuilder_;
/**
* <code>repeated .org.apache.avro.protobuf.multiplefiles.Foo fooArray = 20;</code>
*/
public java.util.List<org.apache.avro.protobuf.multiplefiles.Foo> getFooArrayList() {
if (fooArrayBuilder_ == null) {
return java.util.Collections.unmodifiableList(fooArray_);
} else {
return fooArrayBuilder_.getMessageList();
}
}
/**
* <code>repeated .org.apache.avro.protobuf.multiplefiles.Foo fooArray = 20;</code>
*/
public int getFooArrayCount() {
if (fooArrayBuilder_ == null) {
return fooArray_.size();
} else {
return fooArrayBuilder_.getCount();
}
}
/**
* <code>repeated .org.apache.avro.protobuf.multiplefiles.Foo fooArray = 20;</code>
*/
public org.apache.avro.protobuf.multiplefiles.Foo getFooArray(int index) {
if (fooArrayBuilder_ == null) {
return fooArray_.get(index);
} else {
return fooArrayBuilder_.getMessage(index);
}
}
/**
* <code>repeated .org.apache.avro.protobuf.multiplefiles.Foo fooArray = 20;</code>
*/
public Builder setFooArray(int index, org.apache.avro.protobuf.multiplefiles.Foo value) {
if (fooArrayBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureFooArrayIsMutable();
fooArray_.set(index, value);
onChanged();
} else {
fooArrayBuilder_.setMessage(index, value);
}
return this;
}
/**
* <code>repeated .org.apache.avro.protobuf.multiplefiles.Foo fooArray = 20;</code>
*/
public Builder setFooArray(int index, org.apache.avro.protobuf.multiplefiles.Foo.Builder builderForValue) {
if (fooArrayBuilder_ == null) {
ensureFooArrayIsMutable();
fooArray_.set(index, builderForValue.build());
onChanged();
} else {
fooArrayBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
* <code>repeated .org.apache.avro.protobuf.multiplefiles.Foo fooArray = 20;</code>
*/
public Builder addFooArray(org.apache.avro.protobuf.multiplefiles.Foo value) {
if (fooArrayBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureFooArrayIsMutable();
fooArray_.add(value);
onChanged();
} else {
fooArrayBuilder_.addMessage(value);
}
return this;
}
/**
* <code>repeated .org.apache.avro.protobuf.multiplefiles.Foo fooArray = 20;</code>
*/
public Builder addFooArray(int index, org.apache.avro.protobuf.multiplefiles.Foo value) {
if (fooArrayBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureFooArrayIsMutable();
fooArray_.add(index, value);
onChanged();
} else {
fooArrayBuilder_.addMessage(index, value);
}
return this;
}
/**
* <code>repeated .org.apache.avro.protobuf.multiplefiles.Foo fooArray = 20;</code>
*/
public Builder addFooArray(org.apache.avro.protobuf.multiplefiles.Foo.Builder builderForValue) {
if (fooArrayBuilder_ == null) {
ensureFooArrayIsMutable();
fooArray_.add(builderForValue.build());
onChanged();
} else {
fooArrayBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
* <code>repeated .org.apache.avro.protobuf.multiplefiles.Foo fooArray = 20;</code>
*/
public Builder addFooArray(int index, org.apache.avro.protobuf.multiplefiles.Foo.Builder builderForValue) {
if (fooArrayBuilder_ == null) {
ensureFooArrayIsMutable();
fooArray_.add(index, builderForValue.build());
onChanged();
} else {
fooArrayBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
* <code>repeated .org.apache.avro.protobuf.multiplefiles.Foo fooArray = 20;</code>
*/
public Builder addAllFooArray(java.lang.Iterable<? extends org.apache.avro.protobuf.multiplefiles.Foo> values) {
if (fooArrayBuilder_ == null) {
ensureFooArrayIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(values, fooArray_);
onChanged();
} else {
fooArrayBuilder_.addAllMessages(values);
}
return this;
}
/**
* <code>repeated .org.apache.avro.protobuf.multiplefiles.Foo fooArray = 20;</code>
*/
public Builder clearFooArray() {
if (fooArrayBuilder_ == null) {
fooArray_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00020000);
onChanged();
} else {
fooArrayBuilder_.clear();
}
return this;
}
/**
* <code>repeated .org.apache.avro.protobuf.multiplefiles.Foo fooArray = 20;</code>
*/
public Builder removeFooArray(int index) {
if (fooArrayBuilder_ == null) {
ensureFooArrayIsMutable();
fooArray_.remove(index);
onChanged();
} else {
fooArrayBuilder_.remove(index);
}
return this;
}
/**
* <code>repeated .org.apache.avro.protobuf.multiplefiles.Foo fooArray = 20;</code>
*/
public org.apache.avro.protobuf.multiplefiles.Foo.Builder getFooArrayBuilder(int index) {
return getFooArrayFieldBuilder().getBuilder(index);
}
/**
* <code>repeated .org.apache.avro.protobuf.multiplefiles.Foo fooArray = 20;</code>
*/
public org.apache.avro.protobuf.multiplefiles.FooOrBuilder getFooArrayOrBuilder(int index) {
if (fooArrayBuilder_ == null) {
return fooArray_.get(index);
} else {
return fooArrayBuilder_.getMessageOrBuilder(index);
}
}
/**
* <code>repeated .org.apache.avro.protobuf.multiplefiles.Foo fooArray = 20;</code>
*/
public java.util.List<? extends org.apache.avro.protobuf.multiplefiles.FooOrBuilder> getFooArrayOrBuilderList() {
if (fooArrayBuilder_ != null) {
return fooArrayBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(fooArray_);
}
}
/**
* <code>repeated .org.apache.avro.protobuf.multiplefiles.Foo fooArray = 20;</code>
*/
public org.apache.avro.protobuf.multiplefiles.Foo.Builder addFooArrayBuilder() {
return getFooArrayFieldBuilder().addBuilder(org.apache.avro.protobuf.multiplefiles.Foo.getDefaultInstance());
}
/**
* <code>repeated .org.apache.avro.protobuf.multiplefiles.Foo fooArray = 20;</code>
*/
public org.apache.avro.protobuf.multiplefiles.Foo.Builder addFooArrayBuilder(int index) {
return getFooArrayFieldBuilder().addBuilder(index,
org.apache.avro.protobuf.multiplefiles.Foo.getDefaultInstance());
}
/**
* <code>repeated .org.apache.avro.protobuf.multiplefiles.Foo fooArray = 20;</code>
*/
public java.util.List<org.apache.avro.protobuf.multiplefiles.Foo.Builder> getFooArrayBuilderList() {
return getFooArrayFieldBuilder().getBuilderList();
}
private com.google.protobuf.RepeatedFieldBuilder<org.apache.avro.protobuf.multiplefiles.Foo, org.apache.avro.protobuf.multiplefiles.Foo.Builder, org.apache.avro.protobuf.multiplefiles.FooOrBuilder> getFooArrayFieldBuilder() {
if (fooArrayBuilder_ == null) {
fooArrayBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<org.apache.avro.protobuf.multiplefiles.Foo, org.apache.avro.protobuf.multiplefiles.Foo.Builder, org.apache.avro.protobuf.multiplefiles.FooOrBuilder>(
fooArray_, ((bitField0_ & 0x00020000) != 0), getParentForChildren(), isClean());
fooArray_ = null;
}
return fooArrayBuilder_;
}
private java.util.List<java.lang.Integer> syms_ = java.util.Collections.emptyList();
private void ensureSymsIsMutable() {
if (!((bitField0_ & 0x00040000) != 0)) {
syms_ = new java.util.ArrayList<java.lang.Integer>(syms_);
bitField0_ |= 0x00040000;
}
}
/**
* <code>repeated .org.apache.avro.protobuf.multiplefiles.A syms = 19;</code>
*
* @return A list containing the syms.
*/
public java.util.List<org.apache.avro.protobuf.multiplefiles.A> getSymsList() {
return new com.google.protobuf.Internal.ListAdapter<java.lang.Integer, org.apache.avro.protobuf.multiplefiles.A>(
syms_, syms_converter_);
}
/**
* <code>repeated .org.apache.avro.protobuf.multiplefiles.A syms = 19;</code>
*
* @return The count of syms.
*/
public int getSymsCount() {
return syms_.size();
}
/**
* <code>repeated .org.apache.avro.protobuf.multiplefiles.A syms = 19;</code>
*
* @param index The index of the element to return.
* @return The syms at the given index.
*/
public org.apache.avro.protobuf.multiplefiles.A getSyms(int index) {
return syms_converter_.convert(syms_.get(index));
}
/**
* <code>repeated .org.apache.avro.protobuf.multiplefiles.A syms = 19;</code>
*
* @param index The index to set the value at.
* @param value The syms to set.
* @return This builder for chaining.
*/
public Builder setSyms(int index, org.apache.avro.protobuf.multiplefiles.A value) {
if (value == null) {
throw new NullPointerException();
}
ensureSymsIsMutable();
syms_.set(index, value.getNumber());
onChanged();
return this;
}
/**
* <code>repeated .org.apache.avro.protobuf.multiplefiles.A syms = 19;</code>
*
* @param value The syms to add.
* @return This builder for chaining.
*/
public Builder addSyms(org.apache.avro.protobuf.multiplefiles.A value) {
if (value == null) {
throw new NullPointerException();
}
ensureSymsIsMutable();
syms_.add(value.getNumber());
onChanged();
return this;
}
/**
* <code>repeated .org.apache.avro.protobuf.multiplefiles.A syms = 19;</code>
*
* @param values The syms to add.
* @return This builder for chaining.
*/
public Builder addAllSyms(java.lang.Iterable<? extends org.apache.avro.protobuf.multiplefiles.A> values) {
ensureSymsIsMutable();
for (org.apache.avro.protobuf.multiplefiles.A value : values) {
syms_.add(value.getNumber());
}
onChanged();
return this;
}
/**
* <code>repeated .org.apache.avro.protobuf.multiplefiles.A syms = 19;</code>
*
* @return This builder for chaining.
*/
public Builder clearSyms() {
syms_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00040000);
onChanged();
return this;
}
private org.apache.avro.protobuf.multiplefiles.Foo foo_;
private com.google.protobuf.SingleFieldBuilder<org.apache.avro.protobuf.multiplefiles.Foo, org.apache.avro.protobuf.multiplefiles.Foo.Builder, org.apache.avro.protobuf.multiplefiles.FooOrBuilder> fooBuilder_;
/**
* <pre>
* a recursive type
* </pre>
*
* <code>optional .org.apache.avro.protobuf.multiplefiles.Foo foo = 18;</code>
*
* @return Whether the foo field is set.
*/
public boolean hasFoo() {
return ((bitField0_ & 0x00080000) != 0);
}
/**
* <pre>
* a recursive type
* </pre>
*
* <code>optional .org.apache.avro.protobuf.multiplefiles.Foo foo = 18;</code>
*
* @return The foo.
*/
public org.apache.avro.protobuf.multiplefiles.Foo getFoo() {
if (fooBuilder_ == null) {
return foo_ == null ? org.apache.avro.protobuf.multiplefiles.Foo.getDefaultInstance() : foo_;
} else {
return fooBuilder_.getMessage();
}
}
/**
* <pre>
* a recursive type
* </pre>
*
* <code>optional .org.apache.avro.protobuf.multiplefiles.Foo foo = 18;</code>
*/
public Builder setFoo(org.apache.avro.protobuf.multiplefiles.Foo value) {
if (fooBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
foo_ = value;
} else {
fooBuilder_.setMessage(value);
}
bitField0_ |= 0x00080000;
onChanged();
return this;
}
/**
* <pre>
* a recursive type
* </pre>
*
* <code>optional .org.apache.avro.protobuf.multiplefiles.Foo foo = 18;</code>
*/
public Builder setFoo(org.apache.avro.protobuf.multiplefiles.Foo.Builder builderForValue) {
if (fooBuilder_ == null) {
foo_ = builderForValue.build();
} else {
fooBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00080000;
onChanged();
return this;
}
/**
* <pre>
* a recursive type
* </pre>
*
* <code>optional .org.apache.avro.protobuf.multiplefiles.Foo foo = 18;</code>
*/
public Builder mergeFoo(org.apache.avro.protobuf.multiplefiles.Foo value) {
if (fooBuilder_ == null) {
if (((bitField0_ & 0x00080000) != 0) && foo_ != null
&& foo_ != org.apache.avro.protobuf.multiplefiles.Foo.getDefaultInstance()) {
getFooBuilder().mergeFrom(value);
} else {
foo_ = value;
}
} else {
fooBuilder_.mergeFrom(value);
}
if (foo_ != null) {
bitField0_ |= 0x00080000;
onChanged();
}
return this;
}
/**
* <pre>
* a recursive type
* </pre>
*
* <code>optional .org.apache.avro.protobuf.multiplefiles.Foo foo = 18;</code>
*/
public Builder clearFoo() {
bitField0_ = (bitField0_ & ~0x00080000);
foo_ = null;
if (fooBuilder_ != null) {
fooBuilder_.dispose();
fooBuilder_ = null;
}
onChanged();
return this;
}
/**
* <pre>
* a recursive type
* </pre>
*
* <code>optional .org.apache.avro.protobuf.multiplefiles.Foo foo = 18;</code>
*/
public org.apache.avro.protobuf.multiplefiles.Foo.Builder getFooBuilder() {
bitField0_ |= 0x00080000;
onChanged();
return getFooFieldBuilder().getBuilder();
}
/**
* <pre>
* a recursive type
* </pre>
*
* <code>optional .org.apache.avro.protobuf.multiplefiles.Foo foo = 18;</code>
*/
public org.apache.avro.protobuf.multiplefiles.FooOrBuilder getFooOrBuilder() {
if (fooBuilder_ != null) {
return fooBuilder_.getMessageOrBuilder();
} else {
return foo_ == null ? org.apache.avro.protobuf.multiplefiles.Foo.getDefaultInstance() : foo_;
}
}
/**
* <pre>
* a recursive type
* </pre>
*
* <code>optional .org.apache.avro.protobuf.multiplefiles.Foo foo = 18;</code>
*/
private com.google.protobuf.SingleFieldBuilder<org.apache.avro.protobuf.multiplefiles.Foo, org.apache.avro.protobuf.multiplefiles.Foo.Builder, org.apache.avro.protobuf.multiplefiles.FooOrBuilder> getFooFieldBuilder() {
if (fooBuilder_ == null) {
fooBuilder_ = new com.google.protobuf.SingleFieldBuilder<org.apache.avro.protobuf.multiplefiles.Foo, org.apache.avro.protobuf.multiplefiles.Foo.Builder, org.apache.avro.protobuf.multiplefiles.FooOrBuilder>(
getFoo(), getParentForChildren(), isClean());
foo_ = null;
}
return fooBuilder_;
}
private com.google.protobuf.Timestamp timestamp_;
private com.google.protobuf.SingleFieldBuilder<com.google.protobuf.Timestamp, com.google.protobuf.Timestamp.Builder, com.google.protobuf.TimestampOrBuilder> timestampBuilder_;
/**
* <pre>
* a predefined message type
* </pre>
*
* <code>optional .google.protobuf.Timestamp timestamp = 21;</code>
*
* @return Whether the timestamp field is set.
*/
public boolean hasTimestamp() {
return ((bitField0_ & 0x00100000) != 0);
}
/**
* <pre>
* a predefined message type
* </pre>
*
* <code>optional .google.protobuf.Timestamp timestamp = 21;</code>
*
* @return The timestamp.
*/
public com.google.protobuf.Timestamp getTimestamp() {
if (timestampBuilder_ == null) {
return timestamp_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : timestamp_;
} else {
return timestampBuilder_.getMessage();
}
}
/**
* <pre>
* a predefined message type
* </pre>
*
* <code>optional .google.protobuf.Timestamp timestamp = 21;</code>
*/
public Builder setTimestamp(com.google.protobuf.Timestamp value) {
if (timestampBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
timestamp_ = value;
} else {
timestampBuilder_.setMessage(value);
}
bitField0_ |= 0x00100000;
onChanged();
return this;
}
/**
* <pre>
* a predefined message type
* </pre>
*
* <code>optional .google.protobuf.Timestamp timestamp = 21;</code>
*/
public Builder setTimestamp(com.google.protobuf.Timestamp.Builder builderForValue) {
if (timestampBuilder_ == null) {
timestamp_ = builderForValue.build();
} else {
timestampBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00100000;
onChanged();
return this;
}
/**
* <pre>
* a predefined message type
* </pre>
*
* <code>optional .google.protobuf.Timestamp timestamp = 21;</code>
*/
public Builder mergeTimestamp(com.google.protobuf.Timestamp value) {
if (timestampBuilder_ == null) {
if (((bitField0_ & 0x00100000) != 0) && timestamp_ != null
&& timestamp_ != com.google.protobuf.Timestamp.getDefaultInstance()) {
getTimestampBuilder().mergeFrom(value);
} else {
timestamp_ = value;
}
} else {
timestampBuilder_.mergeFrom(value);
}
if (timestamp_ != null) {
bitField0_ |= 0x00100000;
onChanged();
}
return this;
}
/**
* <pre>
* a predefined message type
* </pre>
*
* <code>optional .google.protobuf.Timestamp timestamp = 21;</code>
*/
public Builder clearTimestamp() {
bitField0_ = (bitField0_ & ~0x00100000);
timestamp_ = null;
if (timestampBuilder_ != null) {
timestampBuilder_.dispose();
timestampBuilder_ = null;
}
onChanged();
return this;
}
/**
* <pre>
* a predefined message type
* </pre>
*
* <code>optional .google.protobuf.Timestamp timestamp = 21;</code>
*/
public com.google.protobuf.Timestamp.Builder getTimestampBuilder() {
bitField0_ |= 0x00100000;
onChanged();
return getTimestampFieldBuilder().getBuilder();
}
/**
* <pre>
* a predefined message type
* </pre>
*
* <code>optional .google.protobuf.Timestamp timestamp = 21;</code>
*/
public com.google.protobuf.TimestampOrBuilder getTimestampOrBuilder() {
if (timestampBuilder_ != null) {
return timestampBuilder_.getMessageOrBuilder();
} else {
return timestamp_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : timestamp_;
}
}
/**
* <pre>
* a predefined message type
* </pre>
*
* <code>optional .google.protobuf.Timestamp timestamp = 21;</code>
*/
private com.google.protobuf.SingleFieldBuilder<com.google.protobuf.Timestamp, com.google.protobuf.Timestamp.Builder, com.google.protobuf.TimestampOrBuilder> getTimestampFieldBuilder() {
if (timestampBuilder_ == null) {
timestampBuilder_ = new com.google.protobuf.SingleFieldBuilder<com.google.protobuf.Timestamp, com.google.protobuf.Timestamp.Builder, com.google.protobuf.TimestampOrBuilder>(
getTimestamp(), getParentForChildren(), isClean());
timestamp_ = null;
}
return timestampBuilder_;
}
// @@protoc_insertion_point(builder_scope:org.apache.avro.protobuf.multiplefiles.Foo)
}
// @@protoc_insertion_point(class_scope:org.apache.avro.protobuf.multiplefiles.Foo)
private static final org.apache.avro.protobuf.multiplefiles.Foo DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.avro.protobuf.multiplefiles.Foo();
}
public static org.apache.avro.protobuf.multiplefiles.Foo getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<Foo> PARSER = new com.google.protobuf.AbstractParser<Foo>() {
@java.lang.Override
public Foo parsePartialFrom(com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<Foo> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<Foo> getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.avro.protobuf.multiplefiles.Foo getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
to
|
java
|
elastic__elasticsearch
|
x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/GetFlamegraphAction.java
|
{
"start": 362,
"end": 660
}
|
class ____ extends ActionType<GetFlamegraphResponse> {
public static final GetFlamegraphAction INSTANCE = new GetFlamegraphAction();
public static final String NAME = "indices:data/read/profiling/flamegraph";
private GetFlamegraphAction() {
super(NAME);
}
}
|
GetFlamegraphAction
|
java
|
quarkusio__quarkus
|
extensions/panache/hibernate-orm-panache/deployment/src/main/java/io/quarkus/hibernate/orm/panache/deployment/PanacheHibernateResourceProcessor.java
|
{
"start": 2179,
"end": 9092
}
|
class ____ {
static final DotName DOTNAME_PANACHE_REPOSITORY_BASE = DotName.createSimple(PanacheRepositoryBase.class.getName());
private static final DotName DOTNAME_PANACHE_REPOSITORY = DotName.createSimple(PanacheRepository.class.getName());
static final DotName DOTNAME_PANACHE_ENTITY = DotName.createSimple(PanacheEntity.class.getName());
static final DotName DOTNAME_PANACHE_ENTITY_BASE = DotName.createSimple(PanacheEntityBase.class.getName());
private static final DotName DOTNAME_SESSION = DotName.createSimple(Session.class.getName());
private static final DotName DOTNAME_ID = DotName.createSimple(Id.class.getName());
@BuildStep
FeatureBuildItem featureBuildItem() {
return new FeatureBuildItem(Feature.HIBERNATE_ORM_PANACHE);
}
@BuildStep
AdditionalJpaModelBuildItem produceModel() {
// only useful for the index resolution: hibernate will register it to be transformed, but BuildMojo
// only transforms classes from the application jar, so we do our own transforming
return new AdditionalJpaModelBuildItem("io.quarkus.hibernate.orm.panache.PanacheEntity",
// Only added to persistence units actually using this class, using Jandex-based discovery,
// so we pass empty sets of PUs.
// The build items tell the Hibernate extension to process the classes at build time:
// add to Jandex index, bytecode enhancement, proxy generation, ...
Set.of());
}
@BuildStep
UnremovableBeanBuildItem ensureBeanLookupAvailable() {
return new UnremovableBeanBuildItem(new UnremovableBeanBuildItem.BeanTypeExclusion(DOTNAME_SESSION));
}
@BuildStep
void collectEntityClasses(CombinedIndexBuildItem index, BuildProducer<PanacheEntityClassBuildItem> entityClasses) {
// NOTE: we don't skip abstract/generic entities because they still need accessors
for (ClassInfo panacheEntityBaseSubclass : index.getIndex().getAllKnownSubclasses(DOTNAME_PANACHE_ENTITY_BASE)) {
// FIXME: should we really skip PanacheEntity or all MappedSuperClass?
if (!panacheEntityBaseSubclass.name().equals(DOTNAME_PANACHE_ENTITY)) {
entityClasses.produce(new PanacheEntityClassBuildItem(panacheEntityBaseSubclass));
}
}
}
@BuildStep
@Consume(HibernateEnhancersRegisteredBuildItem.class)
@Consume(InterceptedStaticMethodsTransformersRegisteredBuildItem.class)
void build(
CombinedIndexBuildItem index,
BuildProducer<BytecodeTransformerBuildItem> transformers,
List<PanacheEntityClassBuildItem> entityClasses,
Optional<JpaModelPersistenceUnitMappingBuildItem> jpaModelPersistenceUnitMapping,
List<PanacheMethodCustomizerBuildItem> methodCustomizersBuildItems,
BuildProducer<EntityToPersistenceUnitBuildItem> entityToPersistenceUnit) {
List<PanacheMethodCustomizer> methodCustomizers = methodCustomizersBuildItems.stream()
.map(PanacheMethodCustomizerBuildItem::getMethodCustomizer).collect(Collectors.toList());
PanacheJpaRepositoryEnhancer daoEnhancer = new PanacheJpaRepositoryEnhancer(index.getIndex(),
JavaJpaTypeBundle.BUNDLE);
Set<String> panacheEntities = new HashSet<>();
for (ClassInfo classInfo : index.getIndex().getAllKnownImplementors(DOTNAME_PANACHE_REPOSITORY_BASE)) {
// Skip PanacheRepository
if (classInfo.name().equals(DOTNAME_PANACHE_REPOSITORY))
continue;
if (daoEnhancer.skipRepository(classInfo))
continue;
List<org.jboss.jandex.Type> typeParameters = JandexUtil
.resolveTypeParameters(classInfo.name(), DOTNAME_PANACHE_REPOSITORY_BASE, index.getIndex());
var entityTypeName = typeParameters.get(0).name();
panacheEntities.add(entityTypeName.toString());
// Also add subclasses, so that they get resolved to a persistence unit.
for (var subclass : index.getIndex().getAllKnownSubclasses(entityTypeName)) {
panacheEntities.add(subclass.name().toString());
}
transformers.produce(new BytecodeTransformerBuildItem(classInfo.name().toString(), daoEnhancer));
}
PanacheJpaEntityOperationsEnhancer entityOperationsEnhancer = new PanacheJpaEntityOperationsEnhancer(index.getIndex(),
methodCustomizers, JavaJpaTypeBundle.BUNDLE);
Set<String> modelClasses = new HashSet<>();
for (PanacheEntityClassBuildItem entityClass : entityClasses) {
String entityClassName = entityClass.get().name().toString();
modelClasses.add(entityClassName);
transformers.produce(new BytecodeTransformerBuildItem(entityClassName, entityOperationsEnhancer));
}
panacheEntities.addAll(modelClasses);
determineEntityPersistenceUnits(jpaModelPersistenceUnitMapping, panacheEntities, "Panache")
.forEach((e, pu) -> entityToPersistenceUnit.produce(new EntityToPersistenceUnitBuildItem(e, pu)));
}
@BuildStep
@Record(ExecutionTime.STATIC_INIT)
void recordEntityToPersistenceUnit(Optional<JpaModelPersistenceUnitMappingBuildItem> jpaModelPersistenceUnitMapping,
List<EntityToPersistenceUnitBuildItem> items, PanacheHibernateOrmRecorder recorder) {
Map<String, String> map = new HashMap<>();
for (EntityToPersistenceUnitBuildItem item : items) {
map.put(item.getEntityClass(), item.getPersistenceUnitName());
}
// This is called even if there are no entity types, so that Panache gets properly initialized.
recorder.addEntityTypesToPersistenceUnit(map,
jpaModelPersistenceUnitMapping.map(JpaModelPersistenceUnitMappingBuildItem::isIncomplete)
// This happens if there is no persistence unit, in which case we definitely know this metadata is complete.
.orElse(false));
}
@BuildStep
ValidationPhaseBuildItem.ValidationErrorBuildItem validate(ValidationPhaseBuildItem validationPhase,
CombinedIndexBuildItem index) throws BuildException {
// we verify that no ID fields are defined (via @Id) when extending PanacheEntity
for (AnnotationInstance annotationInstance : index.getIndex().getAnnotations(DOTNAME_ID)) {
ClassInfo info = JandexUtil.getEnclosingClass(annotationInstance);
if (JandexUtil.isSubclassOf(index.getIndex(), info, DOTNAME_PANACHE_ENTITY)) {
BuildException be = new BuildException("You provide a JPA identifier via @Id inside '" + info.name() +
"' but one is already provided by PanacheEntity, " +
"your
|
PanacheHibernateResourceProcessor
|
java
|
junit-team__junit5
|
jupiter-tests/src/test/java/org/junit/jupiter/api/DisplayNameGenerationTests.java
|
{
"start": 16087,
"end": 16215
}
|
class ____ {
@Test
void is_instantiated_with_its_constructor() {
new Stack<>();
}
@Nested
|
IndicativeGeneratorTestCase
|
java
|
elastic__elasticsearch
|
x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseParser.java
|
{
"start": 261956,
"end": 267957
}
|
class ____ extends ConstantContext {
public TerminalNode GUID_ESC() {
return getToken(SqlBaseParser.GUID_ESC, 0);
}
public StringContext string() {
return getRuleContext(StringContext.class, 0);
}
public TerminalNode ESC_END() {
return getToken(SqlBaseParser.ESC_END, 0);
}
public GuidEscapedLiteralContext(ConstantContext ctx) {
copyFrom(ctx);
}
@Override
public void enterRule(ParseTreeListener listener) {
if (listener instanceof SqlBaseListener) ((SqlBaseListener) listener).enterGuidEscapedLiteral(this);
}
@Override
public void exitRule(ParseTreeListener listener) {
if (listener instanceof SqlBaseListener) ((SqlBaseListener) listener).exitGuidEscapedLiteral(this);
}
@Override
public <T> T accept(ParseTreeVisitor<? extends T> visitor) {
if (visitor instanceof SqlBaseVisitor) return ((SqlBaseVisitor<? extends T>) visitor).visitGuidEscapedLiteral(this);
else return visitor.visitChildren(this);
}
}
public final ConstantContext constant() throws RecognitionException {
ConstantContext _localctx = new ConstantContext(_ctx, getState());
enterRule(_localctx, 90, RULE_constant);
try {
int _alt;
setState(795);
_errHandler.sync(this);
switch (_input.LA(1)) {
case NULL:
_localctx = new NullLiteralContext(_localctx);
enterOuterAlt(_localctx, 1); {
setState(769);
match(NULL);
}
break;
case INTERVAL:
_localctx = new IntervalLiteralContext(_localctx);
enterOuterAlt(_localctx, 2); {
setState(770);
interval();
}
break;
case INTEGER_VALUE:
case DECIMAL_VALUE:
_localctx = new NumericLiteralContext(_localctx);
enterOuterAlt(_localctx, 3); {
setState(771);
number();
}
break;
case FALSE:
case TRUE:
_localctx = new BooleanLiteralContext(_localctx);
enterOuterAlt(_localctx, 4); {
setState(772);
booleanValue();
}
break;
case STRING:
_localctx = new StringLiteralContext(_localctx);
enterOuterAlt(_localctx, 5); {
setState(774);
_errHandler.sync(this);
_alt = 1;
do {
switch (_alt) {
case 1: {
{
setState(773);
match(STRING);
}
}
break;
default:
throw new NoViableAltException(this);
}
setState(776);
_errHandler.sync(this);
_alt = getInterpreter().adaptivePredict(_input, 105, _ctx);
} while (_alt != 2 && _alt != org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER);
}
break;
case PARAM:
_localctx = new ParamLiteralContext(_localctx);
enterOuterAlt(_localctx, 6); {
setState(778);
match(PARAM);
}
break;
case DATE_ESC:
_localctx = new DateEscapedLiteralContext(_localctx);
enterOuterAlt(_localctx, 7); {
setState(779);
match(DATE_ESC);
setState(780);
string();
setState(781);
match(ESC_END);
}
break;
case TIME_ESC:
_localctx = new TimeEscapedLiteralContext(_localctx);
enterOuterAlt(_localctx, 8); {
setState(783);
match(TIME_ESC);
setState(784);
string();
setState(785);
match(ESC_END);
}
break;
case TIMESTAMP_ESC:
_localctx = new TimestampEscapedLiteralContext(_localctx);
enterOuterAlt(_localctx, 9); {
setState(787);
match(TIMESTAMP_ESC);
setState(788);
string();
setState(789);
match(ESC_END);
}
break;
case GUID_ESC:
_localctx = new GuidEscapedLiteralContext(_localctx);
enterOuterAlt(_localctx, 10); {
setState(791);
match(GUID_ESC);
setState(792);
string();
setState(793);
match(ESC_END);
}
break;
default:
throw new NoViableAltException(this);
}
} catch (RecognitionException re) {
_localctx.exception = re;
_errHandler.reportError(this, re);
_errHandler.recover(this, re);
} finally {
exitRule();
}
return _localctx;
}
@SuppressWarnings("CheckReturnValue")
public static
|
GuidEscapedLiteralContext
|
java
|
google__dagger
|
javatests/dagger/functional/assisted/AssistedFactoryDuplicatedParamNamesTest.java
|
{
"start": 1380,
"end": 1451
}
|
interface ____ {
Foo create(String arg);
}
@Component
|
FooFactory
|
java
|
spring-projects__spring-security
|
docs/modules/ROOT/examples/kerberos/AuthProviderConfig.java
|
{
"start": 1985,
"end": 4798
}
|
class ____ {
@Value("${app.service-principal}")
private String servicePrincipal;
@Value("${app.keytab-location}")
private String keytabLocation;
@Bean
public SecurityFilterChain filterChain(HttpSecurity http) throws Exception {
KerberosAuthenticationProvider kerberosAuthenticationProvider = kerberosAuthenticationProvider();
KerberosServiceAuthenticationProvider kerberosServiceAuthenticationProvider = kerberosServiceAuthenticationProvider();
ProviderManager providerManager = new ProviderManager(kerberosAuthenticationProvider,
kerberosServiceAuthenticationProvider);
http
.authorizeHttpRequests((authz) -> authz
.requestMatchers("/", "/home").permitAll()
.anyRequest().authenticated()
)
.exceptionHandling()
.authenticationEntryPoint(spnegoEntryPoint())
.and()
.formLogin()
.loginPage("/login").permitAll()
.and()
.logout()
.permitAll()
.and()
.authenticationProvider(kerberosAuthenticationProvider())
.authenticationProvider(kerberosServiceAuthenticationProvider())
.addFilterBefore(spnegoAuthenticationProcessingFilter(providerManager),
BasicAuthenticationFilter.class);
return http.build();
}
@Bean
public KerberosAuthenticationProvider kerberosAuthenticationProvider() {
KerberosAuthenticationProvider provider = new KerberosAuthenticationProvider();
SunJaasKerberosClient client = new SunJaasKerberosClient();
client.setDebug(true);
provider.setKerberosClient(client);
provider.setUserDetailsService(dummyUserDetailsService());
return provider;
}
@Bean
public SpnegoEntryPoint spnegoEntryPoint() {
return new SpnegoEntryPoint("/login");
}
public SpnegoAuthenticationProcessingFilter spnegoAuthenticationProcessingFilter(
AuthenticationManager authenticationManager) {
SpnegoAuthenticationProcessingFilter filter = new SpnegoAuthenticationProcessingFilter();
filter.setAuthenticationManager(authenticationManager);
return filter;
}
@Bean
public KerberosServiceAuthenticationProvider kerberosServiceAuthenticationProvider() {
KerberosServiceAuthenticationProvider provider = new KerberosServiceAuthenticationProvider();
provider.setTicketValidator(sunJaasKerberosTicketValidator());
provider.setUserDetailsService(dummyUserDetailsService());
return provider;
}
@Bean
public SunJaasKerberosTicketValidator sunJaasKerberosTicketValidator() {
SunJaasKerberosTicketValidator ticketValidator = new SunJaasKerberosTicketValidator();
ticketValidator.setServicePrincipal(servicePrincipal);
ticketValidator.setKeyTabLocation(new FileSystemResource(keytabLocation));
ticketValidator.setDebug(true);
return ticketValidator;
}
@Bean
public DummyUserDetailsService dummyUserDetailsService() {
return new DummyUserDetailsService();
}
}
//end::snippetA[]
|
WebSecurityConfig
|
java
|
apache__camel
|
tooling/maven/camel-eip-documentation-enricher-maven-plugin/src/test/java/org/apache/camel/maven/EipDocumentationEnricherMojoTest.java
|
{
"start": 1366,
"end": 3401
}
|
class ____ {
private EipDocumentationEnricherMojo eipDocumentationEnricherMojo = new EipDocumentationEnricherMojo();
@Mock
private File mockCamelCore;
@Mock
private File mockInputSchema;
@BeforeEach
public void setUp() {
eipDocumentationEnricherMojo.camelCoreModelDir = mockCamelCore;
eipDocumentationEnricherMojo.inputCamelSchemaFile = mockInputSchema;
eipDocumentationEnricherMojo.pathToModelDir = "sub/path";
}
@Test
public void testExecuteCamelCoreIsNull() {
eipDocumentationEnricherMojo.camelCoreModelDir = null;
when(mockInputSchema.exists()).thenReturn(true);
when(mockInputSchema.isFile()).thenReturn(true);
try {
eipDocumentationEnricherMojo.execute();
fail("Expected MojoExecutionException");
} catch (MojoExecutionException e) {
// Expected.
}
}
@Test
public void testExecuteInputCamelSchemaIsNotAFile() {
when(mockInputSchema.exists()).thenReturn(true);
when(mockInputSchema.isFile()).thenReturn(false);
try {
eipDocumentationEnricherMojo.execute();
fail("Expected MojoExecutionException");
} catch (MojoExecutionException e) {
// Expected.
}
}
@Test
public void testExecutePathToModelDirIsNull() {
eipDocumentationEnricherMojo.pathToModelDir = null;
try {
eipDocumentationEnricherMojo.execute();
fail("Expected MojoExecutionException");
} catch (MojoExecutionException e) {
// Expected.
}
}
@Test
public void testFixXmlOutput() throws Exception {
String xml = IOHelper.loadText(new FileInputStream("src/test/resources/enriched-camel-spring.xsd"));
String out = EipDocumentationEnricherMojo.fixXmlOutput(xml);
Assertions.assertNotNull(out);
Assertions.assertNotEquals(xml, out);
// System.out.println(out);
}
}
|
EipDocumentationEnricherMojoTest
|
java
|
apache__camel
|
components/camel-mongodb/src/test/java/org/apache/camel/component/mongodb/integration/MongoDbDynamicityIT.java
|
{
"start": 1845,
"end": 8802
}
|
class ____ extends AbstractMongoDbITSupport implements ConfigurableRoute {
@BeforeEach
void checkDocuments() {
Assumptions.assumeTrue(0 == testCollection.countDocuments(), "The collection should have no documents");
}
@Test
public void testInsertDynamicityDisabled() {
mongo.getDatabase("otherDB").drop();
db.getCollection("otherCollection").drop();
assertFalse(StreamSupport.stream(mongo.listDatabaseNames().spliterator(), false).anyMatch("otherDB"::equals),
"The otherDB database should not exist");
String body = "{\"_id\": \"testInsertDynamicityDisabled\", \"a\" : \"1\"}";
Map<String, Object> headers = new HashMap<>();
headers.put(MongoDbConstants.DATABASE, "otherDB");
headers.put(MongoDbConstants.COLLECTION, "otherCollection");
// Object result =
template.requestBodyAndHeaders("direct:noDynamicity", body, headers);
Document b = testCollection.find(eq(MONGO_ID, "testInsertDynamicityDisabled")).first();
assertNotNull(b, "No record with 'testInsertDynamicityDisabled' _id");
body = "{\"_id\": \"testInsertDynamicityDisabledExplicitly\", \"a\" : \"1\"}";
// result =
template.requestBodyAndHeaders("direct:noDynamicityExplicit", body, headers);
b = testCollection.find(eq(MONGO_ID, "testInsertDynamicityDisabledExplicitly")).first();
assertNotNull(b, "No record with 'testInsertDynamicityDisabledExplicitly' _id");
assertFalse(StreamSupport.stream(mongo.listDatabaseNames().spliterator(), false).anyMatch("otherDB"::equals),
"The otherDB database should not exist");
}
@Test
public void testInsertDynamicityEnabledDBOnly() {
mongo.getDatabase("otherDB").drop();
db.getCollection("otherCollection").drop();
assertFalse(StreamSupport.stream(mongo.listDatabaseNames().spliterator(), false).anyMatch("otherDB"::equals),
"The otherDB database should not exist");
String body = "{\"_id\": \"testInsertDynamicityEnabledDBOnly\", \"a\" : \"1\"}";
Map<String, Object> headers = new HashMap<>();
headers.put(MongoDbConstants.DATABASE, "otherDB");
// Object result =
template.requestBodyAndHeaders("direct:dynamicityEnabled", body, headers);
MongoCollection<Document> localDynamicCollection
= mongo.getDatabase("otherDB").getCollection(testCollection.getNamespace().getCollectionName(), Document.class);
Document b = localDynamicCollection.find(eq(MONGO_ID, "testInsertDynamicityEnabledDBOnly")).first();
assertNotNull(b, "No record with 'testInsertDynamicityEnabledDBOnly' _id");
b = testCollection.find(eq(MONGO_ID, "testInsertDynamicityEnabledDBOnly")).first();
assertNull(b, "There is a record with 'testInsertDynamicityEnabledDBOnly' _id in the test collection");
assertTrue(StreamSupport.stream(mongo.listDatabaseNames().spliterator(), false).anyMatch("otherDB"::equals),
"The otherDB database should exist");
}
@Test
public void testInsertDynamicityEnabledCollectionOnly() {
mongo.getDatabase("otherDB").drop();
db.getCollection("otherCollection").drop();
assertFalse(StreamSupport.stream(mongo.listDatabaseNames().spliterator(), false).anyMatch("otherDB"::equals),
"The otherDB database should not exist");
String body = "{\"_id\": \"testInsertDynamicityEnabledCollectionOnly\", \"a\" : \"1\"}";
Map<String, Object> headers = new HashMap<>();
headers.put(MongoDbConstants.COLLECTION, "otherCollection");
// Object result =
template.requestBodyAndHeaders("direct:dynamicityEnabled", body, headers);
MongoCollection<Document> loaclDynamicCollection = db.getCollection("otherCollection", Document.class);
Document b = loaclDynamicCollection.find(eq(MONGO_ID, "testInsertDynamicityEnabledCollectionOnly")).first();
assertNotNull(b, "No record with 'testInsertDynamicityEnabledCollectionOnly' _id");
b = testCollection.find(eq(MONGO_ID, "testInsertDynamicityEnabledDBOnly")).first();
assertNull(b, "There is a record with 'testInsertDynamicityEnabledCollectionOnly' _id in the test collection");
assertFalse(StreamSupport.stream(mongo.listDatabaseNames().spliterator(), false).anyMatch("otherDB"::equals),
"The otherDB database should not exist");
}
@Test
public void testInsertDynamicityEnabledDBAndCollection() {
assertEquals(0, testCollection.countDocuments());
mongo.getDatabase("otherDB").drop();
db.getCollection("otherCollection").drop();
assertFalse(StreamSupport.stream(mongo.listDatabaseNames().spliterator(), false).anyMatch("otherDB"::equals),
"The otherDB database should not exist");
String body = "{\"_id\": \"testInsertDynamicityEnabledDBAndCollection\", \"a\" : \"1\"}";
Map<String, Object> headers = new HashMap<>();
headers.put(MongoDbConstants.DATABASE, "otherDB");
headers.put(MongoDbConstants.COLLECTION, "otherCollection");
// Object result =
template.requestBodyAndHeaders("direct:dynamicityEnabled", body, headers);
MongoCollection<Document> loaclDynamicCollection
= mongo.getDatabase("otherDB").getCollection("otherCollection", Document.class);
Document b = loaclDynamicCollection.find(eq(MONGO_ID, "testInsertDynamicityEnabledDBAndCollection")).first();
assertNotNull(b, "No record with 'testInsertDynamicityEnabledDBAndCollection' _id");
b = testCollection.find(eq(MONGO_ID, "testInsertDynamicityEnabledDBOnly")).first();
assertNull(b, "There is a record with 'testInsertDynamicityEnabledDBAndCollection' _id in the test collection");
assertTrue(StreamSupport.stream(mongo.listDatabaseNames().spliterator(), false).anyMatch("otherDB"::equals),
"The otherDB database should exist");
}
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
public void configure() {
from("direct:noDynamicity")
.to("mongodb:myDb?database={{mongodb.testDb}}&collection={{mongodb.testCollection}}&operation=insert");
from("direct:noDynamicityExplicit").to(
"mongodb:myDb?database={{mongodb.testDb}}&collection={{mongodb.testCollection}}&operation=insert&dynamicity=false");
from("direct:dynamicityEnabled").to(
"mongodb:myDb?database={{mongodb.testDb}}&collection={{mongodb.testCollection}}&operation=insert&dynamicity=true");
}
};
}
@RouteFixture
@Override
public void createRouteBuilder(CamelContext context) throws Exception {
context.addRoutes(createRouteBuilder());
}
}
|
MongoDbDynamicityIT
|
java
|
spring-projects__spring-framework
|
spring-test/src/test/java/org/springframework/test/context/transaction/TransactionalTestExecutionListenerTests.java
|
{
"start": 12576,
"end": 12835
}
|
class ____ implements Invocable {
boolean invoked = false;
@Override
public void invoked(boolean invoked) {
this.invoked = invoked;
}
@Override
public boolean invoked() {
return this.invoked;
}
}
@Transactional
static
|
AbstractInvocable
|
java
|
apache__hadoop
|
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/NormalizedResourceEvent.java
|
{
"start": 1368,
"end": 2783
}
|
class ____ implements HistoryEvent {
private long memory;
private TaskType taskType;
/**
* Normalized request when sent to the Resource Manager.
* @param taskType the tasktype of the request.
* @param memory the normalized memory requirements.
*/
public NormalizedResourceEvent(TaskType taskType, long memory) {
this.memory = memory;
this.taskType = taskType;
}
/**
* the tasktype for the event.
* @return the tasktype for the event.
*/
public TaskType getTaskType() {
return this.taskType;
}
/**
* the normalized memory
* @return the normalized memory
*/
public long getMemory() {
return this.memory;
}
@Override
public EventType getEventType() {
return EventType.NORMALIZED_RESOURCE;
}
@Override
public Object getDatum() {
throw new UnsupportedOperationException("Not a seriable object");
}
@Override
public void setDatum(Object datum) {
throw new UnsupportedOperationException("Not a seriable object");
}
@Override
public TimelineEvent toTimelineEvent() {
TimelineEvent tEvent = new TimelineEvent();
tEvent.setId(StringUtils.toUpperCase(getEventType().name()));
tEvent.addInfo("MEMORY", "" + getMemory());
tEvent.addInfo("TASK_TYPE", getTaskType());
return tEvent;
}
@Override
public Set<TimelineMetric> getTimelineMetrics() {
return null;
}
}
|
NormalizedResourceEvent
|
java
|
apache__maven
|
its/core-it-suite/src/test/resources/mng-4660-outdated-packaged-artifact/module-b/src/test/java/org/apache/maven/it/TestCase.java
|
{
"start": 1678,
"end": 1789
}
|
class ____ {
@Test
public void testCase() {
final Example example = new Example();
}
}
|
TestCase
|
java
|
hibernate__hibernate-orm
|
hibernate-envers/src/main/java/org/hibernate/envers/internal/tools/ConcurrentReferenceHashMap.java
|
{
"start": 51777,
"end": 52304
}
|
class ____ extends AbstractCollection<V> {
@Override
public Iterator<V> iterator() {
return new ValueIterator();
}
@Override
public int size() {
return ConcurrentReferenceHashMap.this.size();
}
@Override
public boolean isEmpty() {
return ConcurrentReferenceHashMap.this.isEmpty();
}
@Override
public boolean contains(Object o) {
return ConcurrentReferenceHashMap.this.containsValue( o );
}
@Override
public void clear() {
ConcurrentReferenceHashMap.this.clear();
}
}
final
|
Values
|
java
|
mapstruct__mapstruct
|
processor/src/test/java/org/mapstruct/ap/test/bugs/_3732/Issue3732Test.java
|
{
"start": 421,
"end": 512
}
|
class ____ {
@ProcessorTest
void shouldGenerateCorrectMapper() {
}
}
|
Issue3732Test
|
java
|
mybatis__mybatis-3
|
src/main/java/org/apache/ibatis/datasource/jndi/JndiDataSourceFactory.java
|
{
"start": 1037,
"end": 2779
}
|
class ____ implements DataSourceFactory {
public static final String INITIAL_CONTEXT = "initial_context";
public static final String DATA_SOURCE = "data_source";
public static final String ENV_PREFIX = "env.";
private DataSource dataSource;
@Override
public void setProperties(Properties properties) {
try {
InitialContext initCtx;
Properties env = getEnvProperties(properties);
if (env == null) {
initCtx = new InitialContext();
} else {
initCtx = new InitialContext(env);
}
if (properties.containsKey(INITIAL_CONTEXT) && properties.containsKey(DATA_SOURCE)) {
Context ctx = (Context) initCtx.lookup(properties.getProperty(INITIAL_CONTEXT));
dataSource = (DataSource) ctx.lookup(properties.getProperty(DATA_SOURCE));
} else if (properties.containsKey(DATA_SOURCE)) {
dataSource = (DataSource) initCtx.lookup(properties.getProperty(DATA_SOURCE));
}
} catch (NamingException e) {
throw new DataSourceException("There was an error configuring JndiDataSourceTransactionPool. Cause: " + e, e);
}
}
@Override
public DataSource getDataSource() {
return dataSource;
}
private static Properties getEnvProperties(Properties allProps) {
Properties contextProperties = null;
for (Entry<Object, Object> entry : allProps.entrySet()) {
String key = (String) entry.getKey();
String value = (String) entry.getValue();
if (key.startsWith(ENV_PREFIX)) {
if (contextProperties == null) {
contextProperties = new Properties();
}
contextProperties.put(key.substring(ENV_PREFIX.length()), value);
}
}
return contextProperties;
}
}
|
JndiDataSourceFactory
|
java
|
hibernate__hibernate-orm
|
hibernate-jcache/src/test/java/org/hibernate/orm/test/jcache/InsertedDataTest.java
|
{
"start": 9334,
"end": 9795
}
|
class ____ {
private PK id;
private String name;
public CacheableEmbeddedIdItem() {
}
public CacheableEmbeddedIdItem(PK id,String name) {
this.id = id;
this.name = name;
}
@EmbeddedId
public PK getId() {
return id;
}
public void setId(PK id) {
this.id = id;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
}
@Embeddable
public static
|
CacheableEmbeddedIdItem
|
java
|
apache__camel
|
core/camel-core-model/src/main/java/org/apache/camel/model/RouteContextRefDefinition.java
|
{
"start": 1335,
"end": 2014
}
|
class ____ {
@XmlAttribute(required = true)
private String ref;
public RouteContextRefDefinition() {
}
public RouteContextRefDefinition(String ref) {
this.ref = ref;
}
@Override
public String toString() {
return "RouteContextRef[" + getRef() + "]";
}
public String getRef() {
return ref;
}
/**
* Reference to the routes in the xml dsl
*/
public void setRef(String ref) {
this.ref = ref;
}
public List<RouteDefinition> lookupRoutes(CamelContext camelContext) {
return RouteContextRefDefinitionHelper.lookupRoutes(camelContext, ref);
}
}
|
RouteContextRefDefinition
|
java
|
spring-cloud__spring-cloud-gateway
|
spring-cloud-gateway-server-webmvc/src/test/java/org/springframework/cloud/gateway/server/mvc/predicate/PredicateDiscovererTests.java
|
{
"start": 1314,
"end": 1609
}
|
class ____ {
@Autowired
PredicateDiscoverer discoverer;
@Test
void contextLoads() {
MultiValueMap<String, OperationMethod> operations = discoverer.getOperations();
assertThat(operations).isNotEmpty();
}
@SpringBootConfiguration
@EnableAutoConfiguration
static
|
PredicateDiscovererTests
|
java
|
apache__flink
|
flink-connectors/flink-connector-files/src/test/java/org/apache/flink/connector/file/src/FileSourceTextLinesITCase.java
|
{
"start": 3047,
"end": 11764
}
|
class ____ {
private static final int PARALLELISM = 4;
private static final int SOURCE_PARALLELISM_UPPER_BOUND = 8;
@TempDir private static java.nio.file.Path tmpDir;
@RegisterExtension
private static final MiniClusterExtension MINI_CLUSTER_RESOURCE =
new MiniClusterExtension(createMiniClusterConfiguration());
// ------------------------------------------------------------------------
// test cases
// ------------------------------------------------------------------------
/** This test runs a job reading bounded input with a stream record format (text lines). */
@Test
void testBoundedTextFileSource(
@TempDir java.nio.file.Path tmpTestDir, @InjectMiniCluster MiniCluster miniCluster)
throws Exception {
testBoundedTextFileSource(tmpTestDir, FailoverType.NONE, miniCluster);
}
/**
* This test runs a job reading bounded input with a stream record format (text lines) and
* restarts TaskManager.
*/
@Test
void testBoundedTextFileSourceWithTaskManagerFailover(@TempDir java.nio.file.Path tmpTestDir)
throws Exception {
// This test will kill TM, so we run it in a new cluster to avoid affecting other tests
runTestWithNewMiniCluster(
miniCluster -> testBoundedTextFileSource(tmpTestDir, FailoverType.TM, miniCluster));
}
/**
* This test runs a job reading bounded input with a stream record format (text lines) and
* triggers JobManager failover.
*/
@Test
void testBoundedTextFileSourceWithJobManagerFailover(@TempDir java.nio.file.Path tmpTestDir)
throws Exception {
// This test will kill JM, so we run it in a new cluster to avoid affecting other tests
runTestWithNewMiniCluster(
miniCluster -> testBoundedTextFileSource(tmpTestDir, FailoverType.JM, miniCluster));
}
@Test
void testBoundedTextFileSourceWithDynamicParallelismInference(
@TempDir java.nio.file.Path tmpTestDir, @InjectMiniCluster MiniCluster miniCluster)
throws Exception {
testBoundedTextFileSource(tmpTestDir, FailoverType.NONE, miniCluster, true);
}
private void testBoundedTextFileSource(
java.nio.file.Path tmpTestDir, FailoverType failoverType, MiniCluster miniCluster)
throws Exception {
testBoundedTextFileSource(tmpTestDir, failoverType, miniCluster, false);
}
private void testBoundedTextFileSource(
java.nio.file.Path tmpTestDir,
FailoverType failoverType,
MiniCluster miniCluster,
boolean batchMode)
throws Exception {
final File testDir = tmpTestDir.toFile();
// our main test data
writeAllFiles(testDir);
// write some junk to hidden files test that common hidden file patterns are filtered by
// default
writeHiddenJunkFiles(testDir);
final FileSource<String> source =
FileSource.forRecordStreamFormat(
new TextLineInputFormat(), Path.fromLocalFile(testDir))
.build();
final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
RestartStrategyUtils.configureFixedDelayRestartStrategy(env, 1, 0L);
env.setParallelism(PARALLELISM);
if (batchMode) {
env.setRuntimeMode(RuntimeExecutionMode.BATCH);
}
final DataStream<String> stream =
env.fromSource(source, WatermarkStrategy.noWatermarks(), "file-source")
.setMaxParallelism(PARALLELISM * 2);
final DataStream<String> streamFailingInTheMiddleOfReading =
RecordCounterToFail.wrapWithFailureAfter(stream, LINES.length / 2);
CloseableIterator<String> iterator = streamFailingInTheMiddleOfReading.collectAsync();
JobClient client = env.executeAsync("Bounded TextFiles Test");
final JobID jobId = client.getJobID();
RecordCounterToFail.waitToFail();
triggerFailover(failoverType, jobId, RecordCounterToFail::continueProcessing, miniCluster);
final List<String> result = new ArrayList<>();
while (iterator.hasNext()) {
result.add(iterator.next());
}
verifyResult(result);
if (batchMode) {
verifySourceParallelism(miniCluster.getExecutionGraph(jobId).get());
}
}
/**
* This test runs a job reading continuous input (files appearing over time) with a stream
* record format (text lines).
*/
@Test
void testContinuousTextFileSource(
@TempDir java.nio.file.Path tmpTestDir, @InjectMiniCluster MiniCluster miniCluster)
throws Exception {
testContinuousTextFileSource(tmpTestDir, FailoverType.NONE, miniCluster);
}
/**
* This test runs a job reading continuous input (files appearing over time) with a stream
* record format (text lines) and restarts TaskManager.
*/
@Test
void testContinuousTextFileSourceWithTaskManagerFailover(@TempDir java.nio.file.Path tmpTestDir)
throws Exception {
// This test will kill TM, so we run it in a new cluster to avoid affecting other tests
runTestWithNewMiniCluster(
miniCluster ->
testContinuousTextFileSource(tmpTestDir, FailoverType.TM, miniCluster));
}
/**
* This test runs a job reading continuous input (files appearing over time) with a stream
* record format (text lines) and triggers JobManager failover.
*/
@Test
void testContinuousTextFileSourceWithJobManagerFailover(@TempDir java.nio.file.Path tmpTestDir)
throws Exception {
// This test will kill JM, so we run it in a new cluster to avoid affecting other tests
runTestWithNewMiniCluster(
miniCluster ->
testContinuousTextFileSource(tmpTestDir, FailoverType.JM, miniCluster));
}
private void testContinuousTextFileSource(
java.nio.file.Path tmpTestDir, FailoverType type, MiniCluster miniCluster)
throws Exception {
final File testDir = tmpTestDir.toFile();
final FileSource<String> source =
FileSource.forRecordStreamFormat(
new TextLineInputFormat(), Path.fromLocalFile(testDir))
.monitorContinuously(Duration.ofMillis(5))
.build();
final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
env.setParallelism(PARALLELISM);
env.enableCheckpointing(10L);
final DataStream<String> stream =
env.fromSource(source, WatermarkStrategy.noWatermarks(), "file-source");
// write one file, execute, and wait for its result
// that way we know that the application was running and the source has
// done its first chunk of work already
CloseableIterator<String> iter = stream.collectAsync();
JobClient client = env.executeAsync("jobExecutionName");
JobID jobId = client.getJobID();
final int numLinesFirst = LINES_PER_FILE[0].length;
final int numLinesAfter = LINES.length - numLinesFirst;
writeFile(testDir, 0);
final ArrayList<String> result1 = new ArrayList<>(numLinesFirst);
while (iter.hasNext()) {
result1.add(iter.next());
if (result1.size() == numLinesFirst) {
break;
}
}
// write the remaining files over time, after that collect the final result
for (int i = 1; i < LINES_PER_FILE.length; i++) {
Thread.sleep(10);
writeFile(testDir, i);
final boolean failAfterHalfOfInput = i == LINES_PER_FILE.length / 2;
if (failAfterHalfOfInput) {
triggerFailover(type, jobId, () -> {}, miniCluster);
}
}
final ArrayList<String> result2 = new ArrayList<>(numLinesAfter);
while (iter.hasNext()) {
result2.add(iter.next());
if (result2.size() == numLinesAfter) {
break;
}
}
// shut down the job, now that we have all the results we expected.
client.cancel().get();
result1.addAll(result2);
verifyResult(result1);
}
// ------------------------------------------------------------------------
// test utilities
// ------------------------------------------------------------------------
private
|
FileSourceTextLinesITCase
|
java
|
micronaut-projects__micronaut-core
|
context/src/test/groovy/io/micronaut/scheduling/exceptions/TypeSpecificHandler.java
|
{
"start": 820,
"end": 1282
}
|
class ____ implements TaskExceptionHandler<Object, InstantiationException> {
private Object bean;
private InstantiationException throwable;
@Override
public void handle(Object bean, InstantiationException throwable) {
this.bean = bean;
this.throwable = throwable;
}
public Object getBean() {
return bean;
}
public InstantiationException getThrowable() {
return throwable;
}
}
|
TypeSpecificHandler
|
java
|
alibaba__druid
|
core/src/main/java/com/alibaba/druid/sql/dialect/postgresql/visitor/PGASTVisitor.java
|
{
"start": 1033,
"end": 5698
}
|
interface ____ extends SQLASTVisitor {
default void endVisit(PGSelectQueryBlock x) {
endVisit((SQLSelectQueryBlock) x);
}
default boolean visit(PGSelectQueryBlock x) {
return visit((SQLSelectQueryBlock) x);
}
default void endVisit(PGSelectQueryBlock.FetchClause x) {
}
default boolean visit(PGSelectQueryBlock.FetchClause x) {
return true;
}
default void endVisit(PGSelectQueryBlock.ForClause x) {
}
default boolean visit(PGSelectQueryBlock.ForClause x) {
return true;
}
default void endVisit(PGDeleteStatement x) {
}
default boolean visit(PGDeleteStatement x) {
return true;
}
default void endVisit(PGInsertStatement x) {
}
default boolean visit(PGInsertStatement x) {
return true;
}
default void endVisit(PGSelectStatement x) {
endVisit((SQLSelectStatement) x);
}
default boolean visit(PGSelectStatement x) {
return visit((SQLSelectStatement) x);
}
default void endVisit(PGUpdateStatement x) {
}
default boolean visit(PGUpdateStatement x) {
return true;
}
default void endVisit(PGFunctionTableSource x) {
}
default boolean visit(PGFunctionTableSource x) {
return true;
}
default void endVisit(PGTypeCastExpr x) {
}
default boolean visit(PGTypeCastExpr x) {
return true;
}
default void endVisit(PGExtractExpr x) {
}
default boolean visit(PGExtractExpr x) {
return true;
}
default void endVisit(PGBoxExpr x) {
}
default boolean visit(PGBoxExpr x) {
return true;
}
default void endVisit(PGAttrExpr x) {
}
default boolean visit(PGAttrExpr x) {
return true;
}
default void endVisit(PGPointExpr x) {
}
default boolean visit(PGPointExpr x) {
return true;
}
default void endVisit(PGMacAddrExpr x) {
}
default boolean visit(PGMacAddrExpr x) {
return true;
}
default void endVisit(PGInetExpr x) {
}
default boolean visit(PGInetExpr x) {
return true;
}
default void endVisit(PGCidrExpr x) {
}
default boolean visit(PGCidrExpr x) {
return true;
}
default void endVisit(PGPolygonExpr x) {
}
default boolean visit(PGPolygonExpr x) {
return true;
}
default void endVisit(PGCircleExpr x) {
}
default boolean visit(PGCircleExpr x) {
return true;
}
default void endVisit(PGLineSegmentsExpr x) {
}
default boolean visit(PGLineSegmentsExpr x) {
return true;
}
default void endVisit(PGShowStatement x) {
}
default boolean visit(PGShowStatement x) {
return true;
}
default void endVisit(PGStartTransactionStatement x) {
}
default boolean visit(PGStartTransactionStatement x) {
return true;
}
default void endVisit(PGDoStatement x) {
}
default boolean visit(PGDoStatement x) {
return true;
}
default void endVisit(PGEndTransactionStatement x) {
}
default boolean visit(PGEndTransactionStatement x) {
return true;
}
default void endVisit(PGConnectToStatement x) {
}
default boolean visit(PGConnectToStatement x) {
return true;
}
default void endVisit(PGCreateSchemaStatement x) {
}
default boolean visit(PGCreateSchemaStatement x) {
return true;
}
default void endVisit(PGDropSchemaStatement x) {
}
default boolean visit(PGDropDatabaseStatement x) {
return true;
}
default void endVisit(PGDropDatabaseStatement x) {
}
default boolean visit(PGDropSchemaStatement x) {
return true;
}
default void endVisit(PGAlterSchemaStatement x) {
}
default boolean visit(PGAlterSchemaStatement x) {
return true;
}
default void endVisit(PGCharExpr x) {
endVisit((SQLCharExpr) x);
}
default boolean visit(PGCharExpr x) {
return visit((SQLCharExpr) x);
}
default void endVisit(PGAnalyzeStatement x) {
}
default boolean visit(PGAnalyzeStatement x) {
return true;
}
default void endVisit(PGVacuumStatement x) {
}
default boolean visit(PGVacuumStatement x) {
return true;
}
default boolean visit(PGCreateDatabaseStatement x) {
return true;
}
default void endVisit(PGCreateDatabaseStatement x) {
}
default void endVisit(PGAlterDatabaseStatement x) {
}
default boolean visit(PGAlterDatabaseStatement x) {
return true;
}
}
|
PGASTVisitor
|
java
|
junit-team__junit5
|
jupiter-tests/src/test/java/org/junit/jupiter/engine/descriptor/subpackage/Class2WithTestCases.java
|
{
"start": 437,
"end": 495
}
|
class ____ {
@Test
void test2() {
}
}
|
Class2WithTestCases
|
java
|
lettuce-io__lettuce-core
|
src/main/java/io/lettuce/core/HGetExArgs.java
|
{
"start": 641,
"end": 906
}
|
class ____ implements CompositeArgument {
private Long ex;
private Long exAt;
private Long px;
private Long pxAt;
private boolean persist = false;
/**
* Builder entry points for {@link HGetExArgs}.
*/
public static
|
HGetExArgs
|
java
|
lettuce-io__lettuce-core
|
src/main/java/io/lettuce/core/internal/LettuceClassUtils.java
|
{
"start": 3736,
"end": 4182
}
|
class ____ be used as fallback.
*
* @return the default ClassLoader (never <code>null</code>)
* @see java.lang.Thread#getContextClassLoader()
*/
private static ClassLoader getDefaultClassLoader() {
ClassLoader cl = null;
try {
cl = Thread.currentThread().getContextClassLoader();
} catch (Throwable ex) {
// Cannot access thread context ClassLoader - falling back to system
|
will
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/schemaupdate/SequenceReadingTest.java
|
{
"start": 1561,
"end": 2379
}
|
class ____ implements ServiceRegistryProducer {
@Override
public StandardServiceRegistry produceServiceRegistry(StandardServiceRegistryBuilder builder) {
return builder.applySetting( DIALECT, MyExtendedH2Dialect.class ).build();
}
@Test
public void testSequenceReading(DomainModelScope modelScope) {
var model = modelScope.getDomainModel();
model.orderColumns( false );
model.validate();
try {
// try to update the schema
new SchemaUpdate().execute( EnumSet.of( TargetType.DATABASE ), model );
}
finally {
try {
// clean up
new SchemaExport().drop( EnumSet.of( TargetType.DATABASE ), model );
}
catch (Exception ignore) {
}
}
}
/**
* An integral piece of the bug is Dialects which to not support sequence, so lets trick the test
*/
public static
|
SequenceReadingTest
|
java
|
spring-projects__spring-boot
|
module/spring-boot-devtools/src/main/java/org/springframework/boot/devtools/restart/server/HttpRestartServer.java
|
{
"start": 1298,
"end": 2904
}
|
class ____ {
private static final Log logger = LogFactory.getLog(HttpRestartServer.class);
private final RestartServer server;
/**
* Create a new {@link HttpRestartServer} instance.
* @param sourceDirectoryUrlFilter the source filter used to link remote directory to
* the local classpath
*/
public HttpRestartServer(SourceDirectoryUrlFilter sourceDirectoryUrlFilter) {
Assert.notNull(sourceDirectoryUrlFilter, "'sourceDirectoryUrlFilter' must not be null");
this.server = new RestartServer(sourceDirectoryUrlFilter);
}
/**
* Create a new {@link HttpRestartServer} instance.
* @param restartServer the underlying restart server
*/
public HttpRestartServer(RestartServer restartServer) {
Assert.notNull(restartServer, "'restartServer' must not be null");
this.server = restartServer;
}
/**
* Handle a server request.
* @param request the request
* @param response the response
* @throws IOException in case of I/O errors
*/
public void handle(ServerHttpRequest request, ServerHttpResponse response) throws IOException {
try {
Assert.state(request.getHeaders().getContentLength() > 0, "No content");
ObjectInputStream objectInputStream = new ObjectInputStream(request.getBody());
ClassLoaderFiles files = (ClassLoaderFiles) objectInputStream.readObject();
objectInputStream.close();
this.server.updateAndRestart(files);
response.setStatusCode(HttpStatus.OK);
}
catch (Exception ex) {
logger.warn("Unable to handler restart server HTTP request", ex);
response.setStatusCode(HttpStatus.INTERNAL_SERVER_ERROR);
}
}
}
|
HttpRestartServer
|
java
|
elastic__elasticsearch
|
test/framework/src/main/java/org/elasticsearch/datageneration/fields/leaf/Wrappers.java
|
{
"start": 702,
"end": 1818
}
|
class ____ {
/**
* Applies default wrappers for raw values - adds nulls and wraps values in arrays.
* @return
*/
public static Supplier<Object> defaults(Supplier<Object> rawValues, DataSource dataSource) {
var nulls = dataSource.get(new DataSourceRequest.NullWrapper());
var arrays = dataSource.get(new DataSourceRequest.ArrayWrapper());
return arrays.wrapper().compose(nulls.wrapper()).apply(rawValues::get);
}
/**
* Applies default wrappers for raw values and also adds malformed values.
* @return
*/
public static Supplier<Object> defaultsWithMalformed(
Supplier<Object> rawValues,
Supplier<Object> malformedValues,
DataSource dataSource
) {
var nulls = dataSource.get(new DataSourceRequest.NullWrapper());
var malformed = dataSource.get(new DataSourceRequest.MalformedWrapper(malformedValues));
var arrays = dataSource.get(new DataSourceRequest.ArrayWrapper());
return arrays.wrapper().compose(nulls.wrapper()).compose(malformed.wrapper()).apply(rawValues::get);
}
}
|
Wrappers
|
java
|
apache__flink
|
flink-streaming-java/src/main/java/org/apache/flink/streaming/api/functions/sink/filesystem/Buckets.java
|
{
"start": 2016,
"end": 14792
}
|
class ____<IN, BucketID> {
private static final Logger LOG = LoggerFactory.getLogger(Buckets.class);
// ------------------------ configuration fields --------------------------
private final Path basePath;
private final BucketFactory<IN, BucketID> bucketFactory;
private final BucketAssigner<IN, BucketID> bucketAssigner;
private final BucketWriter<IN, BucketID> bucketWriter;
private final RollingPolicy<IN, BucketID> rollingPolicy;
// --------------------------- runtime fields -----------------------------
private final int subtaskIndex;
private final Buckets.BucketerContext bucketerContext;
private final Map<BucketID, Bucket<IN, BucketID>> activeBuckets;
private long maxPartCounter;
private final OutputFileConfig outputFileConfig;
@Nullable private BucketLifeCycleListener<IN, BucketID> bucketLifeCycleListener;
@Nullable private FileLifeCycleListener<BucketID> fileLifeCycleListener;
// --------------------------- State Related Fields -----------------------------
private final BucketStateSerializer<BucketID> bucketStateSerializer;
/**
* A constructor creating a new empty bucket manager.
*
* @param basePath The base path for our buckets.
* @param bucketAssigner The {@link BucketAssigner} provided by the user.
* @param bucketFactory The {@link BucketFactory} to be used to create buckets.
* @param bucketWriter The {@link BucketWriter} to be used when writing data.
* @param rollingPolicy The {@link RollingPolicy} as specified by the user.
*/
public Buckets(
final Path basePath,
final BucketAssigner<IN, BucketID> bucketAssigner,
final BucketFactory<IN, BucketID> bucketFactory,
final BucketWriter<IN, BucketID> bucketWriter,
final RollingPolicy<IN, BucketID> rollingPolicy,
final int subtaskIndex,
final OutputFileConfig outputFileConfig) {
this.basePath = Preconditions.checkNotNull(basePath);
this.bucketAssigner = Preconditions.checkNotNull(bucketAssigner);
this.bucketFactory = Preconditions.checkNotNull(bucketFactory);
this.bucketWriter = Preconditions.checkNotNull(bucketWriter);
this.rollingPolicy = Preconditions.checkNotNull(rollingPolicy);
this.subtaskIndex = subtaskIndex;
this.outputFileConfig = Preconditions.checkNotNull(outputFileConfig);
this.activeBuckets = new HashMap<>();
this.bucketerContext = new Buckets.BucketerContext();
this.bucketStateSerializer =
new BucketStateSerializer<>(
bucketWriter.getProperties().getInProgressFileRecoverableSerializer(),
bucketWriter.getProperties().getPendingFileRecoverableSerializer(),
bucketAssigner.getSerializer());
this.maxPartCounter = 0L;
}
public void setBucketLifeCycleListener(
BucketLifeCycleListener<IN, BucketID> bucketLifeCycleListener) {
this.bucketLifeCycleListener = Preconditions.checkNotNull(bucketLifeCycleListener);
}
public void setFileLifeCycleListener(FileLifeCycleListener<BucketID> fileLifeCycleListener) {
this.fileLifeCycleListener = Preconditions.checkNotNull(fileLifeCycleListener);
}
/**
* Initializes the state after recovery from a failure.
*
* <p>During this process:
*
* <ol>
* <li>we set the initial value for part counter to the maximum value used before across all
* tasks and buckets. This guarantees that we do not overwrite valid data,
* <li>we commit any pending files for previous checkpoints (previous to the last successful
* one from which we restore),
* <li>we resume writing to the previous in-progress file of each bucket, and
* <li>if we receive multiple states for the same bucket, we merge them.
* </ol>
*
* @param bucketStates the state holding recovered state about active buckets.
* @param partCounterState the state holding the max previously used part counters.
* @throws Exception if anything goes wrong during retrieving the state or restoring/committing
* of any in-progress/pending part files
*/
public void initializeState(
final ListState<byte[]> bucketStates, final ListState<Long> partCounterState)
throws Exception {
initializePartCounter(partCounterState);
LOG.info(
"Subtask {} initializing its state (max part counter={}).",
subtaskIndex,
maxPartCounter);
initializeActiveBuckets(bucketStates);
}
private void initializePartCounter(final ListState<Long> partCounterState) throws Exception {
long maxCounter = 0L;
for (long partCounter : partCounterState.get()) {
maxCounter = Math.max(partCounter, maxCounter);
}
maxPartCounter = maxCounter;
}
private void initializeActiveBuckets(final ListState<byte[]> bucketStates) throws Exception {
for (byte[] serializedRecoveredState : bucketStates.get()) {
final BucketState<BucketID> recoveredState =
SimpleVersionedSerialization.readVersionAndDeSerialize(
bucketStateSerializer, serializedRecoveredState);
handleRestoredBucketState(recoveredState);
}
}
private void handleRestoredBucketState(final BucketState<BucketID> recoveredState)
throws Exception {
final BucketID bucketId = recoveredState.getBucketId();
if (LOG.isDebugEnabled()) {
LOG.debug("Subtask {} restoring: {}", subtaskIndex, recoveredState);
}
final Bucket<IN, BucketID> restoredBucket =
bucketFactory.restoreBucket(
subtaskIndex,
maxPartCounter,
bucketWriter,
rollingPolicy,
recoveredState,
fileLifeCycleListener,
outputFileConfig);
updateActiveBucketId(bucketId, restoredBucket);
}
private void updateActiveBucketId(
final BucketID bucketId, final Bucket<IN, BucketID> restoredBucket) throws IOException {
if (!restoredBucket.isActive()) {
notifyBucketInactive(restoredBucket);
return;
}
final Bucket<IN, BucketID> bucket = activeBuckets.get(bucketId);
if (bucket != null) {
bucket.merge(restoredBucket);
} else {
activeBuckets.put(bucketId, restoredBucket);
}
}
public void commitUpToCheckpoint(final long checkpointId) throws IOException {
final Iterator<Map.Entry<BucketID, Bucket<IN, BucketID>>> activeBucketIt =
activeBuckets.entrySet().iterator();
LOG.info(
"Subtask {} received completion notification for checkpoint with id={}.",
subtaskIndex,
checkpointId);
while (activeBucketIt.hasNext()) {
final Bucket<IN, BucketID> bucket = activeBucketIt.next().getValue();
bucket.onSuccessfulCompletionOfCheckpoint(checkpointId);
if (!bucket.isActive()) {
// We've dealt with all the pending files and the writer for this bucket is not
// currently open.
// Therefore this bucket is currently inactive and we can remove it from our state.
activeBucketIt.remove();
notifyBucketInactive(bucket);
}
}
}
public void snapshotState(
final long checkpointId,
final ListState<byte[]> bucketStatesContainer,
final ListState<Long> partCounterStateContainer)
throws Exception {
Preconditions.checkState(
bucketWriter != null && bucketStateSerializer != null,
"sink has not been initialized");
LOG.info(
"Subtask {} checkpointing for checkpoint with id={} (max part counter={}).",
subtaskIndex,
checkpointId,
maxPartCounter);
bucketStatesContainer.clear();
snapshotActiveBuckets(checkpointId, bucketStatesContainer);
partCounterStateContainer.update(Collections.singletonList(maxPartCounter));
}
private void snapshotActiveBuckets(
final long checkpointId, final ListState<byte[]> bucketStatesContainer)
throws Exception {
for (Bucket<IN, BucketID> bucket : activeBuckets.values()) {
final BucketState<BucketID> bucketState = bucket.onReceptionOfCheckpoint(checkpointId);
final byte[] serializedBucketState =
SimpleVersionedSerialization.writeVersionAndSerialize(
bucketStateSerializer, bucketState);
bucketStatesContainer.add(serializedBucketState);
if (LOG.isDebugEnabled()) {
LOG.debug("Subtask {} checkpointing: {}", subtaskIndex, bucketState);
}
}
}
@VisibleForTesting
public Bucket<IN, BucketID> onElement(final IN value, final SinkFunction.Context context)
throws Exception {
return onElement(
value,
context.currentProcessingTime(),
context.timestamp(),
context.currentWatermark());
}
public Bucket<IN, BucketID> onElement(
final IN value,
final long currentProcessingTime,
@Nullable final Long elementTimestamp,
final long currentWatermark)
throws Exception {
// setting the values in the bucketer context
bucketerContext.update(elementTimestamp, currentWatermark, currentProcessingTime);
final BucketID bucketId = bucketAssigner.getBucketId(value, bucketerContext);
final Bucket<IN, BucketID> bucket = getOrCreateBucketForBucketId(bucketId);
bucket.write(value, currentProcessingTime);
// we update the global max counter here because as buckets become inactive and
// get removed from the list of active buckets, at the time when we want to create
// another part file for the bucket, if we start from 0 we may overwrite previous parts.
this.maxPartCounter = Math.max(maxPartCounter, bucket.getPartCounter());
return bucket;
}
private Bucket<IN, BucketID> getOrCreateBucketForBucketId(final BucketID bucketId)
throws IOException {
Bucket<IN, BucketID> bucket = activeBuckets.get(bucketId);
if (bucket == null) {
final Path bucketPath = assembleBucketPath(bucketId);
bucket =
bucketFactory.getNewBucket(
subtaskIndex,
bucketId,
bucketPath,
maxPartCounter,
bucketWriter,
rollingPolicy,
fileLifeCycleListener,
outputFileConfig);
activeBuckets.put(bucketId, bucket);
notifyBucketCreate(bucket);
}
return bucket;
}
public void onProcessingTime(long timestamp) throws Exception {
for (Bucket<IN, BucketID> bucket : activeBuckets.values()) {
bucket.onProcessingTime(timestamp);
}
}
public void closePartFileForBucket(BucketID bucketID) throws Exception {
Bucket<IN, BucketID> bucket = activeBuckets.get(bucketID);
if (bucket != null) {
bucket.closePartFile();
}
}
public void close() {
if (activeBuckets != null) {
activeBuckets.values().forEach(Bucket::disposePartFile);
}
}
private Path assembleBucketPath(BucketID bucketId) {
final String child = bucketId.toString();
if ("".equals(child)) {
return basePath;
}
return new Path(basePath, child);
}
private void notifyBucketCreate(Bucket<IN, BucketID> bucket) {
if (bucketLifeCycleListener != null) {
bucketLifeCycleListener.bucketCreated(bucket);
}
}
private void notifyBucketInactive(Bucket<IN, BucketID> bucket) {
if (bucketLifeCycleListener != null) {
bucketLifeCycleListener.bucketInactive(bucket);
}
}
/**
* The {@link BucketAssigner.Context} exposed to the {@link BucketAssigner#getBucketId(Object,
* BucketAssigner.Context)} whenever a new incoming element arrives.
*/
private static final
|
Buckets
|
java
|
google__auto
|
value/src/main/java/com/google/auto/value/processor/AnnotatedTypeMirror.java
|
{
"start": 1388,
"end": 1453
}
|
interface ____<T> {
* @Nullable T thing();
* }
* abstract
|
Parent
|
java
|
alibaba__druid
|
core/src/test/java/com/alibaba/druid/bvt/pool/TestPoolPreparedStatement.java
|
{
"start": 958,
"end": 2923
}
|
class ____ extends TestCase {
private MockDriver driver;
private DruidDataSource dataSource;
protected void setUp() throws Exception {
DruidDataSourceStatManager.clear();
driver = new MockDriver();
dataSource = new DruidDataSource();
dataSource.setUrl("jdbc:mock:xxx");
dataSource.setDriver(driver);
dataSource.setInitialSize(1);
dataSource.setMaxActive(2);
dataSource.setMaxIdle(2);
dataSource.setMinIdle(1);
dataSource.setMinEvictableIdleTimeMillis(300 * 1000); // 300 / 10
dataSource.setTimeBetweenEvictionRunsMillis(10); // 180 / 10
dataSource.setTestWhileIdle(true);
dataSource.setTestOnBorrow(false);
dataSource.setValidationQuery("SELECT 1");
dataSource.setFilters("stat");
dataSource.setPoolPreparedStatements(true);
// ((StatFilter) dataSource.getProxyFilters().get(0)).setMaxSqlStatCount(100);
}
protected void tearDown() throws Exception {
dataSource.close();
assertEquals(0, DruidDataSourceStatManager.getInstance().getDataSourceList().size());
}
public void test_removeAbandoned() throws Exception {
MockPreparedStatement raw;
{
Connection conn = dataSource.getConnection();
PreparedStatement stmt = conn.prepareStatement("SELECT ?");
raw = stmt.unwrap(MockPreparedStatement.class);
stmt.execute();
stmt.close();
conn.close();
}
{
Connection conn = dataSource.getConnection();
PreparedStatement stmt = conn.prepareStatement("SELECT ?");
assertTrue(raw == stmt.unwrap(MockPreparedStatement.class));
stmt.execute();
stmt.close();
conn.close();
}
assertEquals(0, dataSource.getActiveCount());
assertEquals(1, dataSource.getPoolingCount());
}
}
|
TestPoolPreparedStatement
|
java
|
apache__kafka
|
tools/src/main/java/org/apache/kafka/tools/consumer/ConsoleShareConsumer.java
|
{
"start": 6304,
"end": 7684
}
|
class ____ {
final String topic;
final ShareConsumer<byte[], byte[]> consumer;
final long timeoutMs;
final Time time = Time.SYSTEM;
Iterator<ConsumerRecord<byte[], byte[]>> recordIter = Collections.emptyIterator();
public ConsumerWrapper(String topic,
ShareConsumer<byte[], byte[]> consumer,
long timeoutMs) {
this.topic = topic;
this.consumer = consumer;
this.timeoutMs = timeoutMs;
consumer.subscribe(List.of(topic));
}
ConsumerRecord<byte[], byte[]> receive() {
long startTimeMs = time.milliseconds();
while (!recordIter.hasNext()) {
recordIter = consumer.poll(Duration.ofMillis(timeoutMs)).iterator();
if (!recordIter.hasNext() && (time.milliseconds() - startTimeMs > timeoutMs)) {
throw new TimeoutException();
}
}
return recordIter.next();
}
void acknowledge(ConsumerRecord<byte[], byte[]> record, AcknowledgeType acknowledgeType) {
consumer.acknowledge(record, acknowledgeType);
}
void wakeup() {
this.consumer.wakeup();
}
void cleanup() {
this.consumer.close();
}
}
}
|
ConsumerWrapper
|
java
|
apache__thrift
|
lib/java/src/test/java/org/apache/thrift/transport/sasl/TestSaslNegotiationFrameReader.java
|
{
"start": 1275,
"end": 2955
}
|
class ____ {
@Test
public void testRead() throws TTransportException {
TMemoryInputTransport transport = new TMemoryInputTransport();
SaslNegotiationFrameReader negotiationReader = new SaslNegotiationFrameReader();
// No bytes received
negotiationReader.read(transport);
assertFalse(negotiationReader.isComplete(), "No bytes received");
assertFalse(negotiationReader.getHeader().isComplete(), "No bytes received");
// Read header
ByteBuffer buffer = ByteBuffer.allocate(5);
buffer.put(0, NegotiationStatus.OK.getValue());
buffer.putInt(1, 10);
transport.reset(buffer.array());
negotiationReader.read(transport);
assertFalse(negotiationReader.isComplete(), "Only header is complete");
assertTrue(negotiationReader.getHeader().isComplete(), "Header should be complete");
assertEquals(10, negotiationReader.getHeader().payloadSize(), "Payload size should be 10");
// Read payload
transport.reset(new byte[20]);
negotiationReader.read(transport);
assertTrue(negotiationReader.isComplete(), "Reader should be complete");
assertEquals(10, negotiationReader.getPayload().length, "Payload length should be 10");
}
@Test
public void testReadInvalidNegotiationStatus() throws TTransportException {
byte[] bytes = new byte[5];
// Invalid status byte.
bytes[0] = -1;
TMemoryInputTransport transport = new TMemoryInputTransport(bytes);
SaslNegotiationFrameReader negotiationReader = new SaslNegotiationFrameReader();
assertThrows(
TSaslNegotiationException.class,
() -> {
negotiationReader.read(transport);
});
}
}
|
TestSaslNegotiationFrameReader
|
java
|
spring-projects__spring-framework
|
spring-test/src/main/java/org/springframework/test/web/reactive/server/JsonEncoderDecoder.java
|
{
"start": 4518,
"end": 5457
}
|
class ____ implements JsonConverterDelegate {
@SuppressWarnings("unchecked")
@Override
public <T> T read(String content, ResolvableType targetType) {
byte[] bytes = content.getBytes(StandardCharsets.UTF_8);
DataBuffer buffer = DefaultDataBufferFactory.sharedInstance.wrap(bytes);
Object value = decoder().decode(buffer, targetType, MediaType.APPLICATION_JSON, null);
Assert.state(value != null, () -> "Could not decode JSON content: " + content);
return (T) value;
}
@SuppressWarnings("unchecked")
@Override
public <T> T map(Object value, ResolvableType targetType) {
DataBuffer buffer = ((Encoder<T>) encoder()).encodeValue((T) value,
DefaultDataBufferFactory.sharedInstance, targetType, MimeTypeUtils.APPLICATION_JSON, null);
try {
return read(buffer.toString(StandardCharsets.UTF_8), targetType);
}
finally {
DataBufferUtils.release(buffer);
}
}
}
}
|
CodecsJsonConverterDelegate
|
java
|
apache__flink
|
flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/batch/OverAggregateBatchRestoreTest.java
|
{
"start": 1231,
"end": 1918
}
|
class ____ extends BatchRestoreTestBase {
public OverAggregateBatchRestoreTest() {
super(BatchExecOverAggregate.class);
}
@Override
public List<TableTestProgram> programs() {
return Arrays.asList(
// These tests fail due to FLINK-25802
// OverAggregateTestPrograms.OVER_AGGREGATE_TIME_BOUNDED_PARTITIONED_ROWS,
// OverAggregateTestPrograms.OVER_AGGREGATE_TIME_BOUNDED_NON_PARTITIONED_ROWS
OverAggregateTestPrograms.OVER_AGGREGATE_UNBOUNDED_PARTITIONED_ROWS,
OverAggregateTestPrograms.OVER_AGGREGATE_ROW_BOUNDED_PARTITIONED_PRECEDING_ROWS);
}
}
|
OverAggregateBatchRestoreTest
|
java
|
apache__spark
|
sql/core/src/main/java/org/apache/spark/sql/execution/datasources/parquet/VectorizedDeltaLengthByteArrayReader.java
|
{
"start": 1389,
"end": 3212
}
|
class ____ extends VectorizedReaderBase implements
VectorizedValuesReader {
private final VectorizedDeltaBinaryPackedReader lengthReader;
private ByteBufferInputStream in;
private WritableColumnVector lengthsVector;
private int currentRow = 0;
VectorizedDeltaLengthByteArrayReader() {
lengthReader = new VectorizedDeltaBinaryPackedReader();
}
@Override
public void initFromPage(int valueCount, ByteBufferInputStream in) throws IOException {
lengthsVector = new OnHeapColumnVector(valueCount, IntegerType);
lengthReader.initFromPage(valueCount, in);
lengthReader.readIntegers(lengthReader.getTotalValueCount(), lengthsVector, 0);
this.in = in.remainingStream();
}
@Override
public void readBinary(int total, WritableColumnVector c, int rowId) {
ByteBuffer buffer;
ByteBufferOutputWriter outputWriter = ByteBufferOutputWriter::writeArrayByteBuffer;
int length;
for (int i = 0; i < total; i++) {
length = lengthsVector.getInt(currentRow + i);
try {
buffer = in.slice(length);
} catch (EOFException e) {
throw new ParquetDecodingException("Failed to read " + length + " bytes");
}
outputWriter.write(c, rowId + i, buffer, length);
}
currentRow += total;
}
public ByteBuffer getBytes(int rowId) {
int length = lengthsVector.getInt(rowId);
try {
return in.slice(length);
} catch (EOFException e) {
throw new ParquetDecodingException("Failed to read " + length + " bytes");
}
}
@Override
public void skipBinary(int total) {
for (int i = 0; i < total; i++) {
int remaining = lengthsVector.getInt(currentRow + i);
while (remaining > 0) {
remaining -= in.skip(remaining);
}
}
currentRow += total;
}
}
|
VectorizedDeltaLengthByteArrayReader
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/mapping/converted/converter/generics/ParameterizedAttributeConverterParameterTypeTest.java
|
{
"start": 3647,
"end": 4481
}
|
class ____ implements AttributeConverter<List<Integer>,String> {
@Override
public String convertToDatabaseColumn(List<Integer> attribute) {
if ( attribute == null || attribute.isEmpty() ) {
return null;
}
else {
return StringHelper.join( ", ", attribute.iterator() );
}
}
@Override
public List<Integer> convertToEntityAttribute(String dbData) {
if ( dbData == null ) {
return null;
}
dbData = dbData.trim();
if ( dbData.length() == 0 ) {
return null;
}
final List<Integer> integers = new ArrayList<Integer>();
final StringTokenizer tokens = new StringTokenizer( dbData, "," );
while ( tokens.hasMoreTokens() ) {
integers.add( Integer.valueOf( tokens.nextToken() ) );
}
return integers;
}
}
@Converter( autoApply = true )
public static
|
IntegerListConverter
|
java
|
apache__flink
|
flink-runtime/src/main/java/org/apache/flink/runtime/scheduler/adaptivebatch/ExecutionPlanSchedulingContext.java
|
{
"start": 1148,
"end": 2843
}
|
interface ____ {
/**
* Retrieves the parallelism of consumers connected to the specified intermediate data set.
*
* @param executionJobVertexParallelismRetriever A function that retrieves the parallelism of a
* job vertex.
* @param intermediateDataSet The intermediate data set whose consumer parallelism is queried.
* @return The parallelism of the consumers.
*/
int getConsumersParallelism(
Function<JobVertexID, Integer> executionJobVertexParallelismRetriever,
IntermediateDataSet intermediateDataSet);
/**
* Retrieves the maximum parallelism of consumers connected to the specified intermediate data
* set.
*
* @param executionJobVertexMaxParallelismRetriever A function that retrieves the maximum
* parallelism of a job vertex.
* @param intermediateDataSet The intermediate data set whose consumer maximum parallelism is
* queried.
* @return The maximum parallelism of the consumers.
*/
int getConsumersMaxParallelism(
Function<JobVertexID, Integer> executionJobVertexMaxParallelismRetriever,
IntermediateDataSet intermediateDataSet);
/**
* Retrieves the count of pending operators waiting to be transferred to job vertices.
*
* @return the number of pending operators.
*/
int getPendingOperatorCount();
/**
* Retrieves the JSON representation of the stream graph for the original job.
*
* @return the JSON representation of the stream graph, or null if the stream graph is not
* available.
*/
@Nullable
String getStreamGraphJson();
}
|
ExecutionPlanSchedulingContext
|
java
|
apache__camel
|
components/camel-salesforce/camel-salesforce-component/src/main/java/org/apache/camel/component/salesforce/internal/dto/LoginToken.java
|
{
"start": 1068,
"end": 2734
}
|
class ____ {
private String accessToken;
private String instanceUrl;
private String id;
private String signature;
private String issuedAt;
private String tokenType;
private String isReadOnly;
@JsonProperty("access_token")
public String getAccessToken() {
return accessToken;
}
@JsonProperty("access_token")
public void setAccessToken(String accessToken) {
this.accessToken = accessToken;
}
@JsonProperty("instance_url")
public String getInstanceUrl() {
return instanceUrl;
}
@JsonProperty("instance_url")
public void setInstanceUrl(String instanceUrl) {
this.instanceUrl = instanceUrl;
}
public String getId() {
return id;
}
public void setId(String id) {
this.id = id;
}
public String getSignature() {
return signature;
}
public void setSignature(String signature) {
this.signature = signature;
}
@JsonProperty("issued_at")
public String getIssuedAt() {
return issuedAt;
}
@JsonProperty("issued_at")
public void setIssuedAt(String issuedAt) {
this.issuedAt = issuedAt;
}
@JsonProperty("token_type")
public String getTokenType() {
return tokenType;
}
@JsonProperty("token_type")
public void setTokenType(String tokenType) {
this.tokenType = tokenType;
}
@JsonProperty("is_readonly")
public String getIsReadOnly() {
return isReadOnly;
}
@JsonProperty("is_readonly")
public void setIsReadOnly(String isReadOnly) {
this.isReadOnly = isReadOnly;
}
}
|
LoginToken
|
java
|
apache__hadoop
|
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/component/instance/ComponentInstance.java
|
{
"start": 42248,
"end": 45169
}
|
class ____ implements Runnable {
private ContainerId containerId;
private NodeId nodeId;
private NMClient nmClient;
private ComponentInstance instance;
LocalizationStatusRetriever(ServiceScheduler scheduler,
ContainerId containerId, ComponentInstance instance) {
this.nmClient = scheduler.getNmClient().getClient();
this.containerId = containerId;
this.instance = instance;
this.nodeId = instance.getNodeId();
}
@Override
public void run() {
List<org.apache.hadoop.yarn.api.records.LocalizationStatus>
statusesFromNM = null;
try {
statusesFromNM = nmClient.getLocalizationStatuses(containerId,
nodeId);
} catch (YarnException | IOException e) {
LOG.error("{} Failed to get localization statuses for {} {} ",
instance.compInstanceId, nodeId, containerId, e);
}
if (statusesFromNM != null && !statusesFromNM.isEmpty()) {
instance.updateLocalizationStatuses(statusesFromNM);
}
}
}
private void initializeLocalizationStatusRetriever(
ContainerId containerId) {
LOG.info("{} retrieve localization statuses", compInstanceId);
lclizationRetrieverFuture = scheduler.executorService.scheduleAtFixedRate(
new LocalizationStatusRetriever(scheduler, containerId, this),
0, 1, TimeUnit.SECONDS
);
}
private void cancelLclRetriever() {
if (lclizationRetrieverFuture != null &&
!lclizationRetrieverFuture.isDone()) {
LOG.info("{} cancelling localization retriever", compInstanceId);
lclizationRetrieverFuture.cancel(true);
}
}
@VisibleForTesting
boolean isLclRetrieverActive() {
return lclizationRetrieverFuture != null &&
!lclizationRetrieverFuture.isCancelled()
&& !lclizationRetrieverFuture.isDone();
}
public String getHostname() {
return getCompInstanceName() + getComponent().getHostnameSuffix();
}
@Override
public int compareTo(ComponentInstance to) {
return getCompInstanceId().compareTo(to.getCompInstanceId());
}
@Override public boolean equals(Object o) {
if (this == o)
return true;
if (o == null || getClass() != o.getClass())
return false;
ComponentInstance instance = (ComponentInstance) o;
if (containerStartedTime != instance.containerStartedTime)
return false;
return compInstanceId.equals(instance.compInstanceId);
}
@Override public int hashCode() {
int result = compInstanceId.hashCode();
result = 31 * result + (int) (containerStartedTime ^ (containerStartedTime
>>> 32));
return result;
}
/**
* Returns container spec.
*/
public org.apache.hadoop.yarn.service.api.records
.Container getContainerSpec() {
readLock.lock();
try {
return containerSpec;
} finally {
readLock.unlock();
}
}
}
|
LocalizationStatusRetriever
|
java
|
reactor__reactor-core
|
reactor-core/src/main/java/reactor/core/publisher/FluxCreate.java
|
{
"start": 1659,
"end": 1728
}
|
class ____<T> extends Flux<T> implements SourceProducer<T> {
|
FluxCreate
|
java
|
google__dagger
|
javatests/dagger/internal/codegen/MultibindingTest.java
|
{
"start": 22990,
"end": 23466
}
|
class ____ @Inject constructor(set: Set<Provider<MyInterface>>)");
Source parentModule =
CompilerTests.kotlinSource(
"test.ParentModule.kt",
"@file:Suppress(\"INLINE_FROM_HIGHER_PLATFORM\")", // Required to use TODO()
"package test",
"",
"import dagger.Module",
"import dagger.Provides",
"import dagger.multibindings.IntoSet",
"",
"@Module",
"
|
Usage
|
java
|
elastic__elasticsearch
|
distribution/tools/keystore-cli/src/main/java/org/elasticsearch/cli/keystore/RemoveSettingKeyStoreCommand.java
|
{
"start": 884,
"end": 1934
}
|
class ____ extends BaseKeyStoreCommand {
private final OptionSpec<String> arguments;
RemoveSettingKeyStoreCommand() {
super("Remove settings from the keystore", true);
arguments = parser.nonOptions("setting names");
}
@Override
protected void executeCommand(Terminal terminal, OptionSet options, Environment env) throws Exception {
List<String> settings = arguments.values(options);
if (settings.isEmpty()) {
throw new UserException(ExitCodes.USAGE, "Must supply at least one setting to remove");
}
final KeyStoreWrapper keyStore = getKeyStore();
for (String setting : arguments.values(options)) {
if (keyStore.getSettingNames().contains(setting) == false) {
throw new UserException(ExitCodes.CONFIG, "Setting [" + setting + "] does not exist in the keystore.");
}
keyStore.remove(setting);
}
keyStore.save(env.configDir(), getKeyStorePassword().getChars());
}
}
|
RemoveSettingKeyStoreCommand
|
java
|
elastic__elasticsearch
|
server/src/test/java/org/elasticsearch/cluster/routing/GlobalRoutingTableTests.java
|
{
"start": 2918,
"end": 20739
}
|
class ____ extends GlobalRoutingTable {
public GlobalRoutingTableWithEquals(long version, ImmutableOpenMap<ProjectId, RoutingTable> routingTable) {
super(routingTable);
}
public GlobalRoutingTableWithEquals(GlobalRoutingTable other) {
super(other.routingTables());
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o instanceof GlobalRoutingTable that) {
return GlobalRoutingTableWithEquals.equals(this, that);
}
return false;
}
static boolean equals(GlobalRoutingTable left, GlobalRoutingTable right) {
if (left.size() != right.size()) {
return false;
}
if (left.routingTables().keySet().containsAll(right.routingTables().keySet()) == false) {
return false;
}
return left.routingTables()
.keySet()
.stream()
.allMatch(projectId -> equals(left.routingTable(projectId), right.routingTable(projectId)));
}
static boolean equals(RoutingTable left, RoutingTable right) {
return Objects.equals(left.indicesRouting(), right.indicesRouting());
}
@Override
public int hashCode() {
int result = 0;
// This is not pretty, but it's necessary because ImmutableOpenMap does not guarantee that the iteration order is identical
// for separate instances with the same elements
final var iterator = routingTables().entrySet().stream().sorted(Comparator.comparing(e -> e.getKey().id())).iterator();
while (iterator.hasNext()) {
final Map.Entry<ProjectId, RoutingTable> entry = iterator.next();
result = 31 * result + Objects.hash(entry.getKey(), entry.getValue().indicesRouting());
}
return result;
}
@Override
public Diff<GlobalRoutingTable> diff(GlobalRoutingTable previousState) {
return super.diff(previousState);
}
@Override
public String toString() {
return "<test:" + super.toString() + ">";
}
}
public void testNonEmptyDiff() throws Exception {
DiffableTestUtils.testDiffableSerialization(
this::createTestInstance,
this::mutateInstance,
getNamedWriteableRegistry(),
instanceReader(),
GlobalRoutingTable::readDiffFrom,
null,
GlobalRoutingTableWithEquals::equals
);
}
public void testEmptyDiff() throws Exception {
DiffableTestUtils.testDiffableSerialization(
this::createTestInstance,
Function.identity(),
getNamedWriteableRegistry(),
instanceReader(),
GlobalRoutingTable::readDiffFrom,
null,
GlobalRoutingTableWithEquals::equals
);
}
public final void testDiffSerializationPreMultiProject() throws IOException {
// BWC serialization only works for a routing table with a single project
final Function<GlobalRoutingTable, GlobalRoutingTable> mutator = instance -> new GlobalRoutingTable(
ImmutableOpenMap.builder(transformValues(instance.routingTables(), this::mutate)).build()
);
DiffableTestUtils.testDiffableSerialization(
() -> testRoutingTable(1),
mutator,
getNamedWriteableRegistry(),
instanceReader(),
GlobalRoutingTable::readDiffFrom,
PRE_MULTI_PROJECT_TRANSPORT_VERSION,
(original, reconstructed) -> {
// The round-trip will lose the version of the global table and replace it with the version from the inner routing table
return GlobalRoutingTableWithEquals.equals(original.getRoutingTable(), reconstructed.getRoutingTable());
}
);
}
public void testHasSameIndexRouting() {
final GlobalRoutingTable original = randomValueOtherThanMany(
grt -> grt.totalIndexCount() == 0, // The mutation below assume that there is at least 1 index
() -> this.testRoutingTable(randomIntBetween(1, 5))
);
// Exactly the same projects => same routing
GlobalRoutingTable updated = new GlobalRoutingTable(original.routingTables());
assertTrue(original.hasSameIndexRouting(updated));
assertTrue(updated.hasSameIndexRouting(original));
// Updated projects but with same routing => same routing
updated = new GlobalRoutingTable(
ImmutableOpenMap.builder(transformValues(original.routingTables(), rt -> RoutingTable.builder(rt).build())).build()
);
assertTrue(original.hasSameIndexRouting(updated));
assertTrue(updated.hasSameIndexRouting(original));
// Reconstructed index map (with same elements) => different routing
updated = new GlobalRoutingTable(ImmutableOpenMap.builder(transformValues(original.routingTables(), rt -> {
final RoutingTable.Builder builder = RoutingTable.builder();
rt.indicesRouting().values().forEach(builder::add);
return builder.build();
})).build());
assertFalse(original.hasSameIndexRouting(updated));
assertFalse(updated.hasSameIndexRouting(original));
// Mutated routing table => different routing
updated = new GlobalRoutingTable(mutate(original.routingTables()));
assertFalse(original.hasSameIndexRouting(updated));
assertFalse(updated.hasSameIndexRouting(original));
}
public void testInitializeProjects() {
final GlobalRoutingTableWithEquals table1 = this.testRoutingTable(randomIntBetween(3, 5));
assertThat(table1.initializeProjects(Set.of()), sameInstance(table1));
assertThat(table1.initializeProjects(Set.copyOf(randomSubsetOf(table1.routingTables().keySet()))), sameInstance(table1));
Set<ProjectId> addition = randomSet(
1,
5,
() -> randomValueOtherThanMany(table1.routingTables()::containsKey, ESTestCase::randomUniqueProjectId)
);
var table2 = table1.initializeProjects(addition);
assertThat(table2, not(sameInstance(table1)));
assertThat(table2.size(), equalTo(table1.size() + addition.size()));
for (var p : table1.routingTables().keySet()) {
assertThat(table2.routingTable(p), sameInstance(table1.routingTable(p)));
}
for (var p : addition) {
assertThat(table2.routingTable(p), sameInstance(RoutingTable.EMPTY_ROUTING_TABLE));
}
}
public void testBuilderFromEmpty() {
final int numberOfProjects = randomIntBetween(1, 10);
final ProjectId[] projectIds = randomArray(numberOfProjects, numberOfProjects, ProjectId[]::new, ESTestCase::randomUniqueProjectId);
final Integer[] projectIndexCount = randomArray(numberOfProjects, numberOfProjects, Integer[]::new, () -> randomIntBetween(0, 12));
final GlobalRoutingTable.Builder builder = GlobalRoutingTable.builder();
for (int i = 0; i < numberOfProjects; i++) {
builder.put(projectIds[i], addIndices(projectIndexCount[i], RoutingTable.builder()));
}
final GlobalRoutingTable builtRoutingTable = builder.build();
assertThat(builtRoutingTable.size(), equalTo(numberOfProjects));
assertThat(builtRoutingTable.routingTables().keySet(), containsInAnyOrder(projectIds));
for (int i = 0; i < numberOfProjects; i++) {
final ProjectId projectId = projectIds[i];
assertThat(builtRoutingTable.routingTables(), hasKey(projectId));
final RoutingTable projectRoutingTable = builtRoutingTable.routingTable(projectId);
assertThat(projectRoutingTable.indicesRouting().size(), equalTo(projectIndexCount[i]));
}
final int expectedIndexCount = Arrays.stream(projectIndexCount).mapToInt(Integer::intValue).sum();
assertThat(builtRoutingTable.totalIndexCount(), equalTo(expectedIndexCount));
}
public void testBuilderFromExisting() {
final GlobalRoutingTable initial = createTestInstance();
{
final var instance = GlobalRoutingTable.builder(initial).build();
assertTrue("Expected " + initial + " to equal " + instance, GlobalRoutingTableWithEquals.equals(initial, instance));
}
{
final var instance = GlobalRoutingTable.builder(initial).clear().build();
assertThat(instance.routingTables(), anEmptyMap());
}
{
final ProjectId projectId = randomProjectIdOrDefault();
final RoutingTable projectRouting = randomRoutingTable();
final var instance = GlobalRoutingTable.builder(initial).put(projectId, projectRouting).build();
assertThat(instance.routingTables(), aMapWithSize(initial.size() + 1));
assertThat(instance.routingTables(), hasEntry(projectId, projectRouting));
initial.routingTables().forEach((id, rt) -> assertThat(instance.routingTables(), hasEntry(id, rt)));
}
}
public void testRoutingNodesRoundtrip() {
final ClusterState clusterState = buildClusterState(
Map.ofEntries(
Map.entry(ProjectId.fromId(randomAlphaOfLength(11) + "1"), Set.of("test-a", "test-b", "test-c")),
Map.entry(ProjectId.fromId(randomAlphaOfLength(11) + "2"), Set.of("test-a", "test-z"))
)
);
final GlobalRoutingTable originalTable = clusterState.globalRoutingTable();
final RoutingNodes routingNodes = clusterState.getRoutingNodes();
final GlobalRoutingTable fromNodes = originalTable.rebuild(routingNodes, clusterState.metadata());
final Diff<GlobalRoutingTable> routingTableDiff = fromNodes.diff(originalTable);
assertSame(originalTable, routingTableDiff.apply(originalTable));
}
public void testRebuildAfterShardInitialized() {
final ProjectId project1 = ProjectId.fromId(randomAlphaOfLength(11) + "1");
final ProjectId project2 = ProjectId.fromId(randomAlphaOfLength(11) + "2");
final ClusterState clusterState = buildClusterState(
Map.ofEntries(Map.entry(project1, Set.of("test-a", "test-b", "test-c")), Map.entry(project2, Set.of("test-b", "test-z")))
);
final GlobalRoutingTable originalTable = clusterState.globalRoutingTable();
final RoutingNodes routingNodes = clusterState.getRoutingNodes();
final RoutingNodes mutate = routingNodes.mutableCopy();
final DiscoveryNode targetNode = randomFrom(clusterState.nodes().getNodes().values());
final RoutingChangesObserver emptyObserver = new RoutingChangesObserver() {
};
final int unassigned = mutate.unassigned().size();
var unassignedItr = mutate.unassigned().iterator();
while (unassignedItr.hasNext()) {
var shard = unassignedItr.next();
assertThat(shard, notNullValue());
if (shard.index().getName().equals("test-a")) {
// "test-a" only exists in project 1, so we know which project routing table should change
// (and which one should stay the same)
unassignedItr.initialize(targetNode.getId(), null, 0L, emptyObserver);
break;
}
}
assertThat(mutate.unassigned().size(), equalTo(unassigned - 1));
final GlobalRoutingTable fromNodes = originalTable.rebuild(mutate, clusterState.metadata());
final Diff<GlobalRoutingTable> routingTableDiff = fromNodes.diff(originalTable);
final GlobalRoutingTable updatedRouting = routingTableDiff.apply(originalTable);
assertThat(updatedRouting, not(sameInstance(originalTable)));
assertThat(updatedRouting.routingTable(project1), not(sameInstance(originalTable.routingTable(project1))));
assertThat(updatedRouting.routingTable(project2), sameInstance(originalTable.routingTable(project2)));
}
private ClusterState buildClusterState(Map<ProjectId, Set<String>> projectIndices) {
final Metadata.Builder mb = Metadata.builder();
projectIndices.forEach((projectId, indexNames) -> {
final ProjectMetadata.Builder project = ProjectMetadata.builder(projectId);
for (var indexName : indexNames) {
final IndexMetadata.Builder index = createIndexMetadata(indexName);
project.put(index);
}
mb.put(project);
});
final Metadata metadata = mb.build();
final ImmutableOpenMap.Builder<ProjectId, RoutingTable> routingTables = ImmutableOpenMap.builder(projectIndices.size());
projectIndices.forEach((projectId, indexNames) -> {
final RoutingTable.Builder rt = new RoutingTable.Builder();
for (var indexName : indexNames) {
final IndexMetadata indexMetadata = metadata.getProject(projectId).index(indexName);
final IndexRoutingTable indexRouting = new IndexRoutingTable.Builder(
TestShardRoutingRoleStrategies.DEFAULT_ROLE_ONLY,
indexMetadata.getIndex()
).initializeAsNew(indexMetadata).build();
rt.add(indexRouting);
}
routingTables.put(projectId, rt.build());
});
GlobalRoutingTable globalRoutingTable = new GlobalRoutingTable(routingTables.build());
DiscoveryNodes.Builder nodes = new DiscoveryNodes.Builder().add(buildRandomDiscoveryNode()).add(buildRandomDiscoveryNode());
return ClusterState.builder(ClusterName.DEFAULT).metadata(metadata).routingTable(globalRoutingTable).nodes(nodes).build();
}
private DiscoveryNode buildRandomDiscoveryNode() {
return DiscoveryNodeUtils.builder(UUIDs.randomBase64UUID(random()))
.name(randomAlphaOfLength(10))
.ephemeralId(UUIDs.randomBase64UUID(random()))
.build();
}
private IndexMetadata.Builder createIndexMetadata(String indexName) {
Settings indexSettings = Settings.builder()
.put(IndexMetadata.SETTING_VERSION_CREATED, IndexVersion.current())
.put(IndexMetadata.SETTING_INDEX_UUID, randomUUID())
.build();
return new IndexMetadata.Builder(indexName).settings(indexSettings)
.numberOfReplicas(randomIntBetween(0, 2))
.numberOfShards(randomIntBetween(1, 5));
}
@Override
protected Writeable.Reader<GlobalRoutingTable> instanceReader() {
return in -> {
var table = GlobalRoutingTable.readFrom(in);
return new GlobalRoutingTableWithEquals(table);
};
}
@Override
protected GlobalRoutingTable createTestInstance() {
return testRoutingTable(randomIntBetween(0, 10));
}
private GlobalRoutingTableWithEquals testRoutingTable(int projectCount) {
Map<ProjectId, RoutingTable> map = randomMap(
projectCount,
projectCount,
() -> new Tuple<>(randomUniqueProjectId(), randomRoutingTable())
);
return new GlobalRoutingTableWithEquals(randomLong(), ImmutableOpenMap.builder(map).build());
}
@Override
protected GlobalRoutingTable mutateInstance(GlobalRoutingTable instance) {
return new GlobalRoutingTable(mutate(instance.routingTables()));
}
private ImmutableOpenMap<ProjectId, RoutingTable> mutate(ImmutableOpenMap<ProjectId, RoutingTable> routingTables) {
if (routingTables.isEmpty()) {
return ImmutableOpenMap.builder(Map.of(randomProjectIdOrDefault(), randomRoutingTable())).build();
}
final Set<ProjectId> existingProjects = routingTables.keySet();
final ImmutableOpenMap.Builder<ProjectId, RoutingTable> builder = ImmutableOpenMap.builder(routingTables);
switch (randomIntBetween(1, 3)) {
case 1 -> {
final var project = randomFrom(existingProjects);
final var modified = mutate(routingTables.get(project));
builder.put(project, modified);
}
case 2 -> {
final var project = randomValueOtherThanMany(existingProjects::contains, GlobalRoutingTableTests::randomProjectIdOrDefault);
final var routingTable = randomRoutingTable();
builder.put(project, routingTable);
}
case 3 -> {
final var project = randomFrom(existingProjects);
builder.remove(project);
}
}
return builder.build();
}
private RoutingTable mutate(RoutingTable routingTable) {
if (routingTable.indicesRouting().isEmpty() || randomBoolean()) {
return addIndices(randomIntBetween(1, 3), new RoutingTable.Builder(routingTable));
} else {
final RoutingTable.Builder builder = new RoutingTable.Builder((routingTable));
builder.remove(randomFrom(routingTable.indicesRouting().keySet()));
return builder.build();
}
}
private RoutingTable randomRoutingTable() {
return addIndices(randomIntBetween(0, 10), new RoutingTable.Builder());
}
private static RoutingTable addIndices(int indexCount, RoutingTable.Builder builder) {
for (int i = 0; i < indexCount; i++) {
Index index = new Index(randomAlphaOfLengthBetween(3, 24), randomUUID());
builder.add(IndexRoutingTable.builder(index).build());
}
return builder.build();
}
}
|
GlobalRoutingTableWithEquals
|
java
|
assertj__assertj-core
|
assertj-core/src/test/java/org/assertj/core/api/BDDAssertions_then_Test.java
|
{
"start": 2517,
"end": 3374
}
|
class ____ {
private AssertFactory<String, StringAssert> stringAssertFactory = StringAssert::new;
private AssertFactory<Integer, IntegerAssert> integerAssertFactory = IntegerAssert::new;
@Test
void then_char() {
then('z').isGreaterThan('a');
}
@Test
void then_Character() {
then(Character.valueOf('A')).isEqualTo(Character.valueOf('A'));
}
@Test
void then_char_array() {
then(new char[] { 'a', 'b', 'c' }).contains('b');
}
@Test
void then_CharSequence() {
then("abc".subSequence(0, 1)).contains("a");
}
@Test
void then_Class() {
then("Foo".getClass()).isEqualTo(String.class);
}
@Test
void then_ClassLoader_succeeds() {
then("Foo".getClass().getClassLoader()).isEqualTo(String.class.getClassLoader());
}
@Test
void should_delegate_to_assert_comparable() {
|
BDDAssertions_then_Test
|
java
|
spring-projects__spring-framework
|
spring-core/src/test/java/org/springframework/core/convert/support/GenericConversionServiceTests.java
|
{
"start": 33792,
"end": 33968
}
|
class ____ implements Converter<String, Color> {
@Override
public Color convert(String source) {
return Color.decode(source.trim());
}
}
private static
|
ColorConverter
|
java
|
apache__camel
|
core/camel-core/src/test/java/org/apache/camel/processor/NormalizerTest.java
|
{
"start": 1064,
"end": 2832
}
|
class ____ extends ContextTestSupport {
@Test
public void testSendToFirstWhen() throws Exception {
String employeeBody1 = "<employee><name>Jon</name></employee>";
String employeeBody2 = "<employee><name>Hadrian</name></employee>";
String employeeBody3 = "<employee><name>Claus</name></employee>";
String customerBody = "<customer name=\"James\"/>";
MockEndpoint result = getMockEndpoint("mock:result");
result.expectedMessageCount(4);
result.expectedBodiesReceivedInAnyOrder("<person name=\"Jon\"/>", "<person name=\"Hadrian\"/>",
"<person name=\"Claus\"/>", "<person name=\"James\"/>");
template.sendBody("direct:start", employeeBody1);
template.sendBody("direct:start", employeeBody2);
template.sendBody("direct:start", employeeBody3);
template.sendBody("direct:start", customerBody);
assertMockEndpointsSatisfied();
}
@Override
protected Registry createCamelRegistry() throws Exception {
Registry answer = super.createCamelRegistry();
answer.bind("normalizer", new MyNormalizer());
return answer;
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
public void configure() {
// START SNIPPET: example
// we need to normalize two types of incoming messages
from("direct:start").choice().when().xpath("/employee").to("bean:normalizer?method=employeeToPerson").when()
.xpath("/customer")
.to("bean:normalizer?method=customerToPerson").end().to("mock:result");
// END SNIPPET: example
}
};
}
}
|
NormalizerTest
|
java
|
google__error-prone
|
check_api/src/test/java/com/google/errorprone/util/testdata/TargetTypeTest.java
|
{
"start": 16372,
"end": 16447
}
|
class ____<T> implements A<T> {
@Override
public void foo() {}
}
|
B
|
java
|
apache__camel
|
components/camel-hl7/src/main/java/org/apache/camel/component/hl7/Hl7TerserAnnotationExpressionFactory.java
|
{
"start": 1226,
"end": 2513
}
|
class ____ extends DefaultAnnotationExpressionFactory {
@Override
public Expression createExpression(
CamelContext camelContext, Annotation annotation, LanguageAnnotation languageAnnotation,
Class<?> expressionReturnType) {
String hl7 = getExpressionFromAnnotation(annotation);
Class<?> resultType = getResultType(annotation);
if (resultType.equals(Object.class)) {
resultType = expressionReturnType;
}
String source = getSource(annotation);
Expression input = ExpressionBuilder.singleInputExpression(source);
Expression exp = Hl7TerserLanguage.terser(input, hl7);
return ExpressionBuilder.convertToExpression(exp, resultType);
}
protected Class<?> getResultType(Annotation annotation) {
return (Class<?>) getAnnotationObjectValue(annotation, "resultType");
}
protected String getSource(Annotation annotation) {
String answer = null;
try {
answer = (String) getAnnotationObjectValue(annotation, "source");
} catch (Exception e) {
// Do Nothing
}
if (answer != null && answer.isBlank()) {
return null;
}
return answer;
}
}
|
Hl7TerserAnnotationExpressionFactory
|
java
|
quarkusio__quarkus
|
core/deployment/src/main/java/io/quarkus/deployment/pkg/builditem/JarBuildItem.java
|
{
"start": 509,
"end": 2262
}
|
class ____ extends SimpleBuildItem {
private final Path path;
private final Path originalArtifact;
private final Path libraryDir;
private final PackageConfig.JarConfig.JarType type;
private final String classifier;
private final ApplicationManifestConfig manifestConfig;
public JarBuildItem(Path path, Path originalArtifact, Path libraryDir, PackageConfig.JarConfig.JarType type,
String classifier) {
this(path, originalArtifact, libraryDir, type, classifier, null);
}
public JarBuildItem(Path path, Path originalArtifact, Path libraryDir, PackageConfig.JarConfig.JarType type,
String classifier, ApplicationManifestConfig manifestConfig) {
this.path = path;
this.originalArtifact = originalArtifact;
this.libraryDir = libraryDir;
this.type = type;
this.classifier = classifier;
this.manifestConfig = manifestConfig;
}
public boolean isUberJar() {
return libraryDir == null;
}
public Path getPath() {
return path;
}
public Path getLibraryDir() {
return libraryDir;
}
public Path getOriginalArtifact() {
return originalArtifact;
}
public PackageConfig.JarConfig.JarType getType() {
return type;
}
public String getClassifier() {
return classifier;
}
public ApplicationManifestConfig getManifestConfig() {
return manifestConfig;
}
public JarResult toJarResult() {
return toJarResult(null);
}
public JarResult toJarResult(Collection<SbomResult> sboms) {
return new JarResult(path, originalArtifact, libraryDir, type == MUTABLE_JAR,
classifier, sboms);
}
}
|
JarBuildItem
|
java
|
elastic__elasticsearch
|
server/src/internalClusterTest/java/org/elasticsearch/health/GetHealthActionIT.java
|
{
"start": 5159,
"end": 5425
}
|
class ____ extends TestHealthIndicatorService {
public IlmHealthIndicatorService(ClusterService clusterService) {
super(clusterService, ILM_INDICATOR_NAME, ILM_HEALTH_STATUS_SETTING);
}
}
public static final
|
IlmHealthIndicatorService
|
java
|
apache__avro
|
lang/java/avro/src/main/java/org/apache/avro/reflect/CustomEncoding.java
|
{
"start": 1294,
"end": 1397
}
|
class ____ objects that can be serialized with this encoder /
* decoder.
*/
public abstract
|
of
|
java
|
elastic__elasticsearch
|
modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/FinnishAnalyzerProvider.java
|
{
"start": 883,
"end": 1453
}
|
class ____ extends AbstractIndexAnalyzerProvider<FinnishAnalyzer> {
private final FinnishAnalyzer analyzer;
FinnishAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) {
super(name);
analyzer = new FinnishAnalyzer(
Analysis.parseStopWords(env, settings, FinnishAnalyzer.getDefaultStopSet()),
Analysis.parseStemExclusion(settings, CharArraySet.EMPTY_SET)
);
}
@Override
public FinnishAnalyzer get() {
return this.analyzer;
}
}
|
FinnishAnalyzerProvider
|
java
|
resilience4j__resilience4j
|
resilience4j-all/src/main/java/io/github/resilience4j/decorators/Decorators.java
|
{
"start": 11380,
"end": 14679
}
|
class ____<T> {
private Callable<T> callable;
private DecorateCallable(Callable<T> callable) {
this.callable = callable;
}
public DecorateCallable<T> withTimer(Timer timer) {
callable = Timer.decorateCallable(timer, callable);
return this;
}
public DecorateCallable<T> withCircuitBreaker(CircuitBreaker circuitBreaker) {
callable = CircuitBreaker.decorateCallable(circuitBreaker, callable);
return this;
}
public DecorateCallable<T> withRetry(Retry retryContext) {
callable = Retry.decorateCallable(retryContext, callable);
return this;
}
public DecorateCallable<T> withRateLimiter(RateLimiter rateLimiter) {
return withRateLimiter(rateLimiter, 1);
}
public DecorateCallable<T> withRateLimiter(RateLimiter rateLimiter, int permits) {
callable = RateLimiter.decorateCallable(rateLimiter, permits, callable);
return this;
}
public DecorateCallable<T> withBulkhead(Bulkhead bulkhead) {
callable = Bulkhead.decorateCallable(bulkhead, callable);
return this;
}
public DecorateCallable<T> withFallback(BiFunction<T, Throwable, T> handler) {
callable = CallableUtils.andThen(callable, handler);
return this;
}
public DecorateCallable<T> withFallback(Predicate<T> resultPredicate, UnaryOperator<T> resultHandler) {
callable = CallableUtils.recover(callable, resultPredicate, resultHandler);
return this;
}
public DecorateCallable<T> withFallback(List<Class<? extends Throwable>> exceptionTypes, Function<Throwable, T> exceptionHandler) {
callable = CallableUtils.recover(callable, exceptionTypes, exceptionHandler);
return this;
}
public DecorateCallable<T> withFallback(Function<Throwable, T> exceptionHandler) {
callable = CallableUtils.recover(callable, exceptionHandler);
return this;
}
public <X extends Throwable> DecorateCallable<T> withFallback(Class<X> exceptionType, Function<Throwable, T> exceptionHandler) {
callable = CallableUtils.recover(callable, exceptionType, exceptionHandler);
return this;
}
public DecorateCompletionStage<T> withThreadPoolBulkhead(ThreadPoolBulkhead threadPoolBulkhead) {
return Decorators.ofCompletionStage(getCompletionStageSupplier(threadPoolBulkhead));
}
private Supplier<CompletionStage<T>> getCompletionStageSupplier(
ThreadPoolBulkhead threadPoolBulkhead) {
return () -> {
try {
return threadPoolBulkhead.executeCallable(callable);
} catch (BulkheadFullException ex) {
CompletableFuture<T> future = new CompletableFuture<>();
future.completeExceptionally(ex);
return future;
}
};
}
public Callable<T> decorate() {
return callable;
}
public T call() throws Exception {
return callable.call();
}
}
|
DecorateCallable
|
java
|
apache__camel
|
components/camel-opentelemetry-metrics/src/main/java/org/apache/camel/opentelemetry/metrics/AbstractOpenTelemetryProducer.java
|
{
"start": 1795,
"end": 5280
}
|
class ____<T> extends DefaultProducer {
private static final String HEADER_PATTERN = HEADER_PREFIX + "*";
private Attributes attributes;
protected AbstractOpenTelemetryProducer(OpenTelemetryEndpoint endpoint) {
super(endpoint);
}
@Override
protected void doBuild() throws Exception {
super.doBuild();
this.attributes = getEndpoint().createAttributes();
}
@Override
public OpenTelemetryEndpoint getEndpoint() {
return (OpenTelemetryEndpoint) super.getEndpoint();
}
@Override
public void process(Exchange exchange) {
Message in = exchange.getIn();
String defaultMetricsName = simple(exchange, getEndpoint().getMetricName(), String.class);
String finalMetricsName = getStringHeader(in, HEADER_METRIC_NAME, defaultMetricsName);
String defaultMetricsDescription = simple(exchange, getEndpoint().getMetricsDescription(), String.class);
String finalMetricsDescription = getStringHeader(in, HEADER_METRIC_DESCRIPTION, defaultMetricsDescription);
Map<AttributeKey<?>, Object> map = new HashMap<>(attributes.asMap());
map.putAll(getAttributesHeader(in, HEADER_METRIC_ATTRIBUTES, Attributes.empty()).asMap());
AttributesBuilder ab = Attributes.builder();
for (Map.Entry<AttributeKey<?>, Object> entry : map.entrySet()) {
ab.put(simple(exchange, entry.getKey().toString(), String.class),
simple(exchange, entry.getValue().toString(), String.class));
}
ab.put(CAMEL_CONTEXT_ATTRIBUTE, getEndpoint().getCamelContext().getName());
try {
doProcess(exchange, finalMetricsName, finalMetricsDescription, ab.build());
} catch (Exception e) {
exchange.setException(e);
} finally {
clearMetricsHeaders(in);
}
}
protected void doProcess(Exchange exchange, String metricsName, String description, Attributes attributes) {
doProcess(exchange, metricsName, getInstrument(metricsName, description), attributes);
}
protected abstract T getInstrument(String name, String description);
protected abstract void doProcess(
Exchange exchange, String metricsName, T meter, Attributes attributes);
protected <C> C simple(Exchange exchange, String expression, Class<C> clazz) {
if (expression != null) {
Language language = exchange.getContext().resolveLanguage("simple");
Expression simple = language.createExpression(expression);
if (simple != null) {
return simple.evaluate(exchange, clazz);
}
}
return getEndpoint().getCamelContext().getTypeConverter().convertTo(clazz, expression);
}
protected String getStringHeader(Message in, String header, String defaultValue) {
String headerValue = in.getHeader(header, String.class);
return ObjectHelper.isNotEmpty(headerValue) ? headerValue : defaultValue;
}
protected Long getLongHeader(Message in, String header, Long defaultValue) {
return in.getHeader(header, defaultValue, Long.class);
}
protected Attributes getAttributesHeader(Message in, String header, Attributes defaultAttributes) {
return in.getHeader(header, defaultAttributes, Attributes.class);
}
protected boolean clearMetricsHeaders(Message in) {
return in.removeHeaders(HEADER_PATTERN);
}
}
|
AbstractOpenTelemetryProducer
|
java
|
spring-projects__spring-framework
|
spring-web/src/test/java/org/springframework/web/multipart/support/DefaultMultipartHttpServletRequestTests.java
|
{
"start": 1127,
"end": 2885
}
|
class ____ {
private final MockHttpServletRequest servletRequest = new MockHttpServletRequest();
private final Map<String, String[]> multipartParams = new LinkedHashMap<>();
private final MultiValueMap<String, String> queryParams = new LinkedMultiValueMap<>();
@Test // SPR-16590
public void parameterValues() {
this.multipartParams.put("key", new String[] {"p"});
this.queryParams.add("key", "q");
String[] values = createMultipartRequest().getParameterValues("key");
assertThat(values).isEqualTo(new String[] {"p", "q"});
}
@Test // SPR-16590
public void parameterMap() {
this.multipartParams.put("key1", new String[] {"p1"});
this.multipartParams.put("key2", new String[] {"p2"});
this.queryParams.add("key1", "q1");
this.queryParams.add("key3", "q3");
Map<String, String[]> map = createMultipartRequest().getParameterMap();
assertThat(map).hasSize(3);
assertThat(map.get("key1")).isEqualTo(new String[] {"p1", "q1"});
assertThat(map.get("key2")).isEqualTo(new String[] {"p2"});
assertThat(map.get("key3")).isEqualTo(new String[] {"q3"});
}
private DefaultMultipartHttpServletRequest createMultipartRequest() {
insertQueryParams();
return new DefaultMultipartHttpServletRequest(this.servletRequest, new LinkedMultiValueMap<>(),
this.multipartParams, new HashMap<>());
}
private void insertQueryParams() {
StringBuilder query = new StringBuilder();
for (String key : this.queryParams.keySet()) {
for (String value : this.queryParams.get(key)) {
this.servletRequest.addParameter(key, value);
query.append(query.length() > 0 ? "&" : "").append(key).append('=').append(value);
}
}
this.servletRequest.setQueryString(query.toString());
}
}
|
DefaultMultipartHttpServletRequestTests
|
java
|
mybatis__mybatis-3
|
src/test/java/org/apache/ibatis/submitted/enum_interface_type_handler/HasValueEnumTypeHandler.java
|
{
"start": 1011,
"end": 1512
}
|
class ____<E extends Enum<E> & HasValue> extends BaseTypeHandler<E> {
private Class<E> type;
private final E[] enums;
public HasValueEnumTypeHandler(Class<E> type) {
if (type == null) {
throw new IllegalArgumentException("Type argument cannot be null");
}
this.type = type;
this.enums = type.getEnumConstants();
if (!type.isInterface() && this.enums == null) {
throw new IllegalArgumentException(type.getSimpleName() + " does not represent an
|
HasValueEnumTypeHandler
|
java
|
apache__kafka
|
tools/src/test/java/org/apache/kafka/tools/GetOffsetShellTest.java
|
{
"start": 8142,
"end": 21617
}
|
class ____ {
private final String name;
private final int partition;
private final Long offset;
public Row(String name, int partition, Long offset) {
this.name = name;
this.partition = partition;
this.offset = offset;
}
@Override
public String toString() {
return "Row[name:" + name + ",partition:" + partition + ",offset:" + offset;
}
@Override
public boolean equals(Object o) {
if (o == this) return true;
if (!(o instanceof Row r)) return false;
return name.equals(r.name) && partition == r.partition && Objects.equals(offset, r.offset);
}
@Override
public int hashCode() {
return Objects.hash(name, partition, offset);
}
}
@ClusterTest
public void testNoFilterOptions() {
setUp();
List<Row> output = executeAndParse();
assertEquals(expectedTestTopicOffsets(), output);
}
@ClusterTest
public void testInternalExcluded() {
setUp();
List<Row> output = executeAndParse("--exclude-internal-topics");
assertEquals(expectedTestTopicOffsets(), output);
}
@ClusterTest
public void testTopicNameArg() {
setUp();
IntStream.range(1, topicCount + 1).forEach(i -> {
List<Row> offsets = executeAndParse("--topic", getTopicName(i));
assertEquals(expectedOffsetsForTopic(i), offsets, () -> "Offset output did not match for " + getTopicName(i));
}
);
}
@ClusterTest
public void testTopicPatternArg() {
setUp();
List<Row> offsets = executeAndParse("--topic", "topic.*");
assertEquals(expectedTestTopicOffsets(), offsets);
}
@ClusterTest
public void testPartitionsArg() {
setUp();
List<Row> offsets = executeAndParse("--partitions", "0,1");
assertEquals(expectedTestTopicOffsets().stream().filter(r -> r.partition <= 1).toList(), offsets);
}
@ClusterTest
public void testTopicPatternArgWithPartitionsArg() {
setUp();
List<Row> offsets = executeAndParse("--topic", "topic.*", "--partitions", "0,1");
assertEquals(expectedTestTopicOffsets().stream().filter(r -> r.partition <= 1).toList(), offsets);
}
@ClusterTest
public void testTopicPartitionsArg() {
setUp();
createConsumerAndPoll();
List<Row> offsets = executeAndParse("--topic-partitions", "topic1:0,topic2:1,topic(3|4):2,__.*:3");
List<Row> expected = List.of(
new Row("__consumer_offsets", 3, 0L),
new Row("topic1", 0, 1L),
new Row("topic2", 1, 2L),
new Row("topic3", 2, 3L),
new Row("topic4", 2, 4L)
);
assertEquals(expected, offsets);
}
@ClusterTest
public void testGetLatestOffsets() {
setUp();
for (String time : new String[] {"-1", "latest"}) {
List<Row> offsets = executeAndParse("--topic-partitions", "topic.*:0", "--time", time);
List<Row> expected = List.of(
new Row("topic1", 0, 1L),
new Row("topic2", 0, 2L),
new Row("topic3", 0, 3L),
new Row("topic4", 0, 4L)
);
assertEquals(expected, offsets);
}
}
@ClusterTest
public void testGetEarliestOffsets() {
setUp();
for (String time : new String[] {"-2", "earliest"}) {
List<Row> offsets = executeAndParse("--topic-partitions", "topic.*:0", "--time", time);
List<Row> expected = List.of(
new Row("topic1", 0, 0L),
new Row("topic2", 0, 0L),
new Row("topic3", 0, 0L),
new Row("topic4", 0, 0L)
);
assertEquals(expected, offsets);
}
}
@ClusterTest
public void testGetOffsetsByMaxTimestamp() {
setUp();
for (String time : new String[] {"-3", "max-timestamp"}) {
List<Row> offsets = executeAndParse("--topic-partitions", "topic.*", "--time", time);
offsets.forEach(
row -> assertTrue(row.offset >= 0 && row.offset <= Integer.parseInt(row.name.replace("topic", "")))
);
}
}
@ClusterTemplate("withRemoteStorage")
public void testGetOffsetsByEarliestLocalSpec() throws InterruptedException {
setUp();
setUpRemoteLogTopics();
for (String time : new String[] {"-4", "earliest-local"}) {
// test topics disable remote log storage
// as remote log disabled, broker return the same result as earliest offset
TestUtils.waitForCondition(() ->
List.of(
new Row("topic1", 0, 0L),
new Row("topic2", 0, 0L),
new Row("topic3", 0, 0L),
new Row("topic4", 0, 0L))
.equals(executeAndParse("--topic-partitions", "topic\\d+.*:0", "--time", time)),
"testGetOffsetsByEarliestLocalSpec get topics with remote log disabled result not match");
// test topics enable remote log storage
TestUtils.waitForCondition(() ->
List.of(
new Row("topicRLS1", 0, 0L),
new Row("topicRLS2", 0, 1L),
new Row("topicRLS3", 0, 2L),
new Row("topicRLS4", 0, 3L))
.equals(executeAndParse("--topic-partitions", "topicRLS.*:0", "--time", time)),
"testGetOffsetsByEarliestLocalSpec get topics with remote log enabled result not match");
}
}
@ClusterTemplate("withRemoteStorage")
public void testGetOffsetsByLatestTieredSpec() throws InterruptedException {
setUp();
setUpRemoteLogTopics();
for (String time : new String[] {"-5", "latest-tiered"}) {
// test topics disable remote log storage
// as remote log not enabled, broker return unknown offset for each topic partition and these
// unknown offsets are ignored by GetOffsetShell hence we have empty result here.
assertEquals(List.of(),
executeAndParse("--topic-partitions", "topic\\d+:0", "--time", time));
// test topics enable remote log storage
// topicRLS1 has no result because there's no log segments being uploaded to the remote storage
TestUtils.waitForCondition(() ->
List.of(
new Row("topicRLS2", 0, 0L),
new Row("topicRLS3", 0, 1L),
new Row("topicRLS4", 0, 2L))
.equals(executeAndParse("--topic-partitions", "topicRLS.*:0", "--time", time)),
"testGetOffsetsByLatestTieredSpec result not match");
}
}
@ClusterTemplate("withRemoteStorage")
public void testGetOffsetsByEarliestTieredSpec() throws InterruptedException {
setUp();
setUpRemoteLogTopics();
for (String time : new String[] {"-6", "earliest-pending-upload"}) {
// test topics disable remote log storage
// as remote log disabled, broker returns unknown offset of each topic partition and these
// unknown offsets are ignore by GetOffsetShell, hence we have empty result here.
assertEquals(List.of(),
executeAndParse("--topic-partitions", "topic\\d+:0", "--time", time));
// test topics enable remote log storage
TestUtils.waitForCondition(() ->
List.of(
new Row("topicRLS1", 0, 0L),
new Row("topicRLS2", 0, 1L),
new Row("topicRLS3", 0, 2L),
new Row("topicRLS4", 0, 3L))
.equals(executeAndParse("--topic-partitions", "topicRLS.*:0", "--time", time)),
"testGetOffsetsByEarliestTieredSpec result not match");
}
}
@ClusterTest
public void testGetOffsetsByTimestamp() {
setUp();
String time = String.valueOf(System.currentTimeMillis() / 2);
List<Row> offsets = executeAndParse("--topic-partitions", "topic.*:0", "--time", time);
List<Row> expected = List.of(
new Row("topic1", 0, 0L),
new Row("topic2", 0, 0L),
new Row("topic3", 0, 0L),
new Row("topic4", 0, 0L)
);
assertEquals(expected, offsets);
}
@ClusterTest
public void testNoOffsetIfTimestampGreaterThanLatestRecord() {
setUp();
String time = String.valueOf(System.currentTimeMillis() * 2);
List<Row> offsets = executeAndParse("--topic-partitions", "topic.*", "--time", time);
assertEquals(new ArrayList<Row>(), offsets);
}
@ClusterTest
public void testTopicPartitionsArgWithInternalExcluded() {
setUp();
List<Row> offsets = executeAndParse("--topic-partitions", "topic1:0,topic2:1,topic(3|4):2,__.*:3", "--exclude-internal-topics");
List<Row> expected = List.of(
new Row("topic1", 0, 1L),
new Row("topic2", 1, 2L),
new Row("topic3", 2, 3L),
new Row("topic4", 2, 4L)
);
assertEquals(expected, offsets);
}
@ClusterTest
public void testTopicPartitionsArgWithInternalIncluded() {
setUp();
createConsumerAndPoll();
List<Row> offsets = executeAndParse("--topic-partitions", "__.*:0");
assertEquals(List.of(new Row("__consumer_offsets", 0, 0L)), offsets);
}
@ClusterTest
public void testTopicPartitionsNotFoundForNonExistentTopic() {
assertExitCodeIsOne("--topic", "some_nonexistent_topic");
}
@ClusterTest
public void testTopicPartitionsNotFoundForExcludedInternalTopic() {
assertExitCodeIsOne("--topic", "some_nonexistent_topic:*");
}
@ClusterTest
public void testTopicPartitionsNotFoundForNonMatchingTopicPartitionPattern() {
assertExitCodeIsOne("--topic-partitions", "__consumer_offsets", "--exclude-internal-topics");
}
@ClusterTest
public void testTopicPartitionsFlagWithTopicFlagCauseExit() {
assertExitCodeIsOne("--topic-partitions", "__consumer_offsets", "--topic", "topic1");
}
@ClusterTest
public void testTopicPartitionsFlagWithPartitionsFlagCauseExit() {
assertExitCodeIsOne("--topic-partitions", "__consumer_offsets", "--partitions", "0");
}
@ClusterTest
public void testPrintHelp() {
Exit.setExitProcedure((statusCode, message) -> { });
try {
String out = ToolsTestUtils.captureStandardErr(() -> GetOffsetShell.mainNoExit("--help"));
assertTrue(out.startsWith(GetOffsetShell.USAGE_TEXT));
} finally {
Exit.resetExitProcedure();
}
}
@ClusterTest
public void testPrintVersion() {
String out = ToolsTestUtils.captureStandardOut(() -> GetOffsetShell.mainNoExit("--version"));
assertEquals(AppInfoParser.getVersion(), out);
}
private void assertExitCodeIsOne(String... args) {
final int[] exitStatus = new int[1];
Exit.setExitProcedure((statusCode, message) -> {
exitStatus[0] = statusCode;
throw new RuntimeException();
});
try {
GetOffsetShell.main(addBootstrapServer(args));
} catch (RuntimeException ignored) {
} finally {
Exit.resetExitProcedure();
}
assertEquals(1, exitStatus[0]);
}
private List<Row> expectedOffsetsWithInternal() {
List<Row> consOffsets = IntStream.range(0, 4)
.mapToObj(i -> new Row("__consumer_offsets", i, 0L))
.toList();
return Stream.concat(consOffsets.stream(), expectedTestTopicOffsets().stream()).toList();
}
private List<Row> expectedTestTopicOffsets() {
List<Row> offsets = new ArrayList<>(topicCount + 1);
for (int i = 0; i < topicCount + 1; i++) {
offsets.addAll(expectedOffsetsForTopic(i));
}
return offsets;
}
private List<Row> expectedOffsetsForTopic(int i) {
String name = getTopicName(i);
return IntStream.range(0, i).mapToObj(p -> new Row(name, p, (long) i)).toList();
}
private List<Row> executeAndParse(String... args) {
String out = ToolsTestUtils.captureStandardOut(() -> GetOffsetShell.mainNoExit(addBootstrapServer(args)));
return Arrays.stream(out.split(System.lineSeparator()))
.map(i -> i.split(":"))
.filter(i -> i.length >= 2)
.map(line -> new Row(line[0], Integer.parseInt(line[1]), (line.length == 2 || line[2].isEmpty()) ? null : Long.parseLong(line[2])))
.toList();
}
private String[] addBootstrapServer(String... args) {
ArrayList<String> newArgs = new ArrayList<>(List.of(args));
newArgs.add("--bootstrap-server");
newArgs.add(cluster.bootstrapServers());
return newArgs.toArray(new String[0]);
}
}
|
Row
|
java
|
hibernate__hibernate-orm
|
hibernate-envers/src/test/java/org/hibernate/orm/test/envers/integration/cache/QueryCache.java
|
{
"start": 765,
"end": 2272
}
|
class ____ {
private Integer id1;
@BeforeClassTemplate
public void initData(EntityManagerFactoryScope scope) {
// Revision 1
scope.inTransaction( em -> {
IntTestEntity ite = new IntTestEntity( 10 );
em.persist( ite );
id1 = ite.getId();
} );
// Revision 2
scope.inTransaction( em -> {
IntTestEntity ite = em.find( IntTestEntity.class, id1 );
ite.setNumber( 20 );
} );
}
@Test
public void testCacheFindAfterRevisionsOfEntityQuery(EntityManagerFactoryScope scope) {
scope.inEntityManager( em -> {
final var auditReader = AuditReaderFactory.get( em );
List entsFromQuery = auditReader.createQuery()
.forRevisionsOfEntity( IntTestEntity.class, true, false )
.getResultList();
IntTestEntity entFromFindRev1 = auditReader.find( IntTestEntity.class, id1, 1 );
IntTestEntity entFromFindRev2 = auditReader.find( IntTestEntity.class, id1, 2 );
assertSame( entsFromQuery.get( 0 ), entFromFindRev1 );
assertSame( entsFromQuery.get( 1 ), entFromFindRev2 );
} );
}
@Test
public void testCacheFindAfterEntitiesAtRevisionQuery(EntityManagerFactoryScope scope) {
scope.inEntityManager( em -> {
final var auditReader = AuditReaderFactory.get( em );
IntTestEntity entFromQuery = (IntTestEntity) auditReader.createQuery()
.forEntitiesAtRevision( IntTestEntity.class, 1 )
.getSingleResult();
IntTestEntity entFromFind = auditReader.find( IntTestEntity.class, id1, 1 );
assertSame( entFromQuery, entFromFind );
} );
}
}
|
QueryCache
|
java
|
playframework__playframework
|
web/play-java-forms/src/main/java/play/data/FormFactory.java
|
{
"start": 1476,
"end": 1819
}
|
class ____ map to a form.
* @return a new form that wraps the specified class.
*/
public <T> Form<T> form(String name, Class<T> clazz) {
return new Form<>(name, clazz, messagesApi, formatters, validatorFactory, config);
}
/**
* @param <T> the type of value in the form.
* @param name the form's name
* @param clazz the
|
to
|
java
|
quarkusio__quarkus
|
extensions/resteasy-reactive/rest-client/deployment/src/test/java/io/quarkus/rest/client/reactive/multipart/MultipartProgrammaticTest.java
|
{
"start": 2020,
"end": 2207
}
|
interface ____ {
@POST
@Consumes(MediaType.MULTIPART_FORM_DATA)
String postMultipart(ClientMultipartForm form);
}
@Path("/multipart")
public static
|
Client
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.