language
stringclasses 1
value | repo
stringclasses 60
values | path
stringlengths 22
294
| class_span
dict | source
stringlengths 13
1.16M
| target
stringlengths 1
113
|
|---|---|---|---|---|---|
java
|
spring-cloud__spring-cloud-gateway
|
spring-cloud-gateway-proxyexchange-webmvc/src/test/java/org/springframework/cloud/gateway/mvc/GetWithBodyRequestTests.java
|
{
"start": 5317,
"end": 5902
}
|
class ____ {
private URI home;
public void setHome(URI home) {
this.home = home;
}
@GetMapping("/proxy/{id}")
public ResponseEntity<?> proxyFoos(@PathVariable Integer id, ProxyExchange<?> proxy) throws Exception {
return proxy.uri(home.toString() + "/foos/" + id).get();
}
@GetMapping("/proxy/get-with-body-request")
public ResponseEntity<?> proxyFooWithBody(@RequestBody Foo foo, ProxyExchange<?> proxy) throws Exception {
return proxy.uri(home.toString() + "/foo/get-with-body-request").get();
}
}
@RestController
static
|
ProxyController
|
java
|
spring-projects__spring-framework
|
spring-web/src/main/java/org/springframework/web/accept/ApiVersionParser.java
|
{
"start": 848,
"end": 1075
}
|
interface ____<V extends Comparable<V>> {
/**
* Parse the version into an Object.
* @param version the value to parse
* @return an Object that represents the version
*/
V parseVersion(String version);
}
|
ApiVersionParser
|
java
|
elastic__elasticsearch
|
x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/ValueExtractorForAggregateMetricDouble.java
|
{
"start": 568,
"end": 1957
}
|
class ____ implements ValueExtractor {
private final AggregateMetricDoubleBlock block;
ValueExtractorForAggregateMetricDouble(TopNEncoder encoder, AggregateMetricDoubleBlock block) {
assert encoder == TopNEncoder.DEFAULT_UNSORTABLE;
this.block = block;
}
@Override
public void writeValue(BreakingBytesRefBuilder values, int position) {
TopNEncoder.DEFAULT_UNSORTABLE.encodeVInt(1, values);
for (DoubleBlock doubleBlock : List.of(block.minBlock(), block.maxBlock(), block.sumBlock())) {
if (doubleBlock.isNull(position)) {
TopNEncoder.DEFAULT_UNSORTABLE.encodeBoolean(false, values);
} else {
TopNEncoder.DEFAULT_UNSORTABLE.encodeBoolean(true, values);
TopNEncoder.DEFAULT_UNSORTABLE.encodeDouble(doubleBlock.getDouble(position), values);
}
}
IntBlock intBlock = block.countBlock();
if (intBlock.isNull(position)) {
TopNEncoder.DEFAULT_UNSORTABLE.encodeBoolean(false, values);
} else {
TopNEncoder.DEFAULT_UNSORTABLE.encodeBoolean(true, values);
TopNEncoder.DEFAULT_UNSORTABLE.encodeInt(intBlock.getInt(position), values);
}
}
@Override
public String toString() {
return "ValueExtractorForAggregateMetricDouble";
}
}
|
ValueExtractorForAggregateMetricDouble
|
java
|
hibernate__hibernate-orm
|
hibernate-testing/src/main/java/org/hibernate/testing/junit4/BaseUnitTestCase.java
|
{
"start": 955,
"end": 2574
}
|
class ____ {
protected final Logger log = Logger.getLogger( getClass() );
private static boolean enableConnectionLeakDetection = Boolean.TRUE.toString()
.equals( System.getenv( "HIBERNATE_CONNECTION_LEAK_DETECTION" ) );
private ConnectionLeakUtil connectionLeakUtil;
protected final ExecutorService executorService = Executors.newSingleThreadExecutor();
@Rule
public TestRule globalTimeout = Timeout.millis( TimeUnit.MINUTES.toMillis( 30 ) ); // no test should run longer than 30 minutes
public BaseUnitTestCase() {
if ( enableConnectionLeakDetection ) {
connectionLeakUtil = new ConnectionLeakUtil();
}
}
@AfterClassOnce
public void assertNoLeaks() {
if ( enableConnectionLeakDetection ) {
connectionLeakUtil.assertNoLeaks();
}
}
@After
public void releaseTransactions() {
if ( JtaStatusHelper.isActive( TestingJtaPlatformImpl.INSTANCE.getTransactionManager() ) ) {
log.warn( "Cleaning up unfinished transaction" );
try {
TestingJtaPlatformImpl.INSTANCE.getTransactionManager().rollback();
}
catch (SystemException ignored) {
}
}
}
protected void sleep(long millis) {
try {
Thread.sleep( millis );
}
catch ( InterruptedException e ) {
Thread.interrupted();
}
}
protected Future<?> executeAsync(Runnable callable) {
return executorService.submit(callable);
}
protected void executeSync(Runnable callable) {
try {
executeAsync( callable ).get();
}
catch (InterruptedException e) {
Thread.currentThread().interrupt();
}
catch (ExecutionException e) {
throw new RuntimeException( e.getCause() );
}
}
}
|
BaseUnitTestCase
|
java
|
apache__hadoop
|
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/launcher/TestContainerLauncherImpl.java
|
{
"start": 22121,
"end": 22339
}
|
class ____ extends YarnException {
public ContainerException(String message) {
super(message);
}
@Override
public YarnException getCause() {
return null;
}
}
}
|
ContainerException
|
java
|
quarkusio__quarkus
|
devtools/gradle/gradle-model/src/main/java/io/quarkus/gradle/tooling/GradleApplicationModelBuilder.java
|
{
"start": 3168,
"end": 34212
}
|
class ____ implements ParameterizedToolingModelBuilder<ModelParameter> {
private static final String MAIN_RESOURCES_OUTPUT = "build/resources/main";
private static final String CLASSES_OUTPUT = "build/classes";
/* @formatter:off */
private static final byte COLLECT_TOP_EXTENSION_RUNTIME_NODES = 0b001;
private static final byte COLLECT_DIRECT_DEPS = 0b010;
private static final byte COLLECT_RELOADABLE_MODULES = 0b100;
/* @formatter:on */
private final TaskDependencyFactory taskDepFactory;
public GradleApplicationModelBuilder() {
taskDepFactory = null;
}
public GradleApplicationModelBuilder(TaskDependencyFactory taskDepFactory) {
this.taskDepFactory = taskDepFactory;
}
@Override
public boolean canBuild(String modelName) {
return modelName.equals(ApplicationModel.class.getName());
}
@Override
public Class<ModelParameter> getParameterType() {
return ModelParameter.class;
}
@Override
public Object buildAll(String modelName, Project project) {
final ModelParameterImpl modelParameter = new ModelParameterImpl();
modelParameter.setMode(LaunchMode.DEVELOPMENT.toString());
return buildAll(modelName, modelParameter, project);
}
@Override
public Object buildAll(String modelName, ModelParameter parameter, Project project) {
final LaunchMode mode = LaunchMode.valueOf(parameter.getMode());
final ApplicationDeploymentClasspathBuilder classpathBuilder = new ApplicationDeploymentClasspathBuilder(project, mode,
taskDepFactory);
final Configuration classpathConfig = classpathBuilder.getRuntimeConfiguration();
final Configuration deploymentConfig = classpathBuilder.getDeploymentConfiguration();
final PlatformImports platformImports = classpathBuilder.getPlatformImports();
boolean workspaceDiscovery = LaunchMode.DEVELOPMENT.equals(mode) || LaunchMode.TEST.equals(mode)
|| Boolean.parseBoolean(System.getProperty(BootstrapConstants.QUARKUS_BOOTSTRAP_WORKSPACE_DISCOVERY));
if (!workspaceDiscovery) {
Object o = project.getProperties().get(BootstrapConstants.QUARKUS_BOOTSTRAP_WORKSPACE_DISCOVERY);
if (o != null) {
workspaceDiscovery = Boolean.parseBoolean(o.toString());
}
}
final ResolvedDependencyBuilder appArtifact = getProjectArtifact(project, workspaceDiscovery);
final ApplicationModelBuilder modelBuilder = new ApplicationModelBuilder()
.setAppArtifact(appArtifact)
.addReloadableWorkspaceModule(appArtifact.getKey())
.setPlatformImports(platformImports);
collectDependencies(classpathConfig.getResolvedConfiguration(), classpathConfig.getIncoming(), workspaceDiscovery,
project, modelBuilder, appArtifact.getWorkspaceModule().mutable());
collectExtensionDependencies(project, deploymentConfig, modelBuilder);
for (var dep : modelBuilder.getDependencies()) {
if (dep.isRuntimeCp()) {
dep.setDeploymentCp();
}
}
addCompileOnly(project, classpathBuilder, modelBuilder);
return modelBuilder.build();
}
private static void addCompileOnly(Project project, ApplicationDeploymentClasspathBuilder classpathBuilder,
ApplicationModelBuilder modelBuilder) {
var compileOnlyConfig = classpathBuilder.getCompileOnly();
final List<org.gradle.api.artifacts.ResolvedDependency> queue = new ArrayList<>(
compileOnlyConfig.getResolvedConfiguration().getFirstLevelModuleDependencies());
for (int i = 0; i < queue.size(); ++i) {
var d = queue.get(i);
boolean skip = true;
for (var a : d.getModuleArtifacts()) {
if (!isDependency(a)) {
continue;
}
var moduleId = a.getModuleVersion().getId();
var appDep = modelBuilder
.getDependency(ArtifactKey.of(moduleId.getGroup(), moduleId.getName(), a.getClassifier(), a.getType()));
if (appDep == null) {
appDep = addArtifactDependency(project, modelBuilder, a);
appDep.clearFlag(DependencyFlags.DEPLOYMENT_CP);
}
if (!appDep.isFlagSet(DependencyFlags.COMPILE_ONLY)) {
skip = false;
appDep.setFlags(DependencyFlags.COMPILE_ONLY);
}
}
if (!skip) {
queue.addAll(d.getChildren());
}
}
}
public static ResolvedDependencyBuilder getProjectArtifact(Project project, boolean workspaceDiscovery) {
final ResolvedDependencyBuilder appArtifact = ResolvedDependencyBuilder.newInstance()
.setGroupId(project.getGroup().toString())
.setArtifactId(project.getName())
.setVersion(project.getVersion().toString());
final SourceSetContainer sourceSets = project.getExtensions().getByType(SourceSetContainer.class);
final WorkspaceModule.Mutable mainModule = WorkspaceModule.builder()
.setModuleId(
WorkspaceModuleId.of(appArtifact.getGroupId(), appArtifact.getArtifactId(), appArtifact.getVersion()))
.setModuleDir(project.getProjectDir().toPath())
.setBuildDir(project.getLayout().getBuildDirectory().getAsFile().get().toPath())
.setBuildFile(project.getBuildFile().toPath());
initProjectModule(project, mainModule, sourceSets.getByName(SourceSet.MAIN_SOURCE_SET_NAME), ArtifactSources.MAIN);
if (workspaceDiscovery) {
appArtifact.setReloadable();
final TaskCollection<Test> testTasks = project.getTasks().withType(Test.class);
if (!testTasks.isEmpty()) {
final Map<File, SourceSet> sourceSetsByClassesDir = new HashMap<>();
sourceSets.forEach(s -> {
s.getOutput().getClassesDirs().forEach(d -> {
if (d.exists()) {
sourceSetsByClassesDir.put(d, s);
}
});
});
testTasks.forEach(t -> {
if (t.getEnabled()) {
t.getTestClassesDirs().forEach(d -> {
if (d.exists()) {
final SourceSet sourceSet = sourceSetsByClassesDir.remove(d);
if (sourceSet != null) {
initProjectModule(project, mainModule, sourceSet,
sourceSet.getName().equals(SourceSet.TEST_SOURCE_SET_NAME)
? ArtifactSources.TEST
: sourceSet.getName());
}
}
});
}
});
}
}
final PathList.Builder paths = PathList.builder();
collectDestinationDirs(mainModule.getMainSources().getSourceDirs(), paths);
collectDestinationDirs(mainModule.getMainSources().getResourceDirs(), paths);
return appArtifact.setWorkspaceModule(mainModule).setResolvedPaths(paths.build());
}
private static void collectDestinationDirs(Collection<SourceDir> sources, final PathList.Builder paths) {
for (SourceDir src : sources) {
final Path path = src.getOutputDir();
if (paths.contains(path) || !Files.exists(path)) {
continue;
}
paths.add(path);
}
}
private void collectExtensionDependencies(Project project, Configuration deploymentConfiguration,
ApplicationModelBuilder modelBuilder) {
final ResolvedConfiguration rc = deploymentConfiguration.getResolvedConfiguration();
final Set<ArtifactKey> processedDeps = new HashSet<>();
for (var dep : rc.getFirstLevelModuleDependencies()) {
processDeploymentDependency(project, dep, modelBuilder, false, processedDeps);
}
}
private static void processDeploymentDependency(Project project, ResolvedDependency resolvedDep,
ApplicationModelBuilder modelBuilder, boolean clearReloadableFlag, Set<ArtifactKey> processedDeps) {
final Set<ResolvedArtifact> resolvedArtifacts = resolvedDep.getModuleArtifacts();
boolean processChildren = resolvedArtifacts.isEmpty();
for (var a : resolvedArtifacts) {
final ArtifactKey artifactKey = getKey(a);
if (!processedDeps.add(artifactKey)) {
continue;
}
processChildren = true;
ResolvedDependencyBuilder dep = modelBuilder.getDependency(artifactKey);
if (dep == null) {
if (isApplicationRoot(modelBuilder, artifactKey)) {
// An application root artifact may be found among the dependencies in a could of cases:
// test fixtures in an application project and as a deployment module in an extension project
// running deployment module tests.
// In case of test fixtures, the root artifact does not have to be added to the model as a dependency,
// it can simply be skipped.
// In case of a deployment test, it has to be added as a dependency, since otherwise, the deployment
// module will appear to be missing.
// This part here looks like a hack but appears to work for both cases so far.
dep = modelBuilder.getApplicationArtifact();
} else if (a.getId()
.getComponentIdentifier() instanceof ProjectComponentIdentifier projectComponentIdentifier) {
var includedBuild = ToolingUtils.includedBuild(project,
projectComponentIdentifier.getBuild().getBuildPath());
final Project projectDep;
if (includedBuild != null) {
projectDep = ToolingUtils.includedBuildProject((IncludedBuildInternal) includedBuild,
projectComponentIdentifier.getProjectPath());
} else {
projectDep = project.getRootProject().findProject(projectComponentIdentifier.getProjectPath());
}
Objects.requireNonNull(projectDep,
() -> "project " + projectComponentIdentifier.getProjectPath() + " should exist");
SourceSetContainer sourceSets = projectDep.getExtensions().getByType(SourceSetContainer.class);
SourceSet mainSourceSet = sourceSets.findByName(SourceSet.MAIN_SOURCE_SET_NAME);
if (mainSourceSet == null) {
// try Kotlin multiplatform
mainSourceSet = sourceSets.findByName("jvmMain");
if (mainSourceSet == null) {
var msg = new StringBuilder()
.append("Failed to determine the main source set of ").append(projectDep.getPath());
var i = sourceSets.iterator();
if (!i.hasNext()) {
msg.append(" because the project does not have any source set");
} else {
msg.append(" among the following source sets: ").append(i.next().getName());
while (i.hasNext()) {
msg.append(", ").append(i.next().getName());
}
}
throw new RuntimeException(msg.toString());
}
}
dep = toDependency(a, mainSourceSet);
modelBuilder.addDependency(dep);
} else if (isDependency(a)) {
dep = toDependency(a);
modelBuilder.addDependency(dep);
}
if (dep != null) {
modelBuilder.addDependency(dep);
clearReloadableFlag = true;
}
}
if (dep != null) {
if (dep.isRuntimeExtensionArtifact()) {
clearReloadableFlag = true;
}
dep.setDeploymentCp();
if (clearReloadableFlag && dep != modelBuilder.getApplicationArtifact()) {
dep.clearFlag(DependencyFlags.RELOADABLE);
}
}
}
if (processChildren) {
for (var child : resolvedDep.getChildren()) {
processDeploymentDependency(project, child, modelBuilder, clearReloadableFlag, processedDeps);
}
}
}
private static boolean isApplicationRoot(ApplicationModelBuilder modelBuilder, ArtifactKey artifactKey) {
return modelBuilder.getApplicationArtifact().getKey().equals(artifactKey);
}
private static ResolvedDependencyBuilder addArtifactDependency(Project project, ApplicationModelBuilder modelBuilder,
ResolvedArtifact a) {
ResolvedDependencyBuilder dep = modelBuilder.getDependency(getKey(a));
if (dep == null) {
if (a.getId().getComponentIdentifier() instanceof ProjectComponentIdentifier projectComponentIdentifier) {
var includedBuild = ToolingUtils.includedBuild(project, projectComponentIdentifier.getBuild().getBuildPath());
final Project projectDep;
if (includedBuild != null) {
projectDep = ToolingUtils.includedBuildProject((IncludedBuildInternal) includedBuild,
projectComponentIdentifier.getProjectPath());
} else {
projectDep = project.getRootProject().findProject(projectComponentIdentifier.getProjectPath());
}
Objects.requireNonNull(projectDep,
() -> "project " + projectComponentIdentifier.getProjectPath() + " should exist");
SourceSetContainer sourceSets = projectDep.getExtensions().getByType(SourceSetContainer.class);
dep = toDependency(a, sourceSets.getByName(SourceSet.MAIN_SOURCE_SET_NAME));
modelBuilder.addDependency(dep);
} else if (isDependency(a)) {
dep = toDependency(a);
modelBuilder.addDependency(dep);
}
}
if (dep != null) {
dep.setDeploymentCp();
dep.clearFlag(DependencyFlags.RELOADABLE);
}
return dep;
}
private void collectDependencies(ResolvedConfiguration configuration, ResolvableDependencies dependencies,
boolean workspaceDiscovery, Project project, ApplicationModelBuilder modelBuilder,
WorkspaceModule.Mutable wsModule) {
final Set<File> artifactFiles = getArtifactFilesOrNull(configuration, dependencies);
for (ResolvedDependency d : configuration.getFirstLevelModuleDependencies()) {
collectDependencies(d, workspaceDiscovery, project, artifactFiles, modelBuilder, wsModule,
(byte) (COLLECT_TOP_EXTENSION_RUNTIME_NODES | COLLECT_DIRECT_DEPS | COLLECT_RELOADABLE_MODULES));
}
if (artifactFiles != null) {
// detect FS paths that aren't provided by the resolved artifacts
for (File f : dependencies.getFiles().getFiles()) {
if (artifactFiles.contains(f) || !f.exists()) {
continue;
}
// here we are trying to represent a direct FS path dependency
// as an artifact dependency
// SHA1 hash is used to avoid long file names in the lib dir
final String parentPath = f.getParent();
final String group = HashUtil.sha1(parentPath == null ? f.getName() : parentPath);
String name = f.getName();
String type = ArtifactCoords.TYPE_JAR;
if (!f.isDirectory()) {
final int dot = f.getName().lastIndexOf('.');
if (dot > 0) {
name = f.getName().substring(0, dot);
type = f.getName().substring(dot + 1);
}
}
// hash could be a better way to represent the version
final String version = String.valueOf(f.lastModified());
final ResolvedDependencyBuilder artifactBuilder = ResolvedDependencyBuilder.newInstance()
.setGroupId(group)
.setArtifactId(name)
.setType(type)
.setVersion(version)
.setResolvedPath(f.toPath())
.setDirect(true)
.setRuntimeCp()
.setDeploymentCp();
processQuarkusDependency(artifactBuilder, modelBuilder);
modelBuilder.addDependency(artifactBuilder);
}
}
}
private static Set<File> getArtifactFilesOrNull(ResolvedConfiguration configuration, ResolvableDependencies dependencies) {
final Set<ResolvedArtifact> resolvedArtifacts = configuration.getResolvedArtifacts();
// if the number of artifacts is less than the number of files then probably
// the project includes direct file dependencies
return resolvedArtifacts.size() < dependencies.getFiles().getFiles().size()
? new HashSet<>(resolvedArtifacts.size())
: null;
}
private void collectDependencies(org.gradle.api.artifacts.ResolvedDependency resolvedDep, boolean workspaceDiscovery,
Project project, Set<File> artifactFiles, ApplicationModelBuilder modelBuilder,
WorkspaceModule.Mutable parentModule,
byte flags) {
WorkspaceModule.Mutable projectModule = null;
final Set<ResolvedArtifact> resolvedArtifacts = resolvedDep.getModuleArtifacts();
boolean processChildren = resolvedArtifacts.isEmpty();
for (ResolvedArtifact a : resolvedArtifacts) {
if (!isDependency(a)) {
continue;
}
final ArtifactKey artifactKey = getKey(a);
if (isApplicationRoot(modelBuilder, artifactKey)) {
continue;
}
var depBuilder = modelBuilder.getDependency(artifactKey);
if (depBuilder == null) {
processChildren = true;
final ArtifactCoords depCoords = getArtifactCoords(a);
depBuilder = ResolvedDependencyBuilder.newInstance()
.setCoords(depCoords)
.setRuntimeCp();
if (parentModule != null) {
parentModule.addDependency(new ArtifactDependency(depCoords));
}
PathCollection paths = null;
if (workspaceDiscovery && a.getId().getComponentIdentifier() instanceof ProjectComponentIdentifier compId) {
Project projectDep = project.getRootProject().findProject(compId.getProjectPath());
final String classifier = a.getClassifier();
if (classifier == null || classifier.isEmpty()) {
final IncludedBuild includedBuild = ToolingUtils.includedBuild(project.getRootProject(),
compId.getBuild().getBuildPath());
if (includedBuild != null) {
if (includedBuild instanceof IncludedBuildInternal ib) {
projectDep = ToolingUtils.includedBuildProject(ib, compId.getProjectPath());
}
if (projectDep != null) {
initProjectModuleAndBuildPaths(projectDep, a, modelBuilder, depBuilder);
} else {
final PathList.Builder pathBuilder = PathList.builder();
addSubstitutedProject(pathBuilder, includedBuild.getProjectDir());
paths = pathBuilder.build();
}
} else {
initProjectModuleAndBuildPaths(projectDep, a, modelBuilder, depBuilder);
}
} else {
initProjectModuleAndBuildPaths(projectDep, a, modelBuilder, depBuilder);
}
}
depBuilder.setResolvedPaths(paths == null ? PathList.of(a.getFile().toPath()) : paths);
if (processQuarkusDependency(depBuilder, modelBuilder)) {
flags = clearFlag(flags, COLLECT_RELOADABLE_MODULES);
}
modelBuilder.addDependency(depBuilder);
if (artifactFiles != null) {
artifactFiles.add(a.getFile());
}
}
if (projectModule == null && depBuilder.getWorkspaceModule() != null) {
projectModule = depBuilder.getWorkspaceModule().mutable();
}
if (isFlagOn(flags, COLLECT_DIRECT_DEPS)) {
depBuilder.setDirect(true);
flags = clearFlag(flags, COLLECT_DIRECT_DEPS);
}
if (depBuilder.isRuntimeExtensionArtifact()) {
if (isFlagOn(flags, COLLECT_RELOADABLE_MODULES)) {
flags = clearFlag(flags, COLLECT_RELOADABLE_MODULES);
processChildren = true;
}
if (isFlagOn(flags, COLLECT_TOP_EXTENSION_RUNTIME_NODES)
&& !depBuilder.isFlagSet(DependencyFlags.TOP_LEVEL_RUNTIME_EXTENSION_ARTIFACT)) {
depBuilder.setFlags(DependencyFlags.TOP_LEVEL_RUNTIME_EXTENSION_ARTIFACT);
flags = clearFlag(flags, COLLECT_TOP_EXTENSION_RUNTIME_NODES);
}
}
if (!isFlagOn(flags, COLLECT_RELOADABLE_MODULES)) {
depBuilder.clearFlag(DependencyFlags.RELOADABLE);
}
}
if (processChildren) {
for (org.gradle.api.artifacts.ResolvedDependency child : resolvedDep.getChildren()) {
collectDependencies(child, workspaceDiscovery, project, artifactFiles, modelBuilder, projectModule, flags);
}
}
}
private void initProjectModuleAndBuildPaths(final Project project,
ResolvedArtifact resolvedArtifact, ApplicationModelBuilder appModel, final ResolvedDependencyBuilder appDep) {
if (project == null) {
System.err.println("Error: could not find project for " + resolvedArtifact.getId().getDisplayName());
throw new IllegalStateException("Could not find project for " + resolvedArtifact.getId().getDisplayName());
}
appDep.setWorkspaceModule().setReloadable();
if (appDep.getWorkspaceModule() == null) {
final WorkspaceModule.Mutable projectModule = appModel.getOrCreateProjectModule(
WorkspaceModuleId.of(resolvedArtifact.getModuleVersion().getId().getGroup(), resolvedArtifact.getName(),
resolvedArtifact.getModuleVersion().getId().getVersion()),
project.getProjectDir(),
project.getLayout().getBuildDirectory().get().getAsFile())
.setBuildFile(project.getBuildFile().toPath());
ProjectDescriptorBuilder.initSourceDirs(project, projectModule);
appDep.setWorkspaceModule(projectModule);
}
appModel.addReloadableWorkspaceModule(appDep.getKey());
}
private boolean processQuarkusDependency(ResolvedDependencyBuilder artifactBuilder, ApplicationModelBuilder modelBuilder) {
for (Path artifactPath : artifactBuilder.getResolvedPaths()) {
if (!Files.exists(artifactPath) || !artifactBuilder.getType().equals(ArtifactCoords.TYPE_JAR)) {
break;
}
if (Files.isDirectory(artifactPath)) {
return processQuarkusDir(artifactBuilder, artifactPath.resolve(BootstrapConstants.META_INF), modelBuilder);
} else {
try (FileSystem artifactFs = ZipUtils.newFileSystem(artifactPath)) {
return processQuarkusDir(artifactBuilder, artifactFs.getPath(BootstrapConstants.META_INF), modelBuilder);
} catch (IOException e) {
throw new RuntimeException("Failed to process " + artifactPath, e);
}
}
}
return false;
}
private static boolean processQuarkusDir(ResolvedDependencyBuilder artifactBuilder, Path quarkusDir,
ApplicationModelBuilder modelBuilder) {
if (!Files.exists(quarkusDir)) {
return false;
}
final Path quarkusDescr = quarkusDir.resolve(BootstrapConstants.DESCRIPTOR_FILE_NAME);
if (!Files.exists(quarkusDescr)) {
return false;
}
final Properties extProps = readDescriptor(quarkusDescr);
if (extProps == null) {
return false;
}
artifactBuilder.setRuntimeExtensionArtifact();
modelBuilder.handleExtensionProperties(extProps, artifactBuilder.getKey());
final String providesCapabilities = extProps.getProperty(BootstrapConstants.PROP_PROVIDES_CAPABILITIES);
if (providesCapabilities != null) {
modelBuilder
.addExtensionCapabilities(
CapabilityContract.of(artifactBuilder.toGACTVString(), providesCapabilities, null));
}
return true;
}
private static Properties readDescriptor(final Path path) {
final Properties rtProps;
if (!Files.exists(path)) {
// not a platform artifact
return null;
}
rtProps = new Properties();
try (BufferedReader reader = Files.newBufferedReader(path)) {
rtProps.load(reader);
} catch (IOException e) {
throw new UncheckedIOException("Failed to load extension description " + path, e);
}
return rtProps;
}
private static void initProjectModule(Project project, WorkspaceModule.Mutable module, SourceSet sourceSet,
String classifier) {
if (sourceSet == null) {
return;
}
final FileCollection allClassesDirs = sourceSet.getOutput().getClassesDirs();
// some plugins do not add source directories to source sets and they may be missing from sourceSet.getAllJava()
// see https://github.com/quarkusio/quarkus/issues/20755
final List<SourceDir> sourceDirs = new ArrayList<>(1);
project.getTasks().withType(AbstractCompile.class,
t -> configureCompileTask(t.getSource(), t.getDestinationDirectory(), allClassesDirs, sourceDirs, t,
sourceSet));
maybeConfigureKotlinJvmCompile(project, allClassesDirs, sourceDirs, sourceSet);
final LinkedHashMap<File, Path> resourceDirs = new LinkedHashMap<>(1);
final File resourcesOutputDir = sourceSet.getOutput().getResourcesDir();
project.getTasks().withType(ProcessResources.class, t -> {
if (!t.getEnabled()) {
return;
}
final FileCollection source = t.getSource();
if (source.isEmpty()) {
return;
}
if (!t.getDestinationDir().equals(resourcesOutputDir)) {
return;
}
final Path destDir = t.getDestinationDir().toPath();
source.getAsFileTree().visit(a -> {
// we are looking for the root dirs containing sources
if (a.getRelativePath().getSegments().length == 1) {
final File srcDir = a.getFile().getParentFile();
resourceDirs.put(srcDir, destDir);
}
});
});
// there could be a task generating resources
if (resourcesOutputDir.exists() && resourceDirs.isEmpty()) {
sourceSet.getResources().getSrcDirs()
.forEach(srcDir -> resourceDirs.put(srcDir, resourcesOutputDir.toPath()));
}
final List<SourceDir> resources = new ArrayList<>(resourceDirs.size());
for (Map.Entry<File, Path> e : resourceDirs.entrySet()) {
resources.add(new DefaultSourceDir(e.getKey().toPath(), e.getValue(), null));
}
module.addArtifactSources(new DefaultArtifactSources(classifier, sourceDirs, resources));
}
private static void maybeConfigureKotlinJvmCompile(Project project, FileCollection allClassesDirs,
List<SourceDir> sourceDirs, SourceSet sourceSet) {
for (var task : project.getTasks()) {
if (task.getName().contains("compileKotlin") && task.getEnabled()) {
int originalSourceDirsSize = sourceDirs.size();
// This "try/catch" is needed because of the way the "quarkus-cli" Gradle tests work. Without it, the tests fail.
try {
Class.forName("org.jetbrains.kotlin.gradle.tasks.KotlinCompileTool");
doConfigureKotlinJvmCompile(project, allClassesDirs, sourceDirs, sourceSet);
} catch (ClassNotFoundException e) {
// ignore
}
// if the above failed, there could still be a KotlinCompile task that's not easily discoverable
if (originalSourceDirsSize == sourceDirs.size()) {
final File outputDir = getClassesOutputDir(task);
if (outputDir != null && task.getInputs().getHasInputs()) {
task.getInputs().getSourceFiles().getAsFileTree().visit(visitor -> {
if (visitor.getRelativePath().getSegments().length == 1) {
sourceDirs.add(SourceDir.of(visitor.getFile().getParentFile().toPath(), outputDir.toPath()));
}
});
}
break;
}
}
}
}
private static void doConfigureKotlinJvmCompile(Project project, FileCollection allClassesDirs,
List<SourceDir> sourceDirs, SourceSet sourceSet) {
// Use KotlinJvmCompile.
|
GradleApplicationModelBuilder
|
java
|
spring-projects__spring-boot
|
module/spring-boot-webmvc-test/src/test/java/org/springframework/boot/webmvc/test/autoconfigure/mockmvc/WebMvcTestHtmlUnitWebClientIntegrationTests.java
|
{
"start": 1349,
"end": 1704
}
|
class ____ {
@Autowired
private WebClient webClient;
@Test
void shouldAutoConfigureWebClient() throws Exception {
HtmlPage page = this.webClient.getPage("/html");
assertThat(page.getBody().getTextContent()).isEqualTo("Hello");
assertThat(page.getBaseURI()).isEqualTo("http://localhost:8181/html");
}
}
|
WebMvcTestHtmlUnitWebClientIntegrationTests
|
java
|
mapstruct__mapstruct
|
integrationtest/src/test/resources/namingStrategyTest/usage/src/main/java/org/mapstruct/itest/naming/GolfPlayerDto.java
|
{
"start": 226,
"end": 585
}
|
class ____ {
private double handicap;
private String name;
public double handicap() {
return handicap;
}
public void withHandicap(double handicap) {
this.handicap = handicap;
}
public String name() {
return name;
}
public void withName(String name) {
this.name = name;
}
}
|
GolfPlayerDto
|
java
|
google__dagger
|
javatests/dagger/internal/codegen/ComponentProcessorTest.java
|
{
"start": 20899,
"end": 21149
}
|
class ____ {",
" @Inject BClass(AClass a) {}",
"}");
Source aModule = CompilerTests.javaSource("AModule",
"import dagger.Module;",
"import dagger.Provides;",
"",
"@Module
|
BClass
|
java
|
assertj__assertj-core
|
assertj-core/src/test/java/org/assertj/core/internal/bigintegers/BigIntegers_assertLessThanOrEqualTo_Test.java
|
{
"start": 1510,
"end": 3807
}
|
class ____ extends BigIntegersBaseTest {
@Test
void should_fail_if_actual_is_null() {
assertThatExceptionOfType(AssertionError.class).isThrownBy(() -> numbers.assertLessThanOrEqualTo(someInfo(), null, ONE))
.withMessage(actualIsNull());
}
@Test
void should_pass_if_actual_is_less_than_other() {
numbers.assertLessThanOrEqualTo(someInfo(), ONE, TEN);
}
@Test
void should_pass_if_actual_is_equal_to_other() {
numbers.assertLessThanOrEqualTo(someInfo(), ONE, ONE);
}
@Test
void should_pass_if_actual_is_equal_to_other_by_comparison() {
numbers.assertLessThanOrEqualTo(someInfo(), ONE, new BigInteger("1"));
}
@Test
void should_fail_if_actual_is_less_than_other() {
AssertionInfo info = someInfo();
Throwable error = catchThrowable(() -> numbers.assertLessThanOrEqualTo(info, TEN, ONE));
assertThat(error).isInstanceOf(AssertionError.class);
verify(failures).failure(info, shouldBeLessOrEqual(TEN, ONE));
}
// ------------------------------------------------------------------------------------------------------------------
// tests using a custom comparison strategy
// ------------------------------------------------------------------------------------------------------------------
@Test
void should_pass_if_actual_is_less_than_other_according_to_custom_comparison_strategy() {
numbersWithAbsValueComparisonStrategy.assertLessThanOrEqualTo(someInfo(), ONE, TEN.negate());
}
@Test
void should_pass_if_actual_is_equal_to_other_according_to_custom_comparison_strategy() {
numbersWithAbsValueComparisonStrategy.assertLessThanOrEqualTo(someInfo(), ONE.negate(), ONE);
}
@Test
void should_fail_if_actual_is_less_than_other_according_to_custom_comparison_strategy() {
AssertionInfo info = someInfo();
Throwable error = catchThrowable(() -> numbersWithAbsValueComparisonStrategy.assertLessThanOrEqualTo(info, TEN.negate(),
ONE));
assertThat(error).isInstanceOf(AssertionError.class);
verify(failures).failure(info, shouldBeLessOrEqual(TEN.negate(), ONE, absValueComparisonStrategy));
}
}
|
BigIntegers_assertLessThanOrEqualTo_Test
|
java
|
google__guava
|
guava/src/com/google/common/collect/CollectSpliterators.java
|
{
"start": 19135,
"end": 19765
}
|
class ____<InElementT extends @Nullable Object>
extends FlatMapSpliteratorOfPrimitive<InElementT, Long, LongConsumer, Spliterator.OfLong>
implements Spliterator.OfLong {
FlatMapSpliteratorOfLong(
Spliterator.@Nullable OfLong prefix,
Spliterator<InElementT> from,
Function<? super InElementT, Spliterator.@Nullable OfLong> function,
int characteristics,
long estimatedSize) {
super(prefix, from, function, FlatMapSpliteratorOfLong::new, characteristics, estimatedSize);
}
}
/** Implementation of {@link #flatMapToDouble}. */
static final
|
FlatMapSpliteratorOfLong
|
java
|
spring-projects__spring-security
|
web/src/main/java/org/springframework/security/web/firewall/CompositeRequestRejectedHandler.java
|
{
"start": 1103,
"end": 2070
}
|
class ____ implements RequestRejectedHandler {
private final List<RequestRejectedHandler> requestRejectedhandlers;
/**
* Creates a new instance.
* @param requestRejectedhandlers the {@link RequestRejectedHandler} instances to
* handle {@link org.springframework.security.web.firewall.RequestRejectedException}
*/
public CompositeRequestRejectedHandler(RequestRejectedHandler... requestRejectedhandlers) {
Assert.notEmpty(requestRejectedhandlers, "requestRejectedhandlers cannot be empty");
this.requestRejectedhandlers = Arrays.asList(requestRejectedhandlers);
}
@Override
public void handle(HttpServletRequest request, HttpServletResponse response,
RequestRejectedException requestRejectedException) throws IOException, ServletException {
for (RequestRejectedHandler requestRejectedhandler : this.requestRejectedhandlers) {
requestRejectedhandler.handle(request, response, requestRejectedException);
}
}
}
|
CompositeRequestRejectedHandler
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/engine/jdbc/mutation/MutationExecutor.java
|
{
"start": 783,
"end": 1995
}
|
interface ____ {
/**
* Get the delegate to be used to coordinate JDBC parameter binding.
*/
JdbcValueBindings getJdbcValueBindings();
/**
* Details about the {@link java.sql.PreparedStatement} for mutating
* the given table.
*/
PreparedStatementDetails getPreparedStatementDetails(String tableName);
/**
* Perform the execution, returning any generated value.
*
* @param inclusionChecker The ability to skip the execution for a
* specific table; passing {@code null} indicates no filtering
* @param resultChecker Custom result checking; pass {@code null} to perform
* the standard check using the statement's {@linkplain org.hibernate.jdbc.Expectation expectation}
*/
GeneratedValues execute(
Object modelReference,
ValuesAnalysis valuesAnalysis,
TableInclusionChecker inclusionChecker,
OperationResultChecker resultChecker,
SharedSessionContractImplementor session);
GeneratedValues execute(
Object modelReference,
ValuesAnalysis valuesAnalysis,
TableInclusionChecker inclusionChecker,
OperationResultChecker resultChecker,
SharedSessionContractImplementor session,
Batch.StaleStateMapper staleStateMapper);
void release();
}
|
MutationExecutor
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/index/mapper/BlockLoader.java
|
{
"start": 9178,
"end": 9400
}
|
interface ____ {
/**
* Checks if the reader can be used to read a range documents starting with the given docID by the current thread.
*/
boolean canReuse(int startingDocID);
}
|
Reader
|
java
|
google__dagger
|
hilt-compiler/main/java/dagger/hilt/android/processor/internal/androidentrypoint/Generators.java
|
{
"start": 3286,
"end": 11539
}
|
class ____ bytecode transformation.",
annotation);
}
/** Copies all constructors with arguments to the builder. */
static void copyConstructors(
XTypeElement baseClass, TypeSpec.Builder builder, XTypeElement subclassReference) {
copyConstructors(baseClass, CodeBlock.builder().build(), builder, subclassReference);
}
/** Copies all constructors with arguments along with an appended body to the builder. */
static void copyConstructors(
XTypeElement baseClass,
CodeBlock body,
TypeSpec.Builder builder,
XTypeElement subclassReference) {
ImmutableList<XConstructorElement> constructors =
baseClass.getConstructors().stream()
.filter(constructor -> isConstructorVisibleToSubclass(constructor, subclassReference))
.collect(toImmutableList());
if (constructors.size() == 1
&& getOnlyElement(constructors).getParameters().isEmpty()
&& body.isEmpty()) {
// No need to copy the constructor if the default constructor will handle it.
return;
}
constructors.forEach(constructor -> builder.addMethod(copyConstructor(constructor, body)));
}
/**
* Returns true if the constructor is visible to a subclass in the same package as the reference.
* A reference is used because usually for generators the subclass is being generated and so
* doesn't actually exist.
*/
static boolean isConstructorVisibleToSubclass(
XConstructorElement constructor, XTypeElement subclassReference) {
// Check if the constructor has package private visibility and we're outside the package
if (Processors.hasJavaPackagePrivateVisibility(constructor)
&& !constructor
.getEnclosingElement()
.getPackageName()
.contentEquals(subclassReference.getPackageName())) {
return false;
// Or if it is private, we know generated code can't be in the same file
} else if (constructor.isPrivate()) {
return false;
}
// Assume this is for a subclass per the name, so both protected and public methods are always
// accessible.
return true;
}
/** Returns Optional with AnnotationSpec for Nullable if found on element, empty otherwise. */
private static Optional<AnnotationSpec> getNullableAnnotationSpec(XElement element) {
for (XAnnotation annotation : element.getAllAnnotations()) {
if (annotation.getClassName().simpleName().contentEquals("Nullable")) {
AnnotationSpec annotationSpec = toAnnotationSpec(annotation);
// If using the android internal Nullable, convert it to the externally-visible version.
return AndroidClassNames.NULLABLE_INTERNAL.equals(annotationSpec.type)
? Optional.of(AnnotationSpec.builder(AndroidClassNames.NULLABLE).build())
: Optional.of(annotationSpec);
}
}
return Optional.empty();
}
/** Returns a TypeName for the given type, including any @Nullable annotations on it. */
private static TypeName withAnyNullnessAnnotation(XType type) {
for (XAnnotation annotation : type.getAllAnnotations()) {
if (annotation.getClassName().simpleName().contentEquals("Nullable")) {
return type.getTypeName().annotated(toAnnotationSpec(annotation));
}
}
return type.getTypeName();
}
/**
* Returns a ParameterSpec of the input parameter, @Nullable annotated if existing in original.
*/
private static ParameterSpec getParameterSpecWithNullable(XVariableElement parameter) {
TypeName type = withAnyNullnessAnnotation(parameter.getType());
ParameterSpec.Builder builder = ParameterSpec.builder(type, getSimpleName(parameter));
/*
* If we already have a type-use Nullable, don't consider also adding a declaration Nullable,
* which could be a duplicate in the case of "hybrid" annotations that support both type-use and
* declaration targets.
*/
if (!type.isAnnotated()) {
getNullableAnnotationSpec(parameter).ifPresent(builder::addAnnotation);
}
return builder.build();
}
/**
* Returns a {@link MethodSpec} for a constructor matching the given {@link XConstructorElement}
* constructor signature, and just calls super. If the constructor is {@link
* android.annotation.TargetApi} guarded, adds the TargetApi as well.
*/
// Example:
// Foo(Param1 param1, Param2 param2) {
// super(param1, param2);
// }
static MethodSpec copyConstructor(XConstructorElement constructor) {
return copyConstructor(constructor, CodeBlock.builder().build());
}
private static MethodSpec copyConstructor(XConstructorElement constructor, CodeBlock body) {
List<ParameterSpec> params =
constructor.getParameters().stream()
.map(Generators::getParameterSpecWithNullable)
.collect(Collectors.toList());
final MethodSpec.Builder builder =
MethodSpec.constructorBuilder()
.addParameters(params)
.addStatement(
"super($L)", params.stream().map(param -> param.name).collect(joining(", ")))
.addCode(body);
constructor.getAllAnnotations().stream()
.filter(a -> a.getTypeElement().hasAnnotation(AndroidClassNames.TARGET_API))
.collect(toOptional())
.map(JavaPoetExtKt::toAnnotationSpec)
.ifPresent(builder::addAnnotation);
return builder.build();
}
/** Copies SuppressWarnings annotations from the annotated element to the generated element. */
static void copySuppressAnnotations(XElement element, TypeSpec.Builder builder) {
ImmutableSet<String> suppressValues =
SUPPRESS_ANNOTATION_PROPERTY_NAME.keySet().stream()
.filter(element::hasAnnotation)
.flatMap(
annotation ->
element
.getAnnotation(annotation)
.getAsStringList(SUPPRESS_ANNOTATION_PROPERTY_NAME.get(annotation))
.stream())
.collect(toImmutableSet());
if (!suppressValues.isEmpty()) {
// Replace kotlin Suppress with java SuppressWarnings, as the generated file is java.
AnnotationSpec.Builder annotation = AnnotationSpec.builder(ClassNames.SUPPRESS_WARNINGS);
suppressValues.forEach(value -> annotation.addMember("value", "$S", value));
builder.addAnnotation(annotation.build());
}
}
/**
* Copies the Android lint annotations from the annotated element to the generated element.
*
* <p>Note: For now we only copy over {@link android.annotation.TargetApi}.
*/
static void copyLintAnnotations(XElement element, TypeSpec.Builder builder) {
if (element.hasAnnotation(AndroidClassNames.TARGET_API)) {
builder.addAnnotation(toAnnotationSpec(element.getAnnotation(AndroidClassNames.TARGET_API)));
}
}
// @Override
// public CompT generatedComponent() {
// return componentManager().generatedComponent();
// }
static void addComponentOverride(AndroidEntryPointMetadata metadata, TypeSpec.Builder builder) {
if (metadata.overridesAndroidEntryPointClass()) {
// We don't need to override this method if we are extending a Hilt type.
return;
}
builder
.addSuperinterface(ClassNames.GENERATED_COMPONENT_MANAGER_HOLDER)
.addMethod(
MethodSpec.methodBuilder("generatedComponent")
.addAnnotation(Override.class)
.addModifiers(Modifier.PUBLIC, Modifier.FINAL)
.returns(TypeName.OBJECT)
.addStatement("return $L.generatedComponent()", componentManagerCallBlock(metadata))
.build());
}
/** Adds the inject() and optionally the componentManager() methods to allow for injection. */
static void addInjectionMethods(AndroidEntryPointMetadata metadata, TypeSpec.Builder builder) {
switch (metadata.androidType()) {
case ACTIVITY:
case FRAGMENT:
case VIEW:
case SERVICE:
addComponentManagerMethods(metadata, builder);
// fall through
case BROADCAST_RECEIVER:
addInjectAndMaybeOptionalInjectMethod(metadata, builder);
break;
default:
throw new AssertionError();
}
}
/** Returns the nearest super
|
via
|
java
|
spring-projects__spring-security
|
webauthn/src/main/java/org/springframework/security/web/webauthn/jackson/AuthenticatorAttachmentDeserializer.java
|
{
"start": 1135,
"end": 1723
}
|
class ____ extends StdDeserializer<AuthenticatorAttachment> {
AuthenticatorAttachmentDeserializer() {
super(AuthenticatorAttachment.class);
}
@Override
public @Nullable AuthenticatorAttachment deserialize(JsonParser parser, DeserializationContext ctxt)
throws JacksonException {
String type = parser.readValueAs(String.class);
for (AuthenticatorAttachment publicKeyCredentialType : AuthenticatorAttachment.values()) {
if (publicKeyCredentialType.getValue().equals(type)) {
return publicKeyCredentialType;
}
}
return null;
}
}
|
AuthenticatorAttachmentDeserializer
|
java
|
apache__hadoop
|
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/impl/FlagSet.java
|
{
"start": 8332,
"end": 8678
}
|
class ____ enum
* @param conf configuration
* @param key key to look for
* @param ignoreUnknown should unknown values raise an exception?
* @param <E> enumeration type
* @return a mutable FlagSet
* @throws IllegalArgumentException if one of the entries was unknown and ignoreUnknown is false,
* or there are two entries in the
|
of
|
java
|
apache__rocketmq
|
remoting/src/main/java/org/apache/rocketmq/remoting/protocol/header/GetUserRequestHeader.java
|
{
"start": 1320,
"end": 1787
}
|
class ____ implements CommandCustomHeader {
private String username;
public GetUserRequestHeader() {
}
public GetUserRequestHeader(String username) {
this.username = username;
}
@Override
public void checkFields() throws RemotingCommandException {
}
public String getUsername() {
return username;
}
public void setUsername(String username) {
this.username = username;
}
}
|
GetUserRequestHeader
|
java
|
quarkusio__quarkus
|
extensions/devservices/keycloak/src/main/java/io/quarkus/devservices/keycloak/KeycloakDevServicesProcessor.java
|
{
"start": 40203,
"end": 44998
}
|
class ____ extends RuntimeException {
private final int errorStatus;
public RealmEndpointAccessException(int errorStatus) {
this.errorStatus = errorStatus;
}
public int getErrorStatus() {
return errorStatus;
}
}
private static Predicate<? super Throwable> realmEndpointNotAvailable() {
return t -> (t instanceof SocketException
|| (t instanceof RealmEndpointAccessException && ((RealmEndpointAccessException) t).getErrorStatus() == 404));
}
private static Map<String, String> getUsers(Map<String, String> configuredUsers, boolean createRealm) {
if (configuredUsers.isEmpty() && createRealm) {
Map<String, String> users = new LinkedHashMap<String, String>();
users.put("alice", "alice");
users.put("bob", "bob");
return users;
} else {
return configuredUsers;
}
}
private static List<String> getUserRoles(String user) {
List<String> roles = capturedDevServicesConfiguration.roles().get(user);
return roles == null ? ("alice".equals(user) ? List.of("admin", "user") : List.of("user"))
: roles;
}
private static RealmRepresentation createDefaultRealmRep() {
RealmRepresentation realm = new RealmRepresentation();
realm.setRealm(getDefaultRealmName());
realm.setEnabled(true);
realm.setUsers(new ArrayList<>());
realm.setClients(new ArrayList<>());
realm.setAccessTokenLifespan(600);
realm.setSsoSessionMaxLifespan(600);
realm.setRefreshTokenMaxReuse(10);
realm.setRequiredActions(List.of());
RolesRepresentation roles = new RolesRepresentation();
List<RoleRepresentation> realmRoles = new ArrayList<>();
roles.setRealm(realmRoles);
realm.setRoles(roles);
if (capturedDevServicesConfiguration.roles().isEmpty()) {
realm.getRoles().getRealm().add(new RoleRepresentation("user", null, false));
realm.getRoles().getRealm().add(new RoleRepresentation("admin", null, false));
} else {
Set<String> allRoles = new HashSet<>();
for (List<String> distinctRoles : capturedDevServicesConfiguration.roles().values()) {
for (String role : distinctRoles) {
if (!allRoles.contains(role)) {
allRoles.add(role);
realm.getRoles().getRealm().add(new RoleRepresentation(role, null, false));
}
}
}
}
return realm;
}
private static ClientRepresentation createClient(String clientId, String oidcClientSecret) {
ClientRepresentation client = new ClientRepresentation();
client.setClientId(clientId);
client.setPublicClient(false);
client.setSecret(oidcClientSecret);
client.setDirectAccessGrantsEnabled(true);
client.setServiceAccountsEnabled(true);
client.setImplicitFlowEnabled(true);
client.setEnabled(true);
client.setRedirectUris(List.of("*"));
client.setWebOrigins(List.of("*"));
client.setDefaultClientScopes(List.of("microprofile-jwt", "basic"));
return client;
}
private static UserRepresentation createUser(String username, String password, List<String> realmRoles) {
UserRepresentation user = new UserRepresentation();
user.setUsername(username);
user.setEnabled(true);
user.setCredentials(new ArrayList<>());
user.setRealmRoles(realmRoles);
user.setEmailVerified(true);
user.setRequiredActions(List.of());
CredentialRepresentation credential = new CredentialRepresentation();
credential.setType(CredentialRepresentation.PASSWORD);
credential.setValue(password);
credential.setTemporary(false);
user.getCredentials().add(credential);
return user;
}
private static String getOidcClientId() {
// if the application type is web-app or hybrid, OidcRecorder will enforce that the client id and secret are configured
return ConfigProvider.getConfig().getOptionalValue(CLIENT_ID_CONFIG_KEY, String.class)
.orElse(capturedDevServicesConfiguration.createClient() ? "quarkus-app" : "");
}
private static String getOidcClientSecret() {
// if the application type is web-app or hybrid, OidcRecorder will enforce that the client id and secret are configured
return ConfigProvider.getConfig().getOptionalValue(CLIENT_SECRET_CONFIG_KEY, String.class)
.orElse(capturedDevServicesConfiguration.createClient() ? "secret" : "");
}
}
|
RealmEndpointAccessException
|
java
|
apache__camel
|
core/camel-core-model/src/main/java/org/apache/camel/builder/AdviceWithTasks.java
|
{
"start": 2524,
"end": 3189
}
|
class ____ implements MatchBy {
private final String id;
private MatchById(String id) {
this.id = id;
}
@Override
public String getId() {
return id;
}
@Override
public boolean match(ProcessorDefinition<?> processor) {
if (id.equals("*")) {
// make sure the processor which id isn't be set is matched.
return true;
}
return PatternHelper.matchPattern(processor.getId(), id);
}
}
/**
* Will match by the to string representation of the processor.
*/
private static final
|
MatchById
|
java
|
apache__commons-lang
|
src/main/java/org/apache/commons/lang3/ClassUtils.java
|
{
"start": 31130,
"end": 31518
}
|
class ____ get the package name for, may be {@code null}.
* @return the package name or an empty string.
* @since 2.4
*/
public static String getPackageCanonicalName(final Class<?> cls) {
if (cls == null) {
return StringUtils.EMPTY;
}
return getPackageCanonicalName(cls.getName());
}
/**
* Gets the package name from the
|
to
|
java
|
spring-projects__spring-framework
|
spring-context/src/test/java/org/springframework/context/annotation/ConfigurationClassPostProcessorTests.java
|
{
"start": 62864,
"end": 63057
}
|
class ____ {
@Bean public ExtendedFoo foo() {
return new ExtendedFoo();
}
@Bean public Bar bar() {
return new Bar(foo());
}
}
@Configuration
static
|
OverridingSingletonBeanConfig
|
java
|
quarkusio__quarkus
|
extensions/opentelemetry/runtime/src/main/java/io/quarkus/opentelemetry/runtime/tracing/intrumentation/vertx/OpenTelemetryVertxTracingFactory.java
|
{
"start": 453,
"end": 900
}
|
class ____ implements VertxTracerFactory {
private final VertxDelegator vertxTracerDelegator = new VertxDelegator();
public OpenTelemetryVertxTracingFactory() {
}
public VertxDelegator getVertxTracerDelegator() {
return vertxTracerDelegator;
}
@Override
public VertxTracer<?, ?> tracer(final TracingOptions options) {
return vertxTracerDelegator;
}
public static
|
OpenTelemetryVertxTracingFactory
|
java
|
elastic__elasticsearch
|
x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/deprecation/DeprecationIssue.java
|
{
"start": 1320,
"end": 7775
}
|
enum ____ implements Writeable {
/**
* Resolving this issue is advised but not required to upgrade. There may be undesired changes in behavior unless this issue is
* resolved before upgrading.
*/
WARNING,
/**
* This issue must be resolved to upgrade. Failures will occur unless this is resolved before upgrading.
*/
CRITICAL;
public static Level fromString(String value) {
return Level.valueOf(value.toUpperCase(Locale.ROOT));
}
public static Level readFromStream(StreamInput in) throws IOException {
int ordinal = in.readVInt();
if (ordinal < 0 || ordinal >= values().length) {
throw new IOException("Unknown Level ordinal [" + ordinal + "]");
}
return values()[ordinal];
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeVInt(ordinal());
}
@Override
public String toString() {
return name().toLowerCase(Locale.ROOT);
}
}
private final Level level;
private final String message;
private final String url;
private final String details;
private final boolean resolveDuringRollingUpgrade;
private final Map<String, Object> meta;
public DeprecationIssue(
Level level,
String message,
String url,
@Nullable String details,
boolean resolveDuringRollingUpgrade,
@Nullable Map<String, Object> meta
) {
this.level = level;
this.message = message;
this.url = url;
this.details = details;
this.resolveDuringRollingUpgrade = resolveDuringRollingUpgrade;
this.meta = meta;
}
public DeprecationIssue(StreamInput in) throws IOException {
level = Level.readFromStream(in);
message = in.readString();
url = in.readString();
details = in.readOptionalString();
resolveDuringRollingUpgrade = in.readBoolean();
meta = in.readGenericMap();
}
public Level getLevel() {
return level;
}
public String getMessage() {
return message;
}
public String getUrl() {
return url;
}
public String getDetails() {
return details;
}
/**
* @return whether a deprecation issue can only be resolved during a rolling upgrade when a node is offline.
*/
public boolean isResolveDuringRollingUpgrade() {
return resolveDuringRollingUpgrade;
}
/**
* @return custom metadata, which allows the ui to display additional details
* without parsing the deprecation message itself.
*/
public Map<String, Object> getMeta() {
return meta;
}
private Optional<Meta> getMetaObject() {
return Meta.fromMetaMap(meta);
}
@Override
public void writeTo(StreamOutput out) throws IOException {
level.writeTo(out);
out.writeString(message);
out.writeString(url);
out.writeOptionalString(details);
out.writeBoolean(resolveDuringRollingUpgrade);
out.writeGenericMap(meta);
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject().field("level", level).field("message", message).field("url", url);
if (details != null) {
builder.field("details", details);
}
builder.field("resolve_during_rolling_upgrade", resolveDuringRollingUpgrade);
if (meta != null) {
builder.field("_meta", meta);
}
return builder.endObject();
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
DeprecationIssue that = (DeprecationIssue) o;
return Objects.equals(level, that.level)
&& Objects.equals(message, that.message)
&& Objects.equals(url, that.url)
&& Objects.equals(details, that.details)
&& Objects.equals(resolveDuringRollingUpgrade, that.resolveDuringRollingUpgrade)
&& Objects.equals(meta, that.meta);
}
@Override
public int hashCode() {
return Objects.hash(level, message, url, details, resolveDuringRollingUpgrade, meta);
}
@Override
public String toString() {
return Strings.toString(this);
}
public static Map<String, Object> createMetaMapForRemovableSettings(List<String> removableSettings) {
return Meta.fromRemovableSettings(removableSettings).toMetaMap();
}
/**
* This method returns a DeprecationIssue that has in its meta object the intersection of all auto-removable settings that appear on
* all of the DeprecationIssues that are passed in. This method assumes that all DeprecationIssues passed in are equal, except for the
* auto-removable settings in the meta object.
* @param similarIssues DeprecationIssues that are assumed to be identical except possibly removal actions.
* @return A DeprecationIssue containing only the removal actions that are in all similarIssues
*/
public static DeprecationIssue getIntersectionOfRemovableSettings(List<DeprecationIssue> similarIssues) {
if (similarIssues == null || similarIssues.isEmpty()) {
return null;
}
if (similarIssues.size() == 1) {
return similarIssues.get(0);
}
DeprecationIssue representativeIssue = similarIssues.get(0);
Optional<Meta> metaIntersection = similarIssues.stream()
.map(DeprecationIssue::getMetaObject)
.reduce(
representativeIssue.getMetaObject(),
(intersectionSoFar, meta) -> intersectionSoFar.isPresent() && meta.isPresent()
? Optional.of(intersectionSoFar.get().getIntersection(meta.get()))
: Optional.empty()
);
return new DeprecationIssue(
representativeIssue.level,
representativeIssue.message,
representativeIssue.url,
representativeIssue.details,
representativeIssue.resolveDuringRollingUpgrade,
metaIntersection.map(Meta::toMetaMap).orElse(null)
);
}
/*
* This
|
Level
|
java
|
apache__rocketmq
|
remoting/src/test/java/org/apache/rocketmq/remoting/RemotingServerTest.java
|
{
"start": 5813,
"end": 6363
}
|
class ____ implements CommandCustomHeader {
@CFNullable
private Integer count;
@CFNullable
private String messageTitle;
@Override
public void checkFields() throws RemotingCommandException {
}
public Integer getCount() {
return count;
}
public void setCount(Integer count) {
this.count = count;
}
public String getMessageTitle() {
return messageTitle;
}
public void setMessageTitle(String messageTitle) {
this.messageTitle = messageTitle;
}
}
|
RequestHeader
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/script/DoubleValuesScript.java
|
{
"start": 828,
"end": 1495
}
|
class ____ {
public DoubleValuesScript() {}
public abstract double execute();
public abstract double evaluate(DoubleValues[] functionValues);
public abstract DoubleValuesSource getDoubleValuesSource(Function<String, DoubleValuesSource> sourceProvider);
public abstract SortField getSortField(Function<String, DoubleValuesSource> sourceProvider, boolean reverse);
public abstract Rescorer getRescorer(Function<String, DoubleValuesSource> sourceProvider);
public abstract String sourceText();
public abstract String[] variables();
/** A factory to construct {@link DoubleValuesScript} instances. */
public
|
DoubleValuesScript
|
java
|
alibaba__druid
|
core/src/main/java/com/alibaba/druid/sql/dialect/presto/parser/PrestoCreateTableParser.java
|
{
"start": 1077,
"end": 2155
}
|
class ____ extends SQLCreateTableParser {
public PrestoCreateTableParser(String sql) {
super(sql);
}
public PrestoCreateTableParser(SQLExprParser exprParser) {
super(exprParser);
}
@Override
protected void parseCreateTableRest(SQLCreateTableStatement stmt) {
if (lexer.token() == Token.COMMENT) {
lexer.nextToken();
SQLExpr comment = this.exprParser.expr();
stmt.setComment(comment);
}
if (lexer.token() == Token.WITH) {
lexer.nextToken();
accept(Token.LPAREN);
parseAssignItems(stmt.getTableOptions(), stmt, false);
accept(Token.RPAREN);
}
if (lexer.token() == Token.AS) {
lexer.nextToken();
SQLSelect select = this.createSQLSelectParser().select();
stmt.setSelect(select);
}
super.parseCreateTableRest(stmt);
}
protected PrestoCreateTableStatement newCreateStatement() {
return new PrestoCreateTableStatement();
}
}
|
PrestoCreateTableParser
|
java
|
apache__flink
|
flink-runtime/src/main/java/org/apache/flink/runtime/registration/RegistrationConnectionListener.java
|
{
"start": 1013,
"end": 2155
}
|
interface ____<
T extends RegisteredRpcConnection<?, ?, S, ?>,
S extends RegistrationResponse.Success,
R extends RegistrationResponse.Rejection> {
/**
* This method is called by the {@link RegisteredRpcConnection} when the registration is
* success.
*
* @param success The concrete response information for successful registration.
* @param connection The instance which established the connection
*/
void onRegistrationSuccess(T connection, S success);
/**
* This method is called by the {@link RegisteredRpcConnection} when the registration fails.
*
* @param failure The exception which causes the registration failure.
*/
void onRegistrationFailure(Throwable failure);
/**
* This method is called by the {@link RegisteredRpcConnection} when the registration is
* rejected.
*
* @param targetAddress targetAddress from which the registration was rejected.
* @param rejection rejection containing more information.
*/
void onRegistrationRejection(String targetAddress, R rejection);
}
|
RegistrationConnectionListener
|
java
|
spring-projects__spring-framework
|
spring-webmvc/src/main/java/org/springframework/web/servlet/handler/AbstractHandlerMethodMapping.java
|
{
"start": 1996,
"end": 2549
}
|
class ____ {@link HandlerMapping} implementations that define
* a mapping between a request and a {@link HandlerMethod}.
*
* <p>For each registered handler method, a unique mapping is maintained with
* subclasses defining the details of the mapping type {@code <T>}.
*
* @author Arjen Poutsma
* @author Rossen Stoyanchev
* @author Juergen Hoeller
* @author Sam Brannen
* @since 3.1
* @param <T> the mapping for a {@link HandlerMethod} containing the conditions
* needed to match the handler method to an incoming request.
*/
public abstract
|
for
|
java
|
alibaba__fastjson
|
src/test/java/com/alibaba/json/bvt/parser/deser/SmartMatchTest_boolean_is.java
|
{
"start": 420,
"end": 647
}
|
class ____ {
private boolean visible;
public boolean isVisible() {
return visible;
}
public void setVisible(boolean visible) {
this.visible = visible;
}
}
}
|
VO
|
java
|
apache__flink
|
flink-runtime/src/main/java/org/apache/flink/runtime/io/network/partition/PartitionRequestListener.java
|
{
"start": 1390,
"end": 2560
}
|
interface ____ {
/**
* The creation timestamp of this notifier, it's used to check whether the notifier is timeout.
*
* @return the creation timestamp
*/
long getCreateTimestamp();
/**
* Get the result partition id of the notifier.
*
* @return the result partition id
*/
ResultPartitionID getResultPartitionId();
/**
* Get the view reader of the notifier.
*
* @return the view reader
*/
NetworkSequenceViewReader getViewReader();
/**
* Get the input channel id of the notifier.
*
* @return the input channel id
*/
InputChannelID getReceiverId();
/**
* Notify the partition request listener when the given partition is registered.
*
* @param partition The registered partition.
*/
void notifyPartitionCreated(ResultPartition partition) throws IOException;
/**
* When the partition request listener is timeout, it will be notified to send {@link
* PartitionNotFoundException}.
*/
void notifyPartitionCreatedTimeout();
/** Release this listener. */
void releaseListener();
}
|
PartitionRequestListener
|
java
|
alibaba__nacos
|
common/src/main/java/com/alibaba/nacos/common/utils/PropertyUtils.java
|
{
"start": 761,
"end": 2411
}
|
class ____ {
private PropertyUtils() {
}
private static final String PROCESSORS_ENV_NAME = "NACOS_COMMON_PROCESSORS";
private static final String PROCESSORS_PROP_NAME = "nacos.common.processors";
/**
* Get system env or property value.
*
* <p>If {@link System#getenv()} has no value for {@code envName},
* return {@link System#getProperty(String)}.
*/
public static String getProperty(String propertyName, String envName) {
return System.getenv().getOrDefault(envName, System.getProperty(propertyName));
}
/**
* Get system env or property value.
*
* <p>If {@link System#getenv()} has no value for {@code envName},
* return {@link System#getProperty(String, String)} or {@code defaultValue}.
*/
public static String getProperty(String propertyName, String envName, String defaultValue) {
return System.getenv().getOrDefault(envName, System.getProperty(propertyName, defaultValue));
}
/**
* Get processors count maybe preset by env or property.
*/
public static int getProcessorsCount() {
int processorsCount = 0;
String processorsCountPreSet = getProperty(PROCESSORS_PROP_NAME, PROCESSORS_ENV_NAME);
if (processorsCountPreSet != null) {
try {
processorsCount = Integer.parseInt(processorsCountPreSet);
} catch (NumberFormatException ignored) {
}
}
if (processorsCount <= 0) {
processorsCount = Runtime.getRuntime().availableProcessors();
}
return processorsCount;
}
}
|
PropertyUtils
|
java
|
elastic__elasticsearch
|
x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/job/DateHistogramGroupConfig.java
|
{
"start": 1926,
"end": 5628
}
|
class ____ implements Writeable, ToXContentObject {
static final String NAME = "date_histogram";
public static final String INTERVAL = "interval";
public static final String FIXED_INTERVAL = "fixed_interval";
public static final String CALENDAR_INTERVAL = "calendar_interval";
public static final String TIME_ZONE = "time_zone";
public static final String DELAY = "delay";
private static final String DEFAULT_TIMEZONE = "UTC";
public static final ZoneId DEFAULT_ZONEID_TIMEZONE = ZoneOffset.UTC;
private static final String FIELD = "field";
private static final String TYPE_NAME = "interval";
private static final ConstructingObjectParser<DateHistogramGroupConfig, Void> PARSER;
static {
PARSER = new ConstructingObjectParser<>(NAME, a -> {
DateHistogramInterval oldInterval = (DateHistogramInterval) a[1];
DateHistogramInterval calendarInterval = (DateHistogramInterval) a[2];
DateHistogramInterval fixedInterval = (DateHistogramInterval) a[3];
if (oldInterval != null) {
if (calendarInterval != null || fixedInterval != null) {
throw new IllegalArgumentException(
"Cannot use [interval] with [fixed_interval] or [calendar_interval] " + "configuration options."
);
}
return fromUnknownTimeUnit((String) a[0], oldInterval, (DateHistogramInterval) a[4], (String) a[5]);
} else if (calendarInterval != null && fixedInterval == null) {
return new CalendarInterval((String) a[0], calendarInterval, (DateHistogramInterval) a[4], (String) a[5]);
} else if (calendarInterval == null && fixedInterval != null) {
return new FixedInterval((String) a[0], fixedInterval, (DateHistogramInterval) a[4], (String) a[5]);
} else if (calendarInterval != null && fixedInterval != null) {
throw new IllegalArgumentException("Cannot set both [fixed_interval] and [calendar_interval] at the same time");
} else {
throw new IllegalArgumentException("An interval is required. Use [fixed_interval] or [calendar_interval].");
}
});
PARSER.declareString(constructorArg(), new ParseField(FIELD));
PARSER.declareField(optionalConstructorArg(), p -> new DateHistogramInterval(p.text()), new ParseField(INTERVAL), ValueType.STRING);
PARSER.declareField(
optionalConstructorArg(),
p -> new DateHistogramInterval(p.text()),
new ParseField(CALENDAR_INTERVAL),
ValueType.STRING
);
PARSER.declareField(
optionalConstructorArg(),
p -> new DateHistogramInterval(p.text()),
new ParseField(FIXED_INTERVAL),
ValueType.STRING
);
PARSER.declareField(optionalConstructorArg(), p -> new DateHistogramInterval(p.text()), new ParseField(DELAY), ValueType.STRING);
PARSER.declareString(optionalConstructorArg(), new ParseField(TIME_ZONE));
}
private final String field;
private final DateHistogramInterval interval;
private final DateHistogramInterval delay;
private final String timeZone;
/**
* FixedInterval is a {@link DateHistogramGroupConfig} that uses a fixed time interval for rolling up data.
* The fixed time interval is one or multiples of SI units and has no calendar-awareness (e.g. doesn't account
* for leap corrections, does not have variable length months, etc).
*
* For calendar-aware rollups, use {@link CalendarInterval}
*/
public static
|
DateHistogramGroupConfig
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/notfound/OptionalEagerRefNonPKNotFoundTest.java
|
{
"start": 18242,
"end": 18849
}
|
class ____ extends Person {
@Id
private Long id;
@OneToOne(cascade = CascadeType.PERSIST)
@MapsId
@JoinColumn(
name = "cityName",
referencedColumnName = "name",
foreignKey = @ForeignKey(ConstraintMode.NO_CONSTRAINT)
)
@Fetch(FetchMode.JOIN)
private City city;
public Long getId() {
return id;
}
public void setId(Long id) {
this.id = id;
}
public City getCity() {
return city;
}
@Override
public void setCity(City city) {
this.city = city;
}
}
@Entity
@Table(name = "PersonMapsIdColumnJoinIgnore")
public static
|
PersonMapsIdColumnJoinException
|
java
|
spring-projects__spring-security
|
oauth2/oauth2-client/src/test/java/org/springframework/security/oauth2/client/web/client/ClientRegistrationIdProcessorWebClientTests.java
|
{
"start": 1916,
"end": 3742
}
|
class ____ extends AbstractMockServerClientRegistrationIdProcessorTests {
@Test
void clientRegistrationIdProcessorWorksWithReactiveWebClient() throws InterruptedException {
ReactiveOAuth2AuthorizedClientManager authorizedClientManager = mock(
ReactiveOAuth2AuthorizedClientManager.class);
ServerOAuth2AuthorizedClientExchangeFilterFunction oauth2Client = new ServerOAuth2AuthorizedClientExchangeFilterFunction(
authorizedClientManager);
WebClient.Builder builder = WebClient.builder().filter(oauth2Client).baseUrl(this.baseUrl);
ArgumentCaptor<OAuth2AuthorizeRequest> authorizeRequest = ArgumentCaptor.forClass(OAuth2AuthorizeRequest.class);
given(authorizedClientManager.authorize(authorizeRequest.capture()))
.willReturn(Mono.just(this.authorizedClient));
testWithAdapter(WebClientAdapter.create(builder.build()));
assertThat(authorizeRequest.getValue().getClientRegistrationId()).isEqualTo(REGISTRATION_ID);
}
@Test
void clientRegistrationIdProcessorWorksWithServletWebClient() throws InterruptedException {
OAuth2AuthorizedClientManager authorizedClientManager = mock(OAuth2AuthorizedClientManager.class);
ServletOAuth2AuthorizedClientExchangeFilterFunction oauth2Client = new ServletOAuth2AuthorizedClientExchangeFilterFunction(
authorizedClientManager);
WebClient.Builder builder = WebClient.builder().filter(oauth2Client).baseUrl(this.baseUrl);
ArgumentCaptor<OAuth2AuthorizeRequest> authorizeRequest = ArgumentCaptor.forClass(OAuth2AuthorizeRequest.class);
given(authorizedClientManager.authorize(authorizeRequest.capture())).willReturn(this.authorizedClient);
testWithAdapter(WebClientAdapter.create(builder.build()));
assertThat(authorizeRequest.getValue().getClientRegistrationId()).isEqualTo(REGISTRATION_ID);
}
}
|
ClientRegistrationIdProcessorWebClientTests
|
java
|
quarkusio__quarkus
|
integration-tests/spring-web/src/main/java/io/quarkus/it/spring/web/HandledPojoException.java
|
{
"start": 42,
"end": 179
}
|
class ____ extends RuntimeException {
public HandledPojoException(String message) {
super(message);
}
}
|
HandledPojoException
|
java
|
elastic__elasticsearch
|
x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopIntIntAggregatorFunction.java
|
{
"start": 946,
"end": 7864
}
|
class ____ implements AggregatorFunction {
private static final List<IntermediateStateDesc> INTERMEDIATE_STATE_DESC = List.of(
new IntermediateStateDesc("top", ElementType.INT),
new IntermediateStateDesc("output", ElementType.INT) );
private final DriverContext driverContext;
private final TopIntIntAggregator.SingleState state;
private final List<Integer> channels;
private final int limit;
private final boolean ascending;
public TopIntIntAggregatorFunction(DriverContext driverContext, List<Integer> channels,
TopIntIntAggregator.SingleState state, int limit, boolean ascending) {
this.driverContext = driverContext;
this.channels = channels;
this.state = state;
this.limit = limit;
this.ascending = ascending;
}
public static TopIntIntAggregatorFunction create(DriverContext driverContext,
List<Integer> channels, int limit, boolean ascending) {
return new TopIntIntAggregatorFunction(driverContext, channels, TopIntIntAggregator.initSingle(driverContext.bigArrays(), limit, ascending), limit, ascending);
}
public static List<IntermediateStateDesc> intermediateStateDesc() {
return INTERMEDIATE_STATE_DESC;
}
@Override
public int intermediateBlockCount() {
return INTERMEDIATE_STATE_DESC.size();
}
@Override
public void addRawInput(Page page, BooleanVector mask) {
if (mask.allFalse()) {
// Entire page masked away
} else if (mask.allTrue()) {
addRawInputNotMasked(page);
} else {
addRawInputMasked(page, mask);
}
}
private void addRawInputMasked(Page page, BooleanVector mask) {
IntBlock vBlock = page.getBlock(channels.get(0));
IntBlock outputValueBlock = page.getBlock(channels.get(1));
IntVector vVector = vBlock.asVector();
if (vVector == null) {
addRawBlock(vBlock, outputValueBlock, mask);
return;
}
IntVector outputValueVector = outputValueBlock.asVector();
if (outputValueVector == null) {
addRawBlock(vBlock, outputValueBlock, mask);
return;
}
addRawVector(vVector, outputValueVector, mask);
}
private void addRawInputNotMasked(Page page) {
IntBlock vBlock = page.getBlock(channels.get(0));
IntBlock outputValueBlock = page.getBlock(channels.get(1));
IntVector vVector = vBlock.asVector();
if (vVector == null) {
addRawBlock(vBlock, outputValueBlock);
return;
}
IntVector outputValueVector = outputValueBlock.asVector();
if (outputValueVector == null) {
addRawBlock(vBlock, outputValueBlock);
return;
}
addRawVector(vVector, outputValueVector);
}
private void addRawVector(IntVector vVector, IntVector outputValueVector) {
for (int valuesPosition = 0; valuesPosition < vVector.getPositionCount(); valuesPosition++) {
int vValue = vVector.getInt(valuesPosition);
int outputValueValue = outputValueVector.getInt(valuesPosition);
TopIntIntAggregator.combine(state, vValue, outputValueValue);
}
}
private void addRawVector(IntVector vVector, IntVector outputValueVector, BooleanVector mask) {
for (int valuesPosition = 0; valuesPosition < vVector.getPositionCount(); valuesPosition++) {
if (mask.getBoolean(valuesPosition) == false) {
continue;
}
int vValue = vVector.getInt(valuesPosition);
int outputValueValue = outputValueVector.getInt(valuesPosition);
TopIntIntAggregator.combine(state, vValue, outputValueValue);
}
}
private void addRawBlock(IntBlock vBlock, IntBlock outputValueBlock) {
for (int p = 0; p < vBlock.getPositionCount(); p++) {
int vValueCount = vBlock.getValueCount(p);
if (vValueCount == 0) {
continue;
}
int outputValueValueCount = outputValueBlock.getValueCount(p);
if (outputValueValueCount == 0) {
continue;
}
int vStart = vBlock.getFirstValueIndex(p);
int vEnd = vStart + vValueCount;
for (int vOffset = vStart; vOffset < vEnd; vOffset++) {
int vValue = vBlock.getInt(vOffset);
int outputValueStart = outputValueBlock.getFirstValueIndex(p);
int outputValueEnd = outputValueStart + outputValueValueCount;
for (int outputValueOffset = outputValueStart; outputValueOffset < outputValueEnd; outputValueOffset++) {
int outputValueValue = outputValueBlock.getInt(outputValueOffset);
TopIntIntAggregator.combine(state, vValue, outputValueValue);
}
}
}
}
private void addRawBlock(IntBlock vBlock, IntBlock outputValueBlock, BooleanVector mask) {
for (int p = 0; p < vBlock.getPositionCount(); p++) {
if (mask.getBoolean(p) == false) {
continue;
}
int vValueCount = vBlock.getValueCount(p);
if (vValueCount == 0) {
continue;
}
int outputValueValueCount = outputValueBlock.getValueCount(p);
if (outputValueValueCount == 0) {
continue;
}
int vStart = vBlock.getFirstValueIndex(p);
int vEnd = vStart + vValueCount;
for (int vOffset = vStart; vOffset < vEnd; vOffset++) {
int vValue = vBlock.getInt(vOffset);
int outputValueStart = outputValueBlock.getFirstValueIndex(p);
int outputValueEnd = outputValueStart + outputValueValueCount;
for (int outputValueOffset = outputValueStart; outputValueOffset < outputValueEnd; outputValueOffset++) {
int outputValueValue = outputValueBlock.getInt(outputValueOffset);
TopIntIntAggregator.combine(state, vValue, outputValueValue);
}
}
}
}
@Override
public void addIntermediateInput(Page page) {
assert channels.size() == intermediateBlockCount();
assert page.getBlockCount() >= channels.get(0) + intermediateStateDesc().size();
Block topUncast = page.getBlock(channels.get(0));
if (topUncast.areAllValuesNull()) {
return;
}
IntBlock top = (IntBlock) topUncast;
assert top.getPositionCount() == 1;
Block outputUncast = page.getBlock(channels.get(1));
if (outputUncast.areAllValuesNull()) {
return;
}
IntBlock output = (IntBlock) outputUncast;
assert output.getPositionCount() == 1;
TopIntIntAggregator.combineIntermediate(state, top, output);
}
@Override
public void evaluateIntermediate(Block[] blocks, int offset, DriverContext driverContext) {
state.toIntermediate(blocks, offset, driverContext);
}
@Override
public void evaluateFinal(Block[] blocks, int offset, DriverContext driverContext) {
blocks[offset] = TopIntIntAggregator.evaluateFinal(state, driverContext);
}
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append(getClass().getSimpleName()).append("[");
sb.append("channels=").append(channels);
sb.append("]");
return sb.toString();
}
@Override
public void close() {
state.close();
}
}
|
TopIntIntAggregatorFunction
|
java
|
apache__camel
|
components/camel-jetty/src/test/java/org/apache/camel/component/jetty/rest/RestJettyClientResponseValidationResponseCodeTest.java
|
{
"start": 1440,
"end": 4588
}
|
class ____ extends BaseJettyTest {
@Test
public void testResponseCode() {
FluentProducerTemplate requestTemplate = fluentTemplate.withHeader(Exchange.CONTENT_TYPE, "application/json")
.withHeader(Exchange.HTTP_METHOD, "post")
.withBody("{\"name\": \"Donald\"}") // the body is ok
.to("http://localhost:" + getPort() + "/users/123/update");
Exception ex = assertThrows(CamelExecutionException.class, () -> requestTemplate.request(String.class));
HttpOperationFailedException cause = assertIsInstanceOf(HttpOperationFailedException.class, ex.getCause());
assertEquals(500, cause.getStatusCode());
assertEquals("Invalid content-type: application/xml for response code: 200", cause.getResponseBody());
}
@Test
public void testResponseHeader() {
FluentProducerTemplate requestTemplate = fluentTemplate.withHeader(Exchange.CONTENT_TYPE, "application/json")
.withHeader(Exchange.HTTP_METHOD, "post")
.withBody("{\"name\": \"Donald\"}") // the body is ok
.to("http://localhost:" + getPort() + "/users/123/update2");
Exception ex = assertThrows(CamelExecutionException.class, () -> requestTemplate.request(String.class));
HttpOperationFailedException cause = assertIsInstanceOf(HttpOperationFailedException.class, ex.getCause());
assertEquals(500, cause.getStatusCode());
assertEquals("Some of the response HTTP headers are missing.", cause.getResponseBody());
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
// configure to use jetty on localhost with the given port
restConfiguration().component("jetty").host("localhost").port(getPort())
.bindingMode(RestBindingMode.json)
// turn on response validation
.clientResponseValidation(true);
// use the rest DSL to define the rest services
rest("/users/").post("{id}/update")
.consumes("application/json").produces("application/json")
.responseMessage().code(200).contentType("application/json").message("updates an user").endResponseMessage()
.to("direct:update");
rest("/users/").post("{id}/update2")
.consumes("application/json").produces("application/xml")
.responseMessage().code(200).contentType("application/xml").message("updates an user")
.header("category").description("The category of the user").endResponseHeader()
.endResponseMessage()
.to("direct:update");
from("direct:update")
.setHeader(Exchange.CONTENT_TYPE, constant("application/xml"))
.setBody(constant("<status>ok</status>"));
}
};
}
}
|
RestJettyClientResponseValidationResponseCodeTest
|
java
|
apache__camel
|
dsl/camel-componentdsl/src/generated/java/org/apache/camel/builder/component/dsl/TahuHostComponentBuilderFactory.java
|
{
"start": 1913,
"end": 9869
}
|
interface ____ extends ComponentBuilder<TahuHostComponent> {
/**
* MQTT client ID length check enabled.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: common
*
* @param checkClientIdLength the value to set
* @return the dsl builder
*/
default TahuHostComponentBuilder checkClientIdLength(boolean checkClientIdLength) {
doSetProperty("checkClientIdLength", checkClientIdLength);
return this;
}
/**
* MQTT client ID to use for all server definitions, rather than
* specifying the same one for each. Note that if neither the 'clientId'
* parameter nor an 'MqttClientId' are defined for an MQTT Server, a
* random MQTT Client ID will be generated automatically, prefaced with
* 'Camel'.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: common
*
* @param clientId the value to set
* @return the dsl builder
*/
default TahuHostComponentBuilder clientId(java.lang.String clientId) {
doSetProperty("clientId", clientId);
return this;
}
/**
* MQTT connection keep alive timeout, in seconds.
*
* The option is a: <code>int</code> type.
*
* Default: 30
* Group: common
*
* @param keepAliveTimeout the value to set
* @return the dsl builder
*/
default TahuHostComponentBuilder keepAliveTimeout(int keepAliveTimeout) {
doSetProperty("keepAliveTimeout", keepAliveTimeout);
return this;
}
/**
* Delay before recurring node rebirth messages will be sent.
*
* The option is a: <code>long</code> type.
*
* Default: 5000
* Group: common
*
* @param rebirthDebounceDelay the value to set
* @return the dsl builder
*/
default TahuHostComponentBuilder rebirthDebounceDelay(long rebirthDebounceDelay) {
doSetProperty("rebirthDebounceDelay", rebirthDebounceDelay);
return this;
}
/**
* MQTT server definitions, given with the following syntax in a
* comma-separated list:
* MqttServerName:(MqttClientId:)(tcp/ssl)://hostname(:port),...
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: common
*
* @param servers the value to set
* @return the dsl builder
*/
default TahuHostComponentBuilder servers(java.lang.String servers) {
doSetProperty("servers", servers);
return this;
}
/**
* Allows for bridging the consumer to the Camel routing Error Handler,
* which mean any exceptions (if possible) occurred while the Camel
* consumer is trying to pickup incoming messages, or the likes, will
* now be processed as a message and handled by the routing Error
* Handler. Important: This is only possible if the 3rd party component
* allows Camel to be alerted if an exception was thrown. Some
* components handle this internally only, and therefore
* bridgeErrorHandler is not possible. In other situations we may
* improve the Camel component to hook into the 3rd party component and
* make this possible for future releases. By default the consumer will
* use the org.apache.camel.spi.ExceptionHandler to deal with
* exceptions, that will be logged at WARN or ERROR level and ignored.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: consumer
*
* @param bridgeErrorHandler the value to set
* @return the dsl builder
*/
default TahuHostComponentBuilder bridgeErrorHandler(boolean bridgeErrorHandler) {
doSetProperty("bridgeErrorHandler", bridgeErrorHandler);
return this;
}
/**
* Whether autowiring is enabled. This is used for automatic autowiring
* options (the option must be marked as autowired) by looking up in the
* registry to find if there is a single instance of matching type,
* which then gets configured on the component. This can be used for
* automatic configuring JDBC data sources, JMS connection factories,
* AWS Clients, etc.
*
* The option is a: <code>boolean</code> type.
*
* Default: true
* Group: advanced
*
* @param autowiredEnabled the value to set
* @return the dsl builder
*/
default TahuHostComponentBuilder autowiredEnabled(boolean autowiredEnabled) {
doSetProperty("autowiredEnabled", autowiredEnabled);
return this;
}
/**
* To use a shared Tahu configuration.
*
* The option is a:
* <code>org.apache.camel.component.tahu.TahuConfiguration</code> type.
*
* Group: advanced
*
* @param configuration the value to set
* @return the dsl builder
*/
default TahuHostComponentBuilder configuration(org.apache.camel.component.tahu.TahuConfiguration configuration) {
doSetProperty("configuration", configuration);
return this;
}
/**
* Password for MQTT server authentication.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: security
*
* @param password the value to set
* @return the dsl builder
*/
default TahuHostComponentBuilder password(java.lang.String password) {
doSetProperty("password", password);
return this;
}
/**
* SSL configuration for MQTT server connections.
*
* The option is a:
* <code>org.apache.camel.support.jsse.SSLContextParameters</code> type.
*
* Group: security
*
* @param sslContextParameters the value to set
* @return the dsl builder
*/
default TahuHostComponentBuilder sslContextParameters(org.apache.camel.support.jsse.SSLContextParameters sslContextParameters) {
doSetProperty("sslContextParameters", sslContextParameters);
return this;
}
/**
* Enable/disable global SSL context parameters use.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: security
*
* @param useGlobalSslContextParameters the value to set
* @return the dsl builder
*/
default TahuHostComponentBuilder useGlobalSslContextParameters(boolean useGlobalSslContextParameters) {
doSetProperty("useGlobalSslContextParameters", useGlobalSslContextParameters);
return this;
}
/**
* Username for MQTT server authentication.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: security
*
* @param username the value to set
* @return the dsl builder
*/
default TahuHostComponentBuilder username(java.lang.String username) {
doSetProperty("username", username);
return this;
}
}
|
TahuHostComponentBuilder
|
java
|
grpc__grpc-java
|
xds/src/generated/thirdparty/grpc/io/envoyproxy/envoy/service/rate_limit_quota/v3/RateLimitQuotaServiceGrpc.java
|
{
"start": 13785,
"end": 14970
}
|
class ____
extends RateLimitQuotaServiceBaseDescriptorSupplier
implements io.grpc.protobuf.ProtoMethodDescriptorSupplier {
private final java.lang.String methodName;
RateLimitQuotaServiceMethodDescriptorSupplier(java.lang.String methodName) {
this.methodName = methodName;
}
@java.lang.Override
public com.google.protobuf.Descriptors.MethodDescriptor getMethodDescriptor() {
return getServiceDescriptor().findMethodByName(methodName);
}
}
private static volatile io.grpc.ServiceDescriptor serviceDescriptor;
public static io.grpc.ServiceDescriptor getServiceDescriptor() {
io.grpc.ServiceDescriptor result = serviceDescriptor;
if (result == null) {
synchronized (RateLimitQuotaServiceGrpc.class) {
result = serviceDescriptor;
if (result == null) {
serviceDescriptor = result = io.grpc.ServiceDescriptor.newBuilder(SERVICE_NAME)
.setSchemaDescriptor(new RateLimitQuotaServiceFileDescriptorSupplier())
.addMethod(getStreamRateLimitQuotasMethod())
.build();
}
}
}
return result;
}
}
|
RateLimitQuotaServiceMethodDescriptorSupplier
|
java
|
spring-projects__spring-framework
|
spring-context/src/test/java/org/springframework/context/annotation/BeanMethodPolymorphismTests.java
|
{
"start": 9112,
"end": 9175
}
|
class ____ extends BaseConfig {
}
@Configuration
static
|
Config
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/boot/model/source/spi/EntitySource.java
|
{
"start": 1396,
"end": 3909
}
|
interface ____
*/
String getProxy();
/**
* Obtain the batch-size to be applied when initializing proxies of this entity.
*
* @return returns the batch-size.
*/
int getBatchSize();
/**
* Is the entity abstract?
* <p>
* The implication is whether the entity maps to a database table.
*
* @return {@code true} indicates the entity is abstract; {@code false} non-abstract; {@code null}
* indicates that a reflection check should be done when building the persister.
*/
Boolean isAbstract();
/**
* Did the source specify dynamic inserts?
*
* @return {@code true} indicates dynamic inserts will be used; {@code false} otherwise.
*/
boolean isDynamicInsert();
/**
* Did the source specify dynamic updates?
*
* @return {@code true} indicates dynamic updates will be used; {@code false} otherwise.
*/
boolean isDynamicUpdate();
/**
* Did the source specify to perform selects to decide whether to perform (detached) updates?
*
* @return {@code true} indicates selects will be done; {@code false} otherwise.
*/
boolean isSelectBeforeUpdate();
/**
* Obtain the name of a named-query that will be used for loading this entity
*
* @return THe custom loader query name
*/
String getCustomLoaderName();
/**
* Obtain the custom SQL to be used for inserts for this entity
*
* @return The custom insert SQL
*/
CustomSql getCustomSqlInsert();
/**
* Obtain the custom SQL to be used for updates for this entity
*
* @return The custom update SQL
*/
CustomSql getCustomSqlUpdate();
/**
* Obtain the custom SQL to be used for deletes for this entity
*
* @return The custom delete SQL
*/
CustomSql getCustomSqlDelete();
/**
* Obtain any additional table names on which to synchronize (auto flushing) this entity.
*
* @return Additional synchronized table names or 0 sized String array, never return null.
*/
String[] getSynchronizedTableNames();
/**
* Get the actual discriminator value in case of a single table inheritance
*
* @return the actual discriminator value in case of a single table inheritance or {@code null} in case there is no
* explicit value or a different inheritance scheme
*/
String getDiscriminatorMatchValue();
/**
* Obtain the filters for this entity.
*
* @return returns an array of the filters for this entity.
*/
FilterSource[] getFilterSources();
List<JaxbHbmNamedQueryType> getNamedQueries();
List<JaxbHbmNamedNativeQueryType> getNamedNativeQueries();
}
|
name
|
java
|
elastic__elasticsearch
|
x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorLastSeenAction.java
|
{
"start": 1198,
"end": 2958
}
|
class ____ extends ConnectorActionRequest implements ToXContentObject {
private final String connectorId;
public Request(String connectorId) {
this.connectorId = connectorId;
}
public Request(StreamInput in) throws IOException {
super(in);
this.connectorId = in.readString();
}
public String getConnectorId() {
return connectorId;
}
@Override
public ActionRequestValidationException validate() {
ActionRequestValidationException validationException = null;
if (Strings.isNullOrEmpty(connectorId)) {
validationException = addValidationError("[connector_id] cannot be [null] or [\"\"].", validationException);
}
return validationException;
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
{
builder.field(Connector.ID_FIELD.getPreferredName(), connectorId);
}
builder.endObject();
return builder;
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
out.writeString(connectorId);
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
Request request = (Request) o;
return Objects.equals(connectorId, request.connectorId);
}
@Override
public int hashCode() {
return Objects.hash(connectorId);
}
}
}
|
Request
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/jpa/EntityWithCompositeId.java
|
{
"start": 261,
"end": 637
}
|
class ____ implements Serializable {
@EmbeddedId
private CompositeId id;
private String description;
public CompositeId getId() {
return id;
}
public void setId(CompositeId id) {
this.id = id;
}
public String getDescription() {
return description;
}
public void setDescription(String description) {
this.description = description;
}
}
|
EntityWithCompositeId
|
java
|
google__truth
|
core/src/main/java/com/google/common/truth/GuavaOptionalSubject.java
|
{
"start": 986,
"end": 3079
}
|
class ____ extends Subject {
@SuppressWarnings("NullableOptional") // Truth always accepts nulls, no matter the type
private final @Nullable Optional<?> actual;
private GuavaOptionalSubject(
FailureMetadata metadata,
@SuppressWarnings("NullableOptional") // Truth always accepts nulls, no matter the type
@Nullable Optional<?> actual) {
super(metadata, actual);
this.actual = actual;
}
/** Checks that the actual {@link Optional} contains a value. */
public void isPresent() {
if (actual == null) {
failWithActual(simpleFact("expected present optional"));
} else if (!actual.isPresent()) {
failWithoutActual(simpleFact("expected to be present"));
}
}
/** Checks that the actual {@link Optional} does not contain a value. */
public void isAbsent() {
if (actual == null) {
failWithActual(simpleFact("expected absent optional"));
} else if (actual.isPresent()) {
failWithoutActual(
simpleFact("expected to be absent"), fact("but was present with value", actual.get()));
}
}
/**
* Checks that the actual {@link Optional} contains the given value.
*
* <p>To make more complex assertions on the optional's value, split your assertion in two:
*
* <pre>{@code
* assertThat(myOptional).isPresent();
* assertThat(myOptional.get()).contains("foo");
* }</pre>
*/
public void hasValue(@Nullable Object expected) {
if (expected == null) {
failWithoutActual(
simpleFact("expected an optional with a null value, but that is impossible"),
fact("was", actual));
} else if (actual == null) {
failWithActual("expected an optional with value", expected);
} else if (!actual.isPresent()) {
failWithoutActual(fact("expected to have value", expected), simpleFact("but was absent"));
} else {
checkNoNeedToDisplayBothValues("get()").that(actual.get()).isEqualTo(expected);
}
}
static Factory<GuavaOptionalSubject, Optional<?>> guavaOptionals() {
return GuavaOptionalSubject::new;
}
}
|
GuavaOptionalSubject
|
java
|
hibernate__hibernate-orm
|
tooling/metamodel-generator/src/main/java/org/hibernate/processor/annotation/CDIAccessorMetaAttribute.java
|
{
"start": 337,
"end": 2545
}
|
class ____ implements MetaAttribute {
private AnnotationMetaEntity annotationMetaEntity;
private String propertyName;
private String typeName;
public CDIAccessorMetaAttribute(AnnotationMetaEntity annotationMetaEntity, Element repositoryElement) {
this.annotationMetaEntity = annotationMetaEntity;
// turn the name into lowercase
String name = repositoryElement.getSimpleName().toString();
// FIXME: this is wrong for types like STEFQueries
this.propertyName = StringUtil.decapitalize( name );
this.typeName = name;
}
public CDIAccessorMetaAttribute(AnnotationMetaEntity annotationMetaEntity, String propertyName, String className) {
this.annotationMetaEntity = annotationMetaEntity;
this.propertyName = propertyName;
this.typeName = className;
}
@Override
public boolean hasTypedAttribute() {
return true;
}
@Override
public boolean hasStringAttribute() {
return false;
}
@Override
public String getAttributeDeclarationString() {
final StringBuilder declaration = new StringBuilder();
modifiers( declaration );
preamble( declaration );
returnCDI( declaration );
closingBrace( declaration );
return declaration.toString();
}
private void returnCDI(StringBuilder declaration) {
annotationMetaEntity.importType("jakarta.enterprise.inject.spi.CDI");
declaration
.append("\treturn CDI.current().select(")
.append(typeName)
.append(".class).get();\n");
}
void closingBrace(StringBuilder declaration) {
declaration.append("}");
}
void preamble(StringBuilder declaration) {
declaration
.append(typeName)
.append(" ")
.append( getPropertyName() );
declaration
.append("() {\n");
}
@Override
public String getAttributeNameDeclarationString() {
return "";
}
@Override
public String getMetaType() {
throw new UnsupportedOperationException("operation not supported");
}
@Override
public String getPropertyName() {
return propertyName;
}
@Override
public String getTypeDeclaration() {
return "";
}
void modifiers(StringBuilder declaration) {
declaration
.append("\npublic static ");
}
@Override
public Metamodel getHostingEntity() {
return annotationMetaEntity;
}
}
|
CDIAccessorMetaAttribute
|
java
|
redisson__redisson
|
redisson/src/main/java/org/redisson/codec/SmileJacksonCodec.java
|
{
"start": 852,
"end": 1326
}
|
class ____ extends JsonJacksonCodec {
public SmileJacksonCodec() {
super(new ObjectMapper(new SmileFactory()));
}
public SmileJacksonCodec(ClassLoader classLoader) {
super(createObjectMapper(classLoader, new ObjectMapper(new SmileFactory())));
}
public SmileJacksonCodec(ClassLoader classLoader, SmileJacksonCodec codec) {
super(createObjectMapper(classLoader, codec.mapObjectMapper.copy()));
}
}
|
SmileJacksonCodec
|
java
|
spring-projects__spring-framework
|
spring-webmvc/src/main/java/org/springframework/web/servlet/config/annotation/ResourceChainRegistration.java
|
{
"start": 1684,
"end": 4849
}
|
class ____ {
private static final String DEFAULT_CACHE_NAME = "spring-resource-chain-cache";
private static final boolean WEB_JARS_PRESENT = ClassUtils.isPresent(
"org.webjars.WebJarVersionLocator", ResourceChainRegistration.class.getClassLoader());
private final List<ResourceResolver> resolvers = new ArrayList<>(4);
private final List<ResourceTransformer> transformers = new ArrayList<>(4);
private boolean hasVersionResolver;
private boolean hasPathResolver;
private boolean hasCssLinkTransformer;
private boolean hasWebjarsResolver;
public ResourceChainRegistration(boolean cacheResources) {
this(cacheResources, (cacheResources ? new ConcurrentMapCache(DEFAULT_CACHE_NAME) : null));
}
@SuppressWarnings("NullAway") // Dataflow analysis limitation
public ResourceChainRegistration(boolean cacheResources, @Nullable Cache cache) {
Assert.isTrue(!cacheResources || cache != null, "'cache' is required when cacheResources=true");
if (cacheResources) {
this.resolvers.add(new CachingResourceResolver(cache));
this.transformers.add(new CachingResourceTransformer(cache));
}
}
/**
* Add a resource resolver to the chain.
* @param resolver the resolver to add
* @return the current instance for chained method invocation
*/
@SuppressWarnings("removal")
public ResourceChainRegistration addResolver(ResourceResolver resolver) {
Assert.notNull(resolver, "The provided ResourceResolver should not be null");
this.resolvers.add(resolver);
if (resolver instanceof VersionResourceResolver) {
this.hasVersionResolver = true;
}
else if (resolver instanceof PathResourceResolver) {
this.hasPathResolver = true;
}
else if (resolver instanceof LiteWebJarsResourceResolver) {
this.hasWebjarsResolver = true;
}
return this;
}
/**
* Add a resource transformer to the chain.
* @param transformer the transformer to add
* @return the current instance for chained method invocation
*/
public ResourceChainRegistration addTransformer(ResourceTransformer transformer) {
Assert.notNull(transformer, "The provided ResourceTransformer should not be null");
this.transformers.add(transformer);
if (transformer instanceof CssLinkResourceTransformer) {
this.hasCssLinkTransformer = true;
}
return this;
}
protected List<ResourceResolver> getResourceResolvers() {
if (!this.hasPathResolver) {
List<ResourceResolver> result = new ArrayList<>(this.resolvers);
if (WEB_JARS_PRESENT && !this.hasWebjarsResolver) {
result.add(new LiteWebJarsResourceResolver());
}
result.add(new PathResourceResolver());
return result;
}
return this.resolvers;
}
protected List<ResourceTransformer> getResourceTransformers() {
if (this.hasVersionResolver && !this.hasCssLinkTransformer) {
List<ResourceTransformer> result = new ArrayList<>(this.transformers);
boolean hasTransformers = !this.transformers.isEmpty();
boolean hasCaching = hasTransformers && this.transformers.get(0) instanceof CachingResourceTransformer;
result.add(hasCaching ? 1 : 0, new CssLinkResourceTransformer());
return result;
}
return this.transformers;
}
}
|
ResourceChainRegistration
|
java
|
apache__kafka
|
clients/src/main/java/org/apache/kafka/server/policy/CreateTopicPolicy.java
|
{
"start": 2134,
"end": 6558
}
|
class ____ the provided parameters.
*
* This constructor is public to make testing of <code>CreateTopicPolicy</code> implementations easier.
*
* @param topic the name of the topic to create.
* @param numPartitions the number of partitions to create or null if replicasAssignments is set.
* @param replicationFactor the replication factor for the topic or null if replicaAssignments is set.
* @param replicasAssignments replica assignments or null if numPartitions and replicationFactor is set. The
* assignment is a map from partition id to replica (broker) ids.
* @param configs topic configs for the topic to be created, not including broker defaults. Broker configs are
* passed via the {@code configure()} method of the policy implementation.
*/
public RequestMetadata(String topic, Integer numPartitions, Short replicationFactor,
Map<Integer, List<Integer>> replicasAssignments, Map<String, String> configs) {
this.topic = topic;
this.numPartitions = numPartitions;
this.replicationFactor = replicationFactor;
this.replicasAssignments = replicasAssignments == null ? null : Collections.unmodifiableMap(replicasAssignments);
this.configs = Collections.unmodifiableMap(configs);
}
/**
* Return the name of the topic to create.
*/
public String topic() {
return topic;
}
/**
* Return the number of partitions to create or null if replicaAssignments is not null.
*/
public Integer numPartitions() {
return numPartitions;
}
/**
* Return the number of replicas to create or null if replicaAssignments is not null.
*/
public Short replicationFactor() {
return replicationFactor;
}
/**
* Return a map from partition id to replica (broker) ids or null if numPartitions and replicationFactor are
* set instead.
*/
public Map<Integer, List<Integer>> replicasAssignments() {
return replicasAssignments;
}
/**
* Return topic configs in the request, not including broker defaults. Broker configs are passed via
* the {@code configure()} method of the policy implementation.
*/
public Map<String, String> configs() {
return configs;
}
@Override
public int hashCode() {
return Objects.hash(topic, numPartitions, replicationFactor,
replicasAssignments, configs);
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
RequestMetadata other = (RequestMetadata) o;
return topic.equals(other.topic) &&
Objects.equals(numPartitions, other.numPartitions) &&
Objects.equals(replicationFactor, other.replicationFactor) &&
Objects.equals(replicasAssignments, other.replicasAssignments) &&
configs.equals(other.configs);
}
@Override
public String toString() {
return "CreateTopicPolicy.RequestMetadata(topic=" + topic +
", numPartitions=" + numPartitions +
", replicationFactor=" + replicationFactor +
", replicasAssignments=" + replicasAssignments +
", configs=" + configs + ")";
}
}
/**
* Validate the request parameters and throw a <code>PolicyViolationException</code> with a suitable error
* message if the create topics request parameters for the provided topic do not satisfy this policy.
*
* Clients will receive the POLICY_VIOLATION error code along with the exception's message. Note that validation
* failure only affects the relevant topic, other topics in the request will still be processed.
*
* @param requestMetadata the create topics request parameters for the provided topic.
* @throws PolicyViolationException if the request parameters do not satisfy this policy.
*/
void validate(RequestMetadata requestMetadata) throws PolicyViolationException;
}
|
with
|
java
|
mapstruct__mapstruct
|
processor/src/test/java/org/mapstruct/ap/test/context/objectfactory/ContextWithObjectFactoryTest.java
|
{
"start": 589,
"end": 961
}
|
class ____ {
@ProcessorTest
public void testFactoryCalled( ) {
ValveDto dto = new ValveDto();
dto.setOneWay( true );
Valve result = ContextWithObjectFactoryMapper.INSTANCE.map( dto, new ContextObjectFactory() );
assertThat( result ).isNotNull();
assertThat( result.isOneWay() ).isTrue();
}
}
|
ContextWithObjectFactoryTest
|
java
|
google__guava
|
android/guava/src/com/google/common/collect/ForwardingList.java
|
{
"start": 2363,
"end": 7936
}
|
class ____<E extends @Nullable Object> extends ForwardingCollection<E>
implements List<E> {
// TODO(lowasser): identify places where thread safety is actually lost
/** Constructor for use by subclasses. */
protected ForwardingList() {}
@Override
protected abstract List<E> delegate();
@Override
public void add(int index, @ParametricNullness E element) {
delegate().add(index, element);
}
@CanIgnoreReturnValue
@Override
public boolean addAll(int index, Collection<? extends E> elements) {
return delegate().addAll(index, elements);
}
@Override
@ParametricNullness
public E get(int index) {
return delegate().get(index);
}
@Override
public int indexOf(@Nullable Object element) {
return delegate().indexOf(element);
}
@Override
public int lastIndexOf(@Nullable Object element) {
return delegate().lastIndexOf(element);
}
@Override
public ListIterator<E> listIterator() {
return delegate().listIterator();
}
@Override
public ListIterator<E> listIterator(int index) {
return delegate().listIterator(index);
}
@CanIgnoreReturnValue
@Override
@ParametricNullness
public E remove(int index) {
return delegate().remove(index);
}
@CanIgnoreReturnValue
@Override
@ParametricNullness
public E set(int index, @ParametricNullness E element) {
return delegate().set(index, element);
}
@Override
public List<E> subList(int fromIndex, int toIndex) {
return delegate().subList(fromIndex, toIndex);
}
@Override
public boolean equals(@Nullable Object object) {
return object == this || delegate().equals(object);
}
@Override
public int hashCode() {
return delegate().hashCode();
}
/**
* A sensible default implementation of {@link #add(Object)}, in terms of {@link #add(int,
* Object)}. If you override {@link #add(int, Object)}, you may wish to override {@link
* #add(Object)} to forward to this implementation.
*
* @since 7.0
*/
protected boolean standardAdd(@ParametricNullness E element) {
add(size(), element);
return true;
}
/**
* A sensible default implementation of {@link #addAll(int, Collection)}, in terms of the {@code
* add} method of {@link #listIterator(int)}. If you override {@link #listIterator(int)}, you may
* wish to override {@link #addAll(int, Collection)} to forward to this implementation.
*
* @since 7.0
*/
protected boolean standardAddAll(int index, Iterable<? extends E> elements) {
return Lists.addAllImpl(this, index, elements);
}
/**
* A sensible default implementation of {@link #indexOf}, in terms of {@link #listIterator()}. If
* you override {@link #listIterator()}, you may wish to override {@link #indexOf} to forward to
* this implementation.
*
* @since 7.0
*/
protected int standardIndexOf(@Nullable Object element) {
return Lists.indexOfImpl(this, element);
}
/**
* A sensible default implementation of {@link #lastIndexOf}, in terms of {@link
* #listIterator(int)}. If you override {@link #listIterator(int)}, you may wish to override
* {@link #lastIndexOf} to forward to this implementation.
*
* @since 7.0
*/
protected int standardLastIndexOf(@Nullable Object element) {
return Lists.lastIndexOfImpl(this, element);
}
/**
* A sensible default implementation of {@link #iterator}, in terms of {@link #listIterator()}. If
* you override {@link #listIterator()}, you may wish to override {@link #iterator} to forward to
* this implementation.
*
* @since 7.0
*/
protected Iterator<E> standardIterator() {
return listIterator();
}
/**
* A sensible default implementation of {@link #listIterator()}, in terms of {@link
* #listIterator(int)}. If you override {@link #listIterator(int)}, you may wish to override
* {@link #listIterator()} to forward to this implementation.
*
* @since 7.0
*/
protected ListIterator<E> standardListIterator() {
return listIterator(0);
}
/**
* A sensible default implementation of {@link #listIterator(int)}, in terms of {@link #size},
* {@link #get(int)}, {@link #set(int, Object)}, {@link #add(int, Object)}, and {@link
* #remove(int)}. If you override any of these methods, you may wish to override {@link
* #listIterator(int)} to forward to this implementation.
*
* @since 7.0
*/
protected ListIterator<E> standardListIterator(int start) {
return Lists.listIteratorImpl(this, start);
}
/**
* A sensible default implementation of {@link #subList(int, int)}. If you override any other
* methods, you may wish to override {@link #subList(int, int)} to forward to this implementation.
*
* @since 7.0
*/
protected List<E> standardSubList(int fromIndex, int toIndex) {
return Lists.subListImpl(this, fromIndex, toIndex);
}
/**
* A sensible definition of {@link #equals(Object)} in terms of {@link #size} and {@link
* #iterator}. If you override either of those methods, you may wish to override {@link
* #equals(Object)} to forward to this implementation.
*
* @since 7.0
*/
protected boolean standardEquals(@Nullable Object object) {
return Lists.equalsImpl(this, object);
}
/**
* A sensible definition of {@link #hashCode} in terms of {@link #iterator}. If you override
* {@link #iterator}, you may wish to override {@link #hashCode} to forward to this
* implementation.
*
* @since 7.0
*/
protected int standardHashCode() {
return Lists.hashCodeImpl(this);
}
}
|
ForwardingList
|
java
|
spring-projects__spring-boot
|
module/spring-boot-jackson2/src/test/java/org/springframework/boot/jackson2/NameAndCareerJsonComponent.java
|
{
"start": 1711,
"end": 2117
}
|
class ____ extends JsonObjectDeserializer<Name> {
@Override
protected Name deserializeObject(JsonParser jsonParser, DeserializationContext context, ObjectCodec codec,
JsonNode tree) throws IOException {
String name = nullSafeValue(tree.get("name"), String.class);
String career = nullSafeValue(tree.get("career"), String.class);
return new NameAndCareer(name, career);
}
}
}
|
Deserializer
|
java
|
apache__rocketmq
|
tools/src/main/java/org/apache/rocketmq/tools/command/topic/RemappingStaticTopicSubCommand.java
|
{
"start": 1894,
"end": 9225
}
|
class ____ implements SubCommand {
@Override
public String commandName() {
return "remappingStaticTopic";
}
@Override
public String commandDesc() {
return "Remapping static topic.";
}
@Override
public Options buildCommandlineOptions(Options options) {
OptionGroup optionGroup = new OptionGroup();
Option opt = null;
opt = new Option("c", "clusters", true, "remapping static topic to clusters, comma separated");
optionGroup.addOption(opt);
opt = new Option("b", "brokers", true, "remapping static topic to brokers, comma separated");
optionGroup.addOption(opt);
optionGroup.setRequired(true);
options.addOptionGroup(optionGroup);
opt = new Option("t", "topic", true, "topic name");
opt.setRequired(true);
options.addOption(opt);
opt = new Option("mf", "mapFile", true, "The mapping data file name ");
opt.setRequired(false);
options.addOption(opt);
opt = new Option("fr", "forceReplace", true, "Force replace the old mapping");
opt.setRequired(false);
options.addOption(opt);
return options;
}
public void executeFromFile(final CommandLine commandLine, final Options options,
RPCHook rpcHook) throws SubCommandException {
DefaultMQAdminExt defaultMQAdminExt = new DefaultMQAdminExt(rpcHook);
defaultMQAdminExt.setInstanceName(Long.toString(System.currentTimeMillis()));
try {
defaultMQAdminExt.start();
String topic = commandLine.getOptionValue('t').trim();
String mapFileName = commandLine.getOptionValue('f').trim();
String mapData = MixAll.file2String(mapFileName);
TopicRemappingDetailWrapper wrapper = TopicRemappingDetailWrapper.decode(mapData.getBytes(StandardCharsets.UTF_8),
TopicRemappingDetailWrapper.class);
//double check the config
TopicQueueMappingUtils.checkNameEpochNumConsistence(topic, wrapper.getBrokerConfigMap());
TopicQueueMappingUtils.checkAndBuildMappingItems(new ArrayList<>(TopicQueueMappingUtils.getMappingDetailFromConfig(wrapper.getBrokerConfigMap().values())), false, true);
boolean force = false;
if (commandLine.hasOption("fr") && Boolean.parseBoolean(commandLine.getOptionValue("fr").trim())) {
force = true;
}
MQAdminUtils.remappingStaticTopic(topic, wrapper.getBrokerToMapIn(), wrapper.getBrokerToMapOut(), wrapper.getBrokerConfigMap(), 10000, force, defaultMQAdminExt);
return;
} catch (Exception e) {
throw new SubCommandException(this.getClass().getSimpleName() + " command failed", e);
} finally {
defaultMQAdminExt.shutdown();
}
}
@Override
public void execute(final CommandLine commandLine, final Options options,
RPCHook rpcHook) throws SubCommandException {
if (!commandLine.hasOption('t')) {
ServerUtil.printCommandLineHelp("mqadmin " + this.commandName(), options);
return;
}
if (commandLine.hasOption("f")) {
executeFromFile(commandLine, options, rpcHook);
return;
}
DefaultMQAdminExt defaultMQAdminExt = new DefaultMQAdminExt(rpcHook);
defaultMQAdminExt.setInstanceName(Long.toString(System.currentTimeMillis()));
ClientMetadata clientMetadata = new ClientMetadata();
Map<String, TopicConfigAndQueueMapping> brokerConfigMap;
Set<String> targetBrokers = new HashSet<>();
try {
defaultMQAdminExt.start();
if (!commandLine.hasOption("b") && !commandLine.hasOption('c')) {
ServerUtil.printCommandLineHelp("mqadmin " + this.commandName(), options);
return;
}
String topic = commandLine.getOptionValue('t').trim();
ClusterInfo clusterInfo = defaultMQAdminExt.examineBrokerClusterInfo();
if (clusterInfo == null
|| clusterInfo.getClusterAddrTable().isEmpty()) {
throw new RuntimeException("The Cluster info is empty");
}
clientMetadata.refreshClusterInfo(clusterInfo);
{
if (commandLine.hasOption("b")) {
String brokerStrs = commandLine.getOptionValue("b").trim();
for (String broker: brokerStrs.split(",")) {
targetBrokers.add(broker.trim());
}
} else if (commandLine.hasOption("c")) {
String clusters = commandLine.getOptionValue('c').trim();
for (String cluster : clusters.split(",")) {
cluster = cluster.trim();
if (clusterInfo.getClusterAddrTable().get(cluster) != null) {
targetBrokers.addAll(clusterInfo.getClusterAddrTable().get(cluster));
}
}
}
if (targetBrokers.isEmpty()) {
throw new RuntimeException("Find none brokers, do nothing");
}
for (String broker : targetBrokers) {
String addr = clientMetadata.findMasterBrokerAddr(broker);
if (addr == null) {
throw new RuntimeException("Can't find addr for broker " + broker);
}
}
}
brokerConfigMap = MQAdminUtils.examineTopicConfigAll(topic, defaultMQAdminExt);
if (brokerConfigMap.isEmpty()) {
throw new RuntimeException("No topic route to do the remapping");
}
Map.Entry<Long, Integer> maxEpochAndNum = TopicQueueMappingUtils.checkNameEpochNumConsistence(topic, brokerConfigMap);
{
TopicRemappingDetailWrapper oldWrapper = new TopicRemappingDetailWrapper(topic, TopicRemappingDetailWrapper.TYPE_CREATE_OR_UPDATE, maxEpochAndNum.getKey(), brokerConfigMap, new HashSet<>(), new HashSet<>());
String oldMappingDataFile = TopicQueueMappingUtils.writeToTemp(oldWrapper, false);
System.out.printf("The old mapping data is written to file " + oldMappingDataFile + "\n");
}
TopicRemappingDetailWrapper newWrapper = TopicQueueMappingUtils.remappingStaticTopic(topic, brokerConfigMap, targetBrokers);
{
String newMappingDataFile = TopicQueueMappingUtils.writeToTemp(newWrapper, true);
System.out.printf("The old mapping data is written to file " + newMappingDataFile + "\n");
}
MQAdminUtils.completeNoTargetBrokers(newWrapper.getBrokerConfigMap(), defaultMQAdminExt);
MQAdminUtils.remappingStaticTopic(topic, newWrapper.getBrokerToMapIn(), newWrapper.getBrokerToMapOut(), newWrapper.getBrokerConfigMap(), 10000, false, defaultMQAdminExt);
} catch (Exception e) {
throw new SubCommandException(this.getClass().getSimpleName() + " command failed", e);
} finally {
defaultMQAdminExt.shutdown();
}
}
}
|
RemappingStaticTopicSubCommand
|
java
|
apache__camel
|
components/camel-salesforce/camel-salesforce-component/src/main/java/org/apache/camel/component/salesforce/internal/client/BulkApiV2Client.java
|
{
"start": 1928,
"end": 2073
}
|
interface ____ {
void onResponse(QueryJob queryJob, Map<String, String> headers, SalesforceException ex);
}
|
QueryJobResponseCallback
|
java
|
micronaut-projects__micronaut-core
|
inject-java/src/test/groovy/io/micronaut/aop/LoggedInterceptor.java
|
{
"start": 636,
"end": 923
}
|
class ____ implements Interceptor {
@Override
public Object intercept(InvocationContext context) {
System.out.println("Starting method");
Object value = context.proceed();
System.out.println("Finished method");
return value;
}
}
|
LoggedInterceptor
|
java
|
hibernate__hibernate-orm
|
hibernate-spatial/src/main/java/org/hibernate/spatial/jts/EnvelopeAdapter.java
|
{
"start": 424,
"end": 1451
}
|
class ____ {
private static volatile GeometryFactory geomFactory = new GeometryFactory();
private EnvelopeAdapter() {
}
/**
* Converts the specified {@code Envelope} to a {@code Polygon} having the specified srid.
*
* @param env The envelope to convert
* @param srid The srid for the polygon
*
* @return The Polygon
*/
public static Polygon toPolygon(Envelope env, int srid) {
final Coordinate[] coords = new Coordinate[5];
coords[0] = new Coordinate( env.getMinX(), env.getMinY() );
coords[1] = new Coordinate( env.getMinX(), env.getMaxY() );
coords[2] = new Coordinate( env.getMaxX(), env.getMaxY() );
coords[3] = new Coordinate( env.getMaxX(), env.getMinY() );
coords[4] = new Coordinate( env.getMinX(), env.getMinY() );
final LinearRing shell = geomFactory.createLinearRing( coords );
final Polygon pg = geomFactory.createPolygon( shell, null );
pg.setSRID( srid );
return pg;
}
public static void setGeometryFactory(GeometryFactory gf) {
geomFactory = gf;
}
}
|
EnvelopeAdapter
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/bytecode/enhance/spi/interceptor/BytecodeLazyAttributeInterceptor.java
|
{
"start": 261,
"end": 723
}
|
interface ____ extends SessionAssociableInterceptor {
/**
* The name of the entity this interceptor is meant to intercept
*/
String getEntityName();
/**
* The id of the entity instance this interceptor is associated with
*/
Object getIdentifier();
/**
* The names of all lazy attributes which have been initialized
*/
@Override
Set<String> getInitializedLazyAttributeNames();
/**
* Callback from the enhanced
|
BytecodeLazyAttributeInterceptor
|
java
|
apache__camel
|
core/camel-management/src/main/java/org/apache/camel/management/mbean/StatisticCounter.java
|
{
"start": 902,
"end": 1445
}
|
class ____ extends Statistic {
private final AtomicLong value = new AtomicLong();
@Override
public void updateValue(long newValue) {
value.getAndAdd(newValue);
}
@Override
public long getValue() {
return value.get();
}
@Override
public String toString() {
return Long.toString(value.get());
}
@Override
public void reset() {
value.set(0);
}
@Override
public boolean isUpdated() {
// this is okay
return true;
}
}
|
StatisticCounter
|
java
|
spring-projects__spring-framework
|
spring-jdbc/src/test/java/org/springframework/jdbc/datasource/DriverManagerDataSourceTests.java
|
{
"start": 1205,
"end": 2311
}
|
class ____ extends DriverManagerDataSource {
@Override
protected Connection getConnectionFromDriverManager(String url, Properties props) {
assertThat(url).isEqualTo(jdbcUrl);
assertThat(props.getProperty("user")).isEqualTo(uname);
assertThat(props.getProperty("password")).isEqualTo(pwd);
return connection;
}
}
DriverManagerDataSource ds = new TestDriverManagerDataSource();
//ds.setDriverClassName("foobar");
ds.setUrl(jdbcUrl);
ds.setUsername(uname);
ds.setPassword(pwd);
Connection actualCon = ds.getConnection();
assertThat(actualCon).isSameAs(connection);
assertThat(ds.getUrl()).isEqualTo(jdbcUrl);
assertThat(ds.getPassword()).isEqualTo(pwd);
assertThat(ds.getUsername()).isEqualTo(uname);
}
@Test
void usageWithConnectionProperties() throws Exception {
final String jdbcUrl = "url";
final Properties connProps = new Properties();
connProps.setProperty("myProp", "myValue");
connProps.setProperty("yourProp", "yourValue");
connProps.setProperty("user", "uname");
connProps.setProperty("password", "pwd");
|
TestDriverManagerDataSource
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/boot/model/source/spi/PluralAttributeElementSourceAssociation.java
|
{
"start": 204,
"end": 314
}
|
interface ____ extends PluralAttributeElementSource, AssociationSource {
}
|
PluralAttributeElementSourceAssociation
|
java
|
apache__camel
|
core/camel-core-model/src/main/java/org/apache/camel/model/dataformat/CsvDataFormat.java
|
{
"start": 7862,
"end": 15918
}
|
interface ____ is able to customize
* marshalling/unmarshalling behavior by extending CsvMarshaller or creating it from scratch.
*
* @param marshallerFactoryRef the <code>CsvMarshallerFactory</code> reference.
*/
public void setMarshallerFactoryRef(String marshallerFactoryRef) {
this.marshallerFactoryRef = marshallerFactoryRef;
}
/**
* Returns the <code>CsvMarshallerFactory</code> reference.
*
* @return the <code>CsvMarshallerFactory</code> or <code>null</code> if none has been specified.
*/
public String getMarshallerFactoryRef() {
return marshallerFactoryRef;
}
public String getFormat() {
return format;
}
/**
* The format to use.
*/
public void setFormat(String format) {
this.format = format;
}
public String getCommentMarkerDisabled() {
return commentMarkerDisabled;
}
/**
* Disables the comment marker of the reference format.
*/
public void setCommentMarkerDisabled(String commentMarkerDisabled) {
this.commentMarkerDisabled = commentMarkerDisabled;
}
public String getCommentMarker() {
return commentMarker;
}
/**
* Sets the comment marker of the reference format.
*/
public void setCommentMarker(String commentMarker) {
this.commentMarker = commentMarker;
}
public String getDelimiter() {
return delimiter;
}
/**
* Sets the delimiter to use.
* <p/>
* The default value is , (comma)
*/
public void setDelimiter(String delimiter) {
this.delimiter = delimiter;
}
public String getEscapeDisabled() {
return escapeDisabled;
}
/**
* Use for disabling using escape character
*/
public void setEscapeDisabled(String escapeDisabled) {
this.escapeDisabled = escapeDisabled;
}
public String getEscape() {
return escape;
}
/**
* Sets the escape character to use
*/
public void setEscape(String escape) {
this.escape = escape;
}
/**
* Use for disabling headers
*/
public String getHeaderDisabled() {
return headerDisabled;
}
public void setHeaderDisabled(String headerDisabled) {
this.headerDisabled = headerDisabled;
}
public String getHeader() {
return header;
}
/**
* To configure the CSV headers. Multiple headers can be separated by comma.
*/
public void setHeader(String header) {
this.header = header;
}
public String getAllowMissingColumnNames() {
return allowMissingColumnNames;
}
/**
* Whether to allow missing column names.
*/
public void setAllowMissingColumnNames(String allowMissingColumnNames) {
this.allowMissingColumnNames = allowMissingColumnNames;
}
public String getIgnoreEmptyLines() {
return ignoreEmptyLines;
}
/**
* Whether to ignore empty lines.
*/
public void setIgnoreEmptyLines(String ignoreEmptyLines) {
this.ignoreEmptyLines = ignoreEmptyLines;
}
public String getIgnoreSurroundingSpaces() {
return ignoreSurroundingSpaces;
}
/**
* Whether to ignore surrounding spaces
*/
public void setIgnoreSurroundingSpaces(String ignoreSurroundingSpaces) {
this.ignoreSurroundingSpaces = ignoreSurroundingSpaces;
}
public String getNullStringDisabled() {
return nullStringDisabled;
}
/**
* Used to disable null strings
*/
public void setNullStringDisabled(String nullStringDisabled) {
this.nullStringDisabled = nullStringDisabled;
}
public String getNullString() {
return nullString;
}
/**
* Sets the null string
*/
public void setNullString(String nullString) {
this.nullString = nullString;
}
public String getQuoteDisabled() {
return quoteDisabled;
}
/**
* Used to disable quotes
*/
public void setQuoteDisabled(String quoteDisabled) {
this.quoteDisabled = quoteDisabled;
}
public String getQuote() {
return quote;
}
/**
* Sets the quote to use which by default is double-quote character
*/
public void setQuote(String quote) {
this.quote = quote;
}
public String getRecordSeparatorDisabled() {
return recordSeparatorDisabled;
}
/**
* Used for disabling record separator
*/
public void setRecordSeparatorDisabled(String recordSeparatorDisabled) {
this.recordSeparatorDisabled = recordSeparatorDisabled;
}
public String getRecordSeparator() {
return recordSeparator;
}
/**
* Sets the record separator (aka new line) which by default is new line characters (CRLF)
*/
public void setRecordSeparator(String recordSeparator) {
this.recordSeparator = recordSeparator;
}
public String getSkipHeaderRecord() {
return skipHeaderRecord;
}
/**
* Whether to skip the header record in the output
*/
public void setSkipHeaderRecord(String skipHeaderRecord) {
this.skipHeaderRecord = skipHeaderRecord;
}
public String getQuoteMode() {
return quoteMode;
}
/**
* Sets the quote mode
*/
public void setQuoteMode(String quoteMode) {
this.quoteMode = quoteMode;
}
public String getLazyLoad() {
return lazyLoad;
}
/**
* Whether the unmarshalling should produce an iterator that reads the lines on the fly or if all the lines must be
* read at one.
*/
public void setLazyLoad(String lazyLoad) {
this.lazyLoad = lazyLoad;
}
public String getUseMaps() {
return useMaps;
}
/**
* Whether the unmarshalling should produce maps (HashMap)for the lines values instead of lists. It requires to have
* header (either defined or collected).
*/
public void setUseMaps(String useMaps) {
this.useMaps = useMaps;
}
public String getUseOrderedMaps() {
return useOrderedMaps;
}
/**
* Whether the unmarshalling should produce ordered maps (LinkedHashMap) for the lines values instead of lists. It
* requires to have header (either defined or collected).
*/
public void setUseOrderedMaps(String useOrderedMaps) {
this.useOrderedMaps = useOrderedMaps;
}
public String getRecordConverterRef() {
return recordConverterRef;
}
/**
* Refers to a custom <tt>CsvRecordConverter</tt> to lookup from the registry to use.
*/
public void setRecordConverterRef(String recordConverterRef) {
this.recordConverterRef = recordConverterRef;
}
/**
* Sets whether or not to trim leading and trailing blanks.
*/
public void setTrim(String trim) {
this.trim = trim;
}
public String getTrim() {
return trim;
}
/**
* Sets whether or not to ignore case when accessing header names.
*/
public void setIgnoreHeaderCase(String ignoreHeaderCase) {
this.ignoreHeaderCase = ignoreHeaderCase;
}
public String getIgnoreHeaderCase() {
return ignoreHeaderCase;
}
/**
* Sets whether or not to add a trailing delimiter.
*/
public void setTrailingDelimiter(String trailingDelimiter) {
this.trailingDelimiter = trailingDelimiter;
}
public String getTrailingDelimiter() {
return trailingDelimiter;
}
public String getCaptureHeaderRecord() {
return captureHeaderRecord;
}
/**
* Whether the unmarshalling should capture the header record and store it in the message header
*/
public void setCaptureHeaderRecord(String captureHeaderRecord) {
this.captureHeaderRecord = captureHeaderRecord;
}
/**
* {@code Builder} is a specific builder for {@link CsvDataFormat}.
*/
@XmlTransient
public static
|
which
|
java
|
quarkusio__quarkus
|
extensions/hibernate-validator/deployment/src/test/java/io/quarkus/hibernate/validator/test/validatorfactory/MyMultipleValidatorFactoryCustomizer.java
|
{
"start": 421,
"end": 1234
}
|
class ____ implements ValidatorFactoryCustomizer {
@Override
public void customize(BaseHibernateValidatorConfiguration<?> configuration) {
ConstraintMapping constraintMapping = configuration.createConstraintMapping();
constraintMapping
.constraintDefinition(Email.class)
.includeExistingValidators(false)
.validatedBy(MyEmailValidator.class);
configuration.addMapping(constraintMapping);
constraintMapping = configuration.createConstraintMapping();
constraintMapping
.constraintDefinition(Min.class)
.includeExistingValidators(false)
.validatedBy(MyNumValidator.class);
configuration.addMapping(constraintMapping);
}
}
|
MyMultipleValidatorFactoryCustomizer
|
java
|
spring-projects__spring-framework
|
spring-core/src/test/java/org/springframework/core/annotation/MergedAnnotationsTests.java
|
{
"start": 113487,
"end": 113889
}
|
interface ____ {
@AliasFor(annotation = ContextConfiguration.class, attribute = "locations")
String[] locations() default {};
// Do NOT use @AliasFor(annotation = ...)
@AliasFor("value")
Class<?>[] classes() default {};
// Do NOT use @AliasFor(annotation = ...)
@AliasFor("classes")
Class<?>[] value() default {};
}
@Retention(RetentionPolicy.RUNTIME)
@
|
SpringApplicationConfiguration
|
java
|
spring-projects__spring-framework
|
spring-expression/src/test/java/org/springframework/expression/spel/ParsingTests.java
|
{
"start": 5593,
"end": 6810
}
|
class ____ {
@Test
void insideSingleQuotes() {
parseCheck("'hello'");
parseCheck("'hello world'");
}
@Test
void insideDoubleQuotes() {
parseCheck("\"hello\"", "'hello'");
parseCheck("\"hello world\"", "'hello world'");
}
@Test
void singleQuotesInsideSingleQuotes() {
parseCheck("'Tony''s Pizza'");
parseCheck("'big ''''pizza'''' parlor'");
}
@Test
void doubleQuotesInsideDoubleQuotes() {
parseCheck("\"big \"\"pizza\"\" parlor\"", "'big \"pizza\" parlor'");
parseCheck("\"big \"\"\"\"pizza\"\"\"\" parlor\"", "'big \"\"pizza\"\" parlor'");
}
@Test
void singleQuotesInsideDoubleQuotes() {
parseCheck("\"Tony's Pizza\"", "'Tony''s Pizza'");
parseCheck("\"big ''pizza'' parlor\"", "'big ''''pizza'''' parlor'");
}
@Test
void doubleQuotesInsideSingleQuotes() {
parseCheck("'big \"pizza\" parlor'");
parseCheck("'two double \"\" quotes'");
}
@Test
void inCompoundExpressions() {
parseCheck("'123''4' == '123''4'", "('123''4' == '123''4')");
parseCheck("('123''4'=='123''4')", "('123''4' == '123''4')");
parseCheck(
"""
"123""4" == "123""4"\
""",
"""
('123"4' == '123"4')\
""");
}
}
@Nested
|
LiteralStrings
|
java
|
google__error-prone
|
check_api/src/test/java/com/google/errorprone/util/testdata/TargetTypeTest.java
|
{
"start": 845,
"end": 16109
}
|
class ____ {
void unary() {
System.out.println(
// BUG: Diagnostic contains: boolean
!detectWrappedBoolean());
System.out.println(
// BUG: Diagnostic contains: boolean
!detectPrimitiveBoolean());
System.out.println(
// BUG: Diagnostic contains: int
~detectPrimitiveInt());
System.out.println(
// BUG: Diagnostic contains: int
~detectWrappedInteger());
System.out.println(
// BUG: Diagnostic contains: int
~detectPrimitiveByte());
}
void binary(boolean b) {
// BUG: Diagnostic contains: int
long a1 = detectPrimitiveInt() + 20;
// BUG: Diagnostic contains: int
long a2 = detectWrappedInteger() - 20;
// BUG: Diagnostic contains: long
long a3 = detectPrimitiveInt() + 20L;
// BUG: Diagnostic contains: long
long a4 = detectWrappedInteger() - 20L;
// BUG: Diagnostic contains: boolean
boolean b1 = detectPrimitiveBoolean() & b;
// BUG: Diagnostic contains: boolean
boolean b2 = b || detectWrappedBoolean();
// BUG: Diagnostic contains: java.lang.String
String s1 = detectString() + "";
// BUG: Diagnostic contains: java.lang.String
String s2 = null + detectString();
// BUG: Diagnostic contains: java.lang.String
String s3 = 0 + detectString();
// BUG: Diagnostic contains: java.lang.String
boolean eq1 = detectString() == "";
// BUG: Diagnostic contains: java.lang.String
boolean eq2 = null != detectString();
// BUG: Diagnostic contains: int
boolean eq3 = detectPrimitiveInt() == 0;
// BUG: Diagnostic contains: int
boolean eq4 = 0 == detectWrappedInteger();
}
void binary_shift() {
// The shift operator is unusual in terms of binary operators, in that the operands undergo
// unary numeric promotion separately.
// BUG: Diagnostic contains: int
System.out.println(detectPrimitiveInt() << 1L);
// BUG: Diagnostic contains: int
System.out.println(1L << detectPrimitiveInt());
int i = 1;
// BUG: Diagnostic contains: int
i >>>= detectPrimitiveInt();
// BUG: Diagnostic contains: int
i >>>= detectWrappedInteger();
long a = 1;
// BUG: Diagnostic contains: int
a <<= detectPrimitiveInt();
// BUG: Diagnostic contains: int
a >>>= detectWrappedInteger();
// BUG: Diagnostic contains: int
a >>= detectWrappedInteger();
}
void conditional_condition() {
// BUG: Diagnostic contains: boolean
System.out.println(detectPrimitiveBoolean() ? "" : "");
// BUG: Diagnostic contains: boolean
System.out.println(detectWrappedBoolean() ? "" : "");
}
void conditional_trueExpression(boolean b) {
// BUG: Diagnostic contains: int
System.out.println(b ? detectWrappedInteger() : 0);
}
void conditional_trueExpression_noUnboxing(boolean b) {
// BUG: Diagnostic contains: java.lang.Integer
System.out.println(b ? detectWrappedInteger() : Integer.valueOf(0));
}
void conditional_conditionalInCondition(boolean b1, boolean b2) {
// BUG: Diagnostic contains: long
System.out.println(((detectPrimitiveInt() != 0L) ? b1 : b2) ? "" : "");
}
void ifStatement() {
if (
// BUG: Diagnostic contains: boolean
detectPrimitiveBoolean()) {}
if (
// BUG: Diagnostic contains: boolean
detectWrappedBoolean()) {}
}
void ifElseStatement() {
if (true) {
} else if (
// BUG: Diagnostic contains: boolean
detectPrimitiveBoolean()) {
}
if (true) {
} else if (
// BUG: Diagnostic contains: boolean
detectWrappedBoolean()) {
}
}
void whileLoop() {
while (
// BUG: Diagnostic contains: boolean
detectPrimitiveBoolean()) {}
while (
// BUG: Diagnostic contains: boolean
detectWrappedBoolean()) {}
}
void doWhileLoop() {
do {} while (
// BUG: Diagnostic contains: boolean
detectPrimitiveBoolean());
do {} while (
// BUG: Diagnostic contains: boolean
detectWrappedBoolean());
}
void forLoop() {
for (;
// BUG: Diagnostic contains: boolean
detectPrimitiveBoolean(); ) {}
for (;
// BUG: Diagnostic contains: boolean
detectWrappedBoolean(); ) {}
}
void typesOfDetectMethods() {
// BUG: Diagnostic contains: byte
byte primitiveByte = detectPrimitiveByte();
// BUG: Diagnostic contains: boolean
boolean primitiveBoolean = detectPrimitiveBoolean();
// BUG: Diagnostic contains: int
int primitiveInt = detectPrimitiveInt();
// BUG: Diagnostic contains: java.lang.Boolean
Boolean wrappedBoolean = detectWrappedBoolean();
// BUG: Diagnostic contains: java.lang.Integer
Integer wrappedInteger = detectWrappedInteger();
}
void arrayAccess(String[] s) {
// BUG: Diagnostic contains: int
System.out.println(s[detectPrimitiveInt()]);
// BUG: Diagnostic contains: int
System.out.println(s[detectWrappedInteger()]);
// BUG: Diagnostic contains: java.lang.String[]
System.out.println(detectStringArray()[0]);
}
void switchStatement() {
// BUG: Diagnostic contains: int
switch (detectPrimitiveInt()) {
}
// BUG: Diagnostic contains: int
switch (detectWrappedInteger()) {
}
// BUG: Diagnostic contains: java.lang.String
switch (detectString()) {
}
// BUG: Diagnostic contains: com.google.errorprone.util.testdata.TargetTypeTest.ThisEnum
switch (detectThisEnum()) {
}
}
int[] array_intInPrimitiveIntArray() {
// BUG: Diagnostic contains: int
int[] array = {detectPrimitiveInt()};
// BUG: Diagnostic contains: int
return new int[] {detectPrimitiveInt()};
}
int[][] array_intInPrimitiveIntArray2D() {
// BUG: Diagnostic contains: int
int[][] array = {{detectPrimitiveInt()}};
// BUG: Diagnostic contains: int
return new int[][] {{detectPrimitiveInt()}};
}
int[][][] array_byteInPrimitiveIntArray3D() {
// BUG: Diagnostic contains: int
int[][][] array = {{{detectPrimitiveByte()}}};
// BUG: Diagnostic contains: int
return new int[][][] {{{detectPrimitiveByte()}}};
}
int[] array_byteInPrimitiveIntArray() {
// BUG: Diagnostic contains: int
int[] array = {detectPrimitiveByte()};
// BUG: Diagnostic contains: int
return new int[] {detectPrimitiveByte()};
}
Integer[] array_intInWrappedIntegerArray() {
// BUG: Diagnostic contains: java.lang.Integer
Integer[] array = {detectPrimitiveInt()};
// BUG: Diagnostic contains: java.lang.Integer
return new Integer[] {detectPrimitiveInt()};
}
Integer[] array_integerInWrappedIntegerArray() {
// BUG: Diagnostic contains: java.lang.Integer
Integer[] array = {detectWrappedInteger()};
// BUG: Diagnostic contains: java.lang.Integer
return new Integer[] {detectWrappedInteger()};
}
int[] array_integerInPrimitiveIntArray() {
// BUG: Diagnostic contains: int
int[] array = {detectWrappedInteger()};
// BUG: Diagnostic contains: int
return new int[] {detectWrappedInteger()};
}
Integer[][] array_integerInWrappedIntegerArray2D() {
// BUG: Diagnostic contains: java.lang.Integer
Integer[][] array = {{detectWrappedInteger()}};
// BUG: Diagnostic contains: java.lang.Integer
return new Integer[][] {{detectWrappedInteger()}};
}
Integer[][][] array_integerInWrappedIntegerArray3D() {
// BUG: Diagnostic contains: java.lang.Integer
Integer[][][] array = {{{detectWrappedInteger()}}};
// BUG: Diagnostic contains: java.lang.Integer
return new Integer[][][] {{{detectWrappedInteger()}}};
}
Serializable[] array_integerInSerializableArray() {
// BUG: Diagnostic contains: java.io.Serializable
Serializable[] array = {detectWrappedInteger()};
// BUG: Diagnostic contains: java.io.Serializable
return new Serializable[] {detectWrappedInteger()};
}
Object[][][] array_integerInObjectArray3D() {
// BUG: Diagnostic contains: java.lang.Object
Object[][][] array = {{{detectWrappedInteger()}}};
// BUG: Diagnostic contains: java.lang.Object
return new Object[][][] {{{detectWrappedInteger()}}};
}
Object[][] array_integerArrayInObjectArray() {
// BUG: Diagnostic contains: java.lang.Integer
Object[][] array = {new Integer[] {detectPrimitiveInt()}};
// BUG: Diagnostic contains: java.lang.Integer
return new Object[][] {new Integer[] {detectPrimitiveInt()}};
}
Object[][] array_arrayHiddenInsideObjectArray() {
// BUG: Diagnostic contains: java.lang.Integer
Object[][] array = {{new Integer[] {detectPrimitiveInt()}}};
// BUG: Diagnostic contains: java.lang.Integer
return new Object[][] {{new Integer[] {detectPrimitiveInt()}}};
}
Integer[][] array_primitiveByteInDimensions() {
// BUG: Diagnostic contains: int
return new Integer[detectPrimitiveByte()][];
}
String[][] array_wrappedIntegerInDimensions() {
// BUG: Diagnostic contains: int
return new String[detectWrappedInteger()][];
}
String[][] array_initializeWithArray() {
// BUG: Diagnostic contains: java.lang.String[]
String[][] s = {detectStringArray()};
return s;
}
String methodChain() {
// BUG: Diagnostic contains: java.lang.Boolean
Boolean b = TargetTypeTest.detectWrappedBoolean();
// BUG: Diagnostic contains: java.lang.Object
return detectWrappedInteger().toString();
}
void compoundAssignment_numeric(Integer i, int j, Long k) {
// BUG: Diagnostic contains: int
i /= detectWrappedInteger();
// BUG: Diagnostic contains: int
i *= (detectWrappedInteger());
// BUG: Diagnostic contains: int
j -= detectWrappedInteger();
// BUG: Diagnostic contains: long
k /= detectWrappedInteger();
}
void compoundAssignment_string(String s, Object o) {
// BUG: Diagnostic contains: java.lang.String
s += detectWrappedInteger();
// BUG: Diagnostic contains: java.lang.String
s += detectPrimitiveInt();
// BUG: Diagnostic contains: java.lang.String
o += detectString();
}
void compoundAssignment_boolean(boolean b) {
// BUG: Diagnostic contains: boolean
b &= detectWrappedBoolean();
// BUG: Diagnostic contains: boolean
b |= detectPrimitiveBoolean();
}
void concatenation(String s, Object a) {
// BUG: Diagnostic contains: java.lang.String
a = s + detectWrappedInteger();
// BUG: Diagnostic contains: java.lang.String
a = s + detectPrimitiveByte();
// BUG: Diagnostic contains: java.lang.String
a = s + detectVoid();
// BUG: Diagnostic contains: java.lang.String
a = s + detectStringArray();
}
abstract <T> T id(T t);
abstract <T> List<T> list(List<T> t);
void generic() {
// BUG: Diagnostic contains: java.lang.String
String s = id(detectString());
// BUG: Diagnostic contains: java.lang.Integer
int i = id(detectPrimitiveInt());
// BUG: Diagnostic contains: java.util.List<java.lang.String>
List<String> y = id(detectStringList());
// BUG: Diagnostic contains: java.lang.Integer
Integer z = id(detectPrimitiveInt());
}
void typeCast() {
// BUG: Diagnostic contains: int
long a = (int) detectPrimitiveByte();
// BUG: Diagnostic contains: java.lang.Object
String s = (String) (Object) detectString();
}
void enhancedForLoop() {
// BUG: Diagnostic contains: java.lang.String[]
for (String s : detectStringArray()) {}
// BUG: Diagnostic contains: java.lang.Object[]
for (Object s : detectStringArray()) {}
// BUG: Diagnostic contains: int[]
for (int i : detectPrimitiveIntArray()) {}
// BUG: Diagnostic contains: long[]
for (long i : detectPrimitiveIntArray()) {}
// BUG: Diagnostic contains: java.lang.Integer[]
for (Integer i : detectPrimitiveIntArray()) {}
// BUG: Diagnostic contains: java.lang.Object[]
for (Object i : detectPrimitiveIntArray()) {}
// BUG: Diagnostic contains: java.lang.Iterable<? extends java.lang.String>
for (String s : detectStringList()) {}
// BUG: Diagnostic contains: java.lang.Iterable<? extends java.lang.Object>
for (Object s : detectStringList()) {}
// BUG: Diagnostic contains: java.lang.Iterable<? extends java.lang.Integer>
for (int s : detectIntegerList()) {}
}
void testAssert() {
// BUG: Diagnostic contains: boolean
assert detectPrimitiveBoolean();
// BUG: Diagnostic contains: boolean
assert detectWrappedBoolean();
// BUG: Diagnostic contains: boolean
assert detectPrimitiveBoolean() : "";
// BUG: Diagnostic contains: java.lang.String
assert false : detectString();
// BUG: Diagnostic contains: java.lang.String
assert false : detectPrimitiveInt();
}
void testSwitch(int anInt, String aString) {
final int detectInt = 0;
switch (anInt) {
// BUG: Diagnostic contains: int
case detectInt:
break;
}
final byte detectByte = 0;
switch (anInt) {
// BUG: Diagnostic contains: int
case detectByte:
break;
}
final String detectString = "";
switch (aString) {
// BUG: Diagnostic contains: java.lang.String
case detectString:
break;
}
}
void instanceOf() {
// BUG: Diagnostic contains: java.lang.Object
if (detectString() instanceof String) {}
// BUG: Diagnostic contains: java.lang.Object
if (detectStringList() instanceof Serializable) {}
}
void testSynchronized() {
// BUG: Diagnostic contains: java.lang.Object
synchronized (detectString()) {
}
}
void testThrow() throws IOException {
if (System.currentTimeMillis() > 0) {
// BUG: Diagnostic contains: java.lang.IllegalArgumentException
throw detectIllegalArgumentException();
}
if (System.currentTimeMillis() > 0) {
// BUG: Diagnostic contains: java.io.IOException
throw detectIoException();
}
}
void newClass() {
// BUG: Diagnostic contains: java.lang.String
new IllegalArgumentException(detectString());
// BUG: Diagnostic contains: HasInner
detectHasInner().new Inner();
// BUG: Diagnostic contains: HasInner
detectDifferentName().new Inner();
}
// Helper methods that we can search for.
static byte detectPrimitiveByte() {
return 0;
}
static boolean detectPrimitiveBoolean() {
return true;
}
static int detectPrimitiveInt() {
return 0;
}
static int[] detectPrimitiveIntArray() {
return new int[0];
}
static Boolean detectWrappedBoolean() {
return true;
}
static Integer detectWrappedInteger() {
return 0;
}
static String detectString() {
return "";
}
static String[] detectStringArray() {
return new String[] {};
}
static ThisEnum detectThisEnum() {
return null;
}
static Void detectVoid() {
return null;
}
static List<String> detectStringList() {
return null;
}
static List<Integer> detectIntegerList() {
return null;
}
static IllegalArgumentException detectIllegalArgumentException() {
return null;
}
static IOException detectIoException() {
return null;
}
static HasInner detectHasInner() {
return null;
}
static DifferentName detectDifferentName() {
return null;
}
|
TargetTypeTest
|
java
|
spring-projects__spring-framework
|
spring-test/src/test/java/org/springframework/test/context/support/PrivateConfigInnerClassTestCase.java
|
{
"start": 874,
"end": 973
}
|
class ____ {
// Intentionally private
@Configuration
private static
|
PrivateConfigInnerClassTestCase
|
java
|
elastic__elasticsearch
|
x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/logical/AbstractLogicalPlanSerializationTests.java
|
{
"start": 778,
"end": 1926
}
|
class ____<T extends LogicalPlan> extends AbstractNodeSerializationTests<T> {
public static LogicalPlan randomChild(int depth) {
if (randomBoolean() && depth < 4) {
// TODO more random options
return LookupSerializationTests.randomLookup(depth + 1);
}
return randomBoolean() ? EsRelationSerializationTests.randomEsRelation() : LocalRelationSerializationTests.randomLocalRelation();
}
@Override
protected final NamedWriteableRegistry getNamedWriteableRegistry() {
List<NamedWriteableRegistry.Entry> entries = new ArrayList<>();
entries.addAll(PlanWritables.logical());
entries.addAll(PlanWritables.others());
entries.addAll(ExpressionWritables.aggregates());
entries.addAll(ExpressionWritables.allExpressions());
entries.addAll(ExpressionWritables.binaryComparisons());
entries.addAll(ExpressionWritables.scalars());
return new NamedWriteableRegistry(entries);
}
@Override
protected final Class<? extends Node<?>> categoryClass() {
return LogicalPlan.class;
}
}
|
AbstractLogicalPlanSerializationTests
|
java
|
apache__dubbo
|
dubbo-spring-boot-project/dubbo-spring-boot-actuator/src/main/java/org/apache/dubbo/spring/boot/actuate/mertics/DubboMetricsBinder.java
|
{
"start": 1228,
"end": 1902
}
|
class ____ implements ApplicationListener<ApplicationStartedEvent>, DisposableBean {
private final MeterRegistry meterRegistry;
public DubboMetricsBinder(MeterRegistry meterRegistry) {
this.meterRegistry = meterRegistry;
}
@Override
public void onApplicationEvent(ApplicationStartedEvent applicationStartedEvent) {
if (meterRegistry instanceof CompositeMeterRegistry) {
MetricsGlobalRegistry.setCompositeRegistry((CompositeMeterRegistry) meterRegistry);
} else {
MetricsGlobalRegistry.getCompositeRegistry().add(meterRegistry);
}
}
@Override
public void destroy() {}
}
|
DubboMetricsBinder
|
java
|
quarkusio__quarkus
|
test-framework/security-oidc/src/main/java/io/quarkus/test/security/oidc/OidcSecurity.java
|
{
"start": 296,
"end": 545
}
|
interface ____ {
Claim[] claims() default {};
boolean introspectionRequired() default false;
TokenIntrospection[] introspection() default {};
UserInfo[] userinfo() default {};
ConfigMetadata[] config() default {};
}
|
OidcSecurity
|
java
|
elastic__elasticsearch
|
x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/inference/TextEmbedding.java
|
{
"start": 1723,
"end": 6927
}
|
class ____ extends InferenceFunction<TextEmbedding> {
private final Expression inferenceId;
private final Expression inputText;
@FunctionInfo(
returnType = "dense_vector",
description = "Generates dense vector embeddings from text input using a specified "
+ "[inference endpoint](docs-content://explore-analyze/elastic-inference/inference-api.md). "
+ "Use this function to generate query vectors for KNN searches against your vectorized data "
+ "or others dense vector based operations.",
appliesTo = { @FunctionAppliesTo(version = "9.3", lifeCycle = FunctionAppliesToLifecycle.PREVIEW) },
preview = true,
examples = {
@Example(
description = "Basic text embedding generation from a text string using an inference endpoint.",
file = "text-embedding",
tag = "text-embedding-eval"
),
@Example(
description = "Generate text embeddings and store them in a variable for reuse in KNN vector search queries.",
file = "text-embedding",
tag = "text-embedding-knn"
),
@Example(
description = "Directly embed text within a KNN query for streamlined vector search without intermediate variables.",
file = "text-embedding",
tag = "text-embedding-knn-inline"
) }
)
public TextEmbedding(
Source source,
@Param(
name = "text",
type = { "keyword" },
description = "Text string to generate embeddings from. Must be a non-null literal string value."
) Expression inputText,
@Param(
name = InferenceFunction.INFERENCE_ID_PARAMETER_NAME,
type = { "keyword" },
description = "Identifier of an existing inference endpoint the that will generate the embeddings. "
+ "The inference endpoint must have the `text_embedding` task type and should use the same model "
+ "that was used to embed your indexed data."
) Expression inferenceId
) {
super(source, List.of(inputText, inferenceId));
this.inferenceId = inferenceId;
this.inputText = inputText;
}
@Override
public void writeTo(StreamOutput out) throws IOException {
throw new UnsupportedOperationException("doesn't escape the node");
}
@Override
public String getWriteableName() {
throw new UnsupportedOperationException("doesn't escape the node");
}
public Expression inputText() {
return inputText;
}
@Override
public Expression inferenceId() {
return inferenceId;
}
@Override
public boolean foldable() {
return inferenceId.foldable() && inputText.foldable();
}
@Override
public DataType dataType() {
return inputText.dataType() == DataType.NULL ? DataType.NULL : DataType.DENSE_VECTOR;
}
@Override
protected TypeResolution resolveType() {
if (childrenResolved() == false) {
return new TypeResolution("Unresolved children");
}
TypeResolution textResolution = isNotNull(inputText, sourceText(), FIRST).and(isFoldable(inputText, sourceText(), FIRST))
.and(isType(inputText, DataType.KEYWORD::equals, sourceText(), FIRST, "string"));
if (textResolution.unresolved()) {
return textResolution;
}
TypeResolution inferenceIdResolution = isNotNull(inferenceId, sourceText(), SECOND).and(
isType(inferenceId, DataType.KEYWORD::equals, sourceText(), SECOND, "string")
).and(isFoldable(inferenceId, sourceText(), SECOND));
if (inferenceIdResolution.unresolved()) {
return inferenceIdResolution;
}
return TypeResolution.TYPE_RESOLVED;
}
@Override
public TaskType taskType() {
return TaskType.TEXT_EMBEDDING;
}
@Override
public TextEmbedding withInferenceResolutionError(String inferenceId, String error) {
return new TextEmbedding(source(), inputText, new UnresolvedAttribute(inferenceId().source(), inferenceId, error));
}
@Override
public Expression replaceChildren(List<Expression> newChildren) {
return new TextEmbedding(source(), newChildren.get(0), newChildren.get(1));
}
@Override
protected NodeInfo<? extends Expression> info() {
return NodeInfo.create(this, TextEmbedding::new, inputText, inferenceId);
}
@Override
public String toString() {
return "TEXT_EMBEDDING(" + inputText + ", " + inferenceId + ")";
}
@Override
public boolean equals(Object o) {
if (o == null || getClass() != o.getClass()) return false;
if (super.equals(o) == false) return false;
TextEmbedding textEmbedding = (TextEmbedding) o;
return Objects.equals(inferenceId, textEmbedding.inferenceId) && Objects.equals(inputText, textEmbedding.inputText);
}
@Override
public int hashCode() {
return Objects.hash(super.hashCode(), inferenceId, inputText);
}
}
|
TextEmbedding
|
java
|
apache__logging-log4j2
|
log4j-perf-test/src/main/java/org/apache/logging/log4j/perf/jmh/GelfLayoutBenchmark.java
|
{
"start": 1936,
"end": 4436
}
|
class ____ {
private static final CharSequence MESSAGE =
"This is rather long and chatty log message with quite some interesting information and a bit of fun in it which is suitable here";
private static final LogEvent EVENT = createLogEvent();
private static final KeyValuePair[] ADDITIONAL_FIELDS = KeyValuePair.EMPTY_ARRAY;
private static LogEvent createLogEvent() {
final Marker marker = null;
final String fqcn = "com.mycom.myproject.mypackage.MyClass";
final org.apache.logging.log4j.Level level = org.apache.logging.log4j.Level.DEBUG;
final Message message = new SimpleMessage(MESSAGE);
final Throwable t = null;
final StringMap mdc = null;
final ThreadContext.ContextStack ndc = null;
final String threadName = null;
final StackTraceElement location = null;
final long timestamp = 12345678;
return Log4jLogEvent.newBuilder() //
.setLoggerName("name(ignored)") //
.setMarker(marker) //
.setLoggerFqcn(fqcn) //
.setLevel(level) //
.setMessage(message) //
.setThrown(t) //
.setContextData(mdc) //
.setContextStack(ndc) //
.setThreadName(threadName) //
.setSource(location) //
.setTimeMillis(timestamp) //
.build();
}
Appender appender;
int j;
@Setup
public void setUp() {
System.setProperty("log4j2.enable.direct.encoders", "true");
appender = new DemoAppender(GelfLayout.newBuilder()
.setConfiguration(new NullConfiguration())
.setHost("host")
.setAdditionalFields(ADDITIONAL_FIELDS)
.setCompressionType(GelfLayout.CompressionType.OFF)
.setCompressionThreshold(0)
.setIncludeStacktrace(true)
.setIncludeThreadContext(true)
.build());
j = 0;
}
@TearDown
public void tearDown() {
System.clearProperty("log4j2.enable.direct.encoders");
}
@BenchmarkMode(Mode.Throughput)
@OutputTimeUnit(TimeUnit.MILLISECONDS)
@Benchmark
public boolean baseline() {
++j;
return true;
}
@BenchmarkMode(Mode.Throughput)
@OutputTimeUnit(TimeUnit.MILLISECONDS)
@Benchmark
public void log4j2Gelf() {
appender.append(EVENT);
}
}
|
GelfLayoutBenchmark
|
java
|
spring-projects__spring-boot
|
buildSrc/src/main/java/org/springframework/boot/build/bom/bomr/github/StandardGitHubRepository.java
|
{
"start": 1232,
"end": 3535
}
|
class ____ implements GitHubRepository {
private final RestTemplate rest;
StandardGitHubRepository(RestTemplate restTemplate) {
this.rest = restTemplate;
}
@Override
@SuppressWarnings("rawtypes")
public int openIssue(String title, String body, List<String> labels, Milestone milestone) {
Map<String, Object> requestBody = new HashMap<>();
requestBody.put("title", title);
if (milestone != null) {
requestBody.put("milestone", milestone.getNumber());
}
if (!labels.isEmpty()) {
requestBody.put("labels", labels);
}
requestBody.put("body", body);
try {
ResponseEntity<Map> response = this.rest.postForEntity("issues", requestBody, Map.class);
// See gh-30304
sleep(Duration.ofSeconds(3));
return (Integer) response.getBody().get("number");
}
catch (RestClientException ex) {
if (ex instanceof Forbidden forbidden) {
System.out.println("Received 403 response with headers " + forbidden.getResponseHeaders());
}
throw ex;
}
}
@Override
public Set<String> getLabels() {
return new HashSet<>(get("labels?per_page=100", (label) -> (String) label.get("name")));
}
@Override
public List<Milestone> getMilestones() {
return get("milestones?per_page=100", (milestone) -> new Milestone((String) milestone.get("title"),
(Integer) milestone.get("number"),
(milestone.get("due_on") != null) ? OffsetDateTime.parse((String) milestone.get("due_on")) : null));
}
@Override
public List<Issue> findIssues(List<String> labels, Milestone milestone) {
return get(
"issues?per_page=100&state=all&labels=" + String.join(",", labels) + "&milestone="
+ milestone.getNumber(),
(issue) -> new Issue(this.rest, (Integer) issue.get("number"), (String) issue.get("title"),
Issue.State.of((String) issue.get("state"))));
}
@SuppressWarnings({ "rawtypes", "unchecked" })
private <T> List<T> get(String name, Function<Map<String, Object>, T> mapper) {
ResponseEntity<List> response = this.rest.getForEntity(name, List.class);
return ((List<Map<String, Object>>) response.getBody()).stream().map(mapper).toList();
}
private static void sleep(Duration duration) {
try {
Thread.sleep(duration.toMillis());
}
catch (InterruptedException ex) {
Thread.currentThread().interrupt();
}
}
}
|
StandardGitHubRepository
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/query/joinfetch/Item.java
|
{
"start": 229,
"end": 1088
}
|
class ____ {
private String description;
private Long id;
private Category category;
private Set bids = new HashSet();
private Set comments = new HashSet();
public String getDescription() {
return description;
}
public void setDescription(String description) {
this.description = description;
}
public Long getId() {
return id;
}
public void setId(Long id) {
this.id = id;
}
Item() {}
public Item(Category cat, String desc) {
description = desc;
category = cat;
}
public Set getBids() {
return bids;
}
public void setBids(Set bids) {
this.bids = bids;
}
public Set getComments() {
return comments;
}
public void setComments(Set comments) {
this.comments = comments;
}
public Category getCategory() {
return category;
}
public void setCategory(Category category) {
this.category = category;
}
}
|
Item
|
java
|
ReactiveX__RxJava
|
src/test/java/io/reactivex/rxjava3/internal/operators/observable/ObservableElementAtTest.java
|
{
"start": 1188,
"end": 9343
}
|
class ____ extends RxJavaTest {
@Test
public void elementAtObservable() {
assertEquals(2, Observable.fromArray(1, 2).elementAt(1).toObservable().blockingSingle()
.intValue());
}
@Test
public void elementAtWithIndexOutOfBoundsObservable() {
assertEquals(-99, Observable.fromArray(1, 2).elementAt(2).toObservable().blockingSingle(-99).intValue());
}
@Test
public void elementAtOrDefaultObservable() {
assertEquals(2, Observable.fromArray(1, 2).elementAt(1, 0).toObservable().blockingSingle().intValue());
}
@Test
public void elementAtOrDefaultWithIndexOutOfBoundsObservable() {
assertEquals(0, Observable.fromArray(1, 2).elementAt(2, 0).toObservable().blockingSingle().intValue());
}
@Test
public void elementAt() {
assertEquals(2, Observable.fromArray(1, 2).elementAt(1).blockingGet()
.intValue());
}
@Test(expected = IndexOutOfBoundsException.class)
public void elementAtWithMinusIndex() {
Observable.fromArray(1, 2).elementAt(-1);
}
@Test
public void elementAtWithIndexOutOfBounds() {
assertNull(Observable.fromArray(1, 2).elementAt(2).blockingGet());
}
@Test
public void elementAtOrDefault() {
assertEquals(2, Observable.fromArray(1, 2).elementAt(1, 0).blockingGet().intValue());
}
@Test
public void elementAtOrDefaultWithIndexOutOfBounds() {
assertEquals(0, Observable.fromArray(1, 2).elementAt(2, 0).blockingGet().intValue());
}
@Test(expected = IndexOutOfBoundsException.class)
public void elementAtOrDefaultWithMinusIndex() {
Observable.fromArray(1, 2).elementAt(-1, 0);
}
@Test(expected = IndexOutOfBoundsException.class)
public void elementAtOrErrorNegativeIndex() {
Observable.empty()
.elementAtOrError(-1);
}
@Test
public void elementAtOrErrorNoElement() {
Observable.empty()
.elementAtOrError(0)
.test()
.assertNoValues()
.assertError(NoSuchElementException.class);
}
@Test
public void elementAtOrErrorOneElement() {
Observable.just(1)
.elementAtOrError(0)
.test()
.assertNoErrors()
.assertValue(1);
}
@Test
public void elementAtOrErrorMultipleElements() {
Observable.just(1, 2, 3)
.elementAtOrError(1)
.test()
.assertNoErrors()
.assertValue(2);
}
@Test
public void elementAtOrErrorInvalidIndex() {
Observable.just(1, 2, 3)
.elementAtOrError(3)
.test()
.assertNoValues()
.assertError(NoSuchElementException.class);
}
@Test
public void elementAtOrErrorError() {
Observable.error(new RuntimeException("error"))
.elementAtOrError(0)
.to(TestHelper.testConsumer())
.assertNoValues()
.assertErrorMessage("error")
.assertError(RuntimeException.class);
}
@Test
public void elementAtIndex0OnEmptySource() {
Observable.empty()
.elementAt(0)
.test()
.assertResult();
}
@Test
public void elementAtIndex0WithDefaultOnEmptySource() {
Observable.empty()
.elementAt(0, 5)
.test()
.assertResult(5);
}
@Test
public void elementAtIndex1OnEmptySource() {
Observable.empty()
.elementAt(1)
.test()
.assertResult();
}
@Test
public void elementAtIndex1WithDefaultOnEmptySource() {
Observable.empty()
.elementAt(1, 10)
.test()
.assertResult(10);
}
@Test
public void elementAtOrErrorIndex1OnEmptySource() {
Observable.empty()
.elementAtOrError(1)
.test()
.assertFailure(NoSuchElementException.class);
}
@Test
public void dispose() {
TestHelper.checkDisposed(PublishSubject.create().elementAt(0).toObservable());
TestHelper.checkDisposed(PublishSubject.create().elementAt(0));
TestHelper.checkDisposed(PublishSubject.create().elementAt(0, 1).toObservable());
TestHelper.checkDisposed(PublishSubject.create().elementAt(0, 1));
}
@Test
public void doubleOnSubscribe() {
TestHelper.checkDoubleOnSubscribeObservable(new Function<Observable<Object>, ObservableSource<Object>>() {
@Override
public ObservableSource<Object> apply(Observable<Object> o) throws Exception {
return o.elementAt(0).toObservable();
}
});
TestHelper.checkDoubleOnSubscribeObservableToMaybe(new Function<Observable<Object>, MaybeSource<Object>>() {
@Override
public MaybeSource<Object> apply(Observable<Object> o) throws Exception {
return o.elementAt(0);
}
});
TestHelper.checkDoubleOnSubscribeObservableToSingle(new Function<Observable<Object>, SingleSource<Object>>() {
@Override
public SingleSource<Object> apply(Observable<Object> o) throws Exception {
return o.elementAt(0, 1);
}
});
}
@Test
public void elementAtIndex1WithDefaultOnEmptySourceObservable() {
Observable.empty()
.elementAt(1, 10)
.toObservable()
.test()
.assertResult(10);
}
@Test
public void errorObservable() {
Observable.error(new TestException())
.elementAt(1, 10)
.toObservable()
.test()
.assertFailure(TestException.class);
}
@Test
public void badSourceObservable() {
List<Throwable> errors = TestHelper.trackPluginErrors();
try {
new Observable<Integer>() {
@Override
protected void subscribeActual(Observer<? super Integer> observer) {
observer.onSubscribe(Disposable.empty());
observer.onNext(1);
observer.onNext(2);
observer.onError(new TestException());
observer.onComplete();
}
}
.elementAt(0)
.toObservable()
.test()
.assertResult(1);
TestHelper.assertUndeliverable(errors, 0, TestException.class);
} finally {
RxJavaPlugins.reset();
}
}
@Test
public void badSource() {
List<Throwable> errors = TestHelper.trackPluginErrors();
try {
new Observable<Integer>() {
@Override
protected void subscribeActual(Observer<? super Integer> observer) {
observer.onSubscribe(Disposable.empty());
observer.onNext(1);
observer.onNext(2);
observer.onError(new TestException());
observer.onComplete();
}
}
.elementAt(0)
.test()
.assertResult(1);
TestHelper.assertUndeliverable(errors, 0, TestException.class);
} finally {
RxJavaPlugins.reset();
}
}
@Test
public void badSource2() {
List<Throwable> errors = TestHelper.trackPluginErrors();
try {
new Observable<Integer>() {
@Override
protected void subscribeActual(Observer<? super Integer> observer) {
observer.onSubscribe(Disposable.empty());
observer.onNext(1);
observer.onNext(2);
observer.onError(new TestException());
observer.onComplete();
}
}
.elementAt(0, 1)
.test()
.assertResult(1);
TestHelper.assertUndeliverable(errors, 0, TestException.class);
} finally {
RxJavaPlugins.reset();
}
}
}
|
ObservableElementAtTest
|
java
|
apache__hadoop
|
hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AStorageClass.java
|
{
"start": 5812,
"end": 6550
}
|
class ____'t be read directly
// when trying to read it, AccessDeniedException will be thrown
// with message InvalidObjectState
intercept(AccessDeniedException.class, "InvalidObjectState", () -> fs.rename(path, path2));
}
/*
* Verify object can be created and copied correctly
* with completely invalid storage class
*/
@Test
public void testCreateAndCopyObjectWithStorageClassInvalid() throws Throwable {
Configuration conf = this.createConfiguration();
conf.set(STORAGE_CLASS, "testing");
S3AContract contract = (S3AContract) createContract(conf);
contract.init();
FileSystem fs = contract.getTestFileSystem();
Path dir = methodPath();
fs.mkdirs(dir);
// even with storage
|
can
|
java
|
apache__hadoop
|
hadoop-common-project/hadoop-nfs/src/test/java/org/apache/hadoop/nfs/TestNfsExports.java
|
{
"start": 971,
"end": 8584
}
|
class ____ {
private final String address1 = "192.168.0.12";
private final String address2 = "10.0.0.12";
private final String hostname1 = "a.b.com";
private final String hostname2 = "a.b.org";
private static final long ExpirationPeriod =
Nfs3Constant.NFS_EXPORTS_CACHE_EXPIRYTIME_MILLIS_DEFAULT * 1000 * 1000;
private static final int CacheSize = Nfs3Constant.NFS_EXPORTS_CACHE_SIZE_DEFAULT;
private static final long NanosPerMillis = 1000000;
@Test
public void testWildcardRW() {
NfsExports matcher = new NfsExports(CacheSize, ExpirationPeriod, "* rw");
Assertions.assertEquals(AccessPrivilege.READ_WRITE,
matcher.getAccessPrivilege(address1, hostname1));
}
@Test
public void testWildcardRO() {
NfsExports matcher = new NfsExports(CacheSize, ExpirationPeriod, "* ro");
Assertions.assertEquals(AccessPrivilege.READ_ONLY,
matcher.getAccessPrivilege(address1, hostname1));
}
@Test
public void testExactAddressRW() {
NfsExports matcher = new NfsExports(CacheSize, ExpirationPeriod, address1
+ " rw");
Assertions.assertEquals(AccessPrivilege.READ_WRITE,
matcher.getAccessPrivilege(address1, hostname1));
Assertions.assertFalse(AccessPrivilege.READ_WRITE == matcher
.getAccessPrivilege(address2, hostname1));
}
@Test
public void testExactAddressRO() {
NfsExports matcher = new NfsExports(CacheSize, ExpirationPeriod, address1);
Assertions.assertEquals(AccessPrivilege.READ_ONLY,
matcher.getAccessPrivilege(address1, hostname1));
Assertions.assertEquals(AccessPrivilege.NONE, matcher.getAccessPrivilege(address2, hostname1));
}
@Test
public void testExactHostRW() {
NfsExports matcher = new NfsExports(CacheSize, ExpirationPeriod, hostname1
+ " rw");
Assertions.assertEquals(AccessPrivilege.READ_WRITE,
matcher.getAccessPrivilege(address1, hostname1));
}
@Test
public void testExactHostRO() {
NfsExports matcher = new NfsExports(CacheSize, ExpirationPeriod, hostname1);
Assertions.assertEquals(AccessPrivilege.READ_ONLY,
matcher.getAccessPrivilege(address1, hostname1));
}
@Test
public void testCidrShortRW() {
NfsExports matcher = new NfsExports(CacheSize, ExpirationPeriod,
"192.168.0.0/22 rw");
Assertions.assertEquals(AccessPrivilege.READ_WRITE,
matcher.getAccessPrivilege(address1, hostname1));
Assertions.assertEquals(AccessPrivilege.NONE, matcher.getAccessPrivilege(address2, hostname1));
}
@Test
public void testCidrShortRO() {
NfsExports matcher = new NfsExports(CacheSize, ExpirationPeriod,
"192.168.0.0/22");
Assertions.assertEquals(AccessPrivilege.READ_ONLY,
matcher.getAccessPrivilege(address1, hostname1));
Assertions.assertEquals(AccessPrivilege.NONE, matcher.getAccessPrivilege(address2, hostname1));
}
@Test
public void testCidrLongRW() {
NfsExports matcher = new NfsExports(CacheSize, ExpirationPeriod,
"192.168.0.0/255.255.252.0 rw");
Assertions.assertEquals(AccessPrivilege.READ_WRITE,
matcher.getAccessPrivilege(address1, hostname1));
Assertions.assertEquals(AccessPrivilege.NONE, matcher.getAccessPrivilege(address2, hostname1));
}
@Test
public void testCidrLongRO() {
NfsExports matcher = new NfsExports(CacheSize, ExpirationPeriod,
"192.168.0.0/255.255.252.0");
Assertions.assertEquals(AccessPrivilege.READ_ONLY,
matcher.getAccessPrivilege(address1, hostname1));
Assertions.assertEquals(AccessPrivilege.NONE, matcher.getAccessPrivilege(address2, hostname1));
}
@Test
public void testRegexIPRW() {
NfsExports matcher = new NfsExports(CacheSize, ExpirationPeriod,
"192.168.0.[0-9]+ rw");
Assertions.assertEquals(AccessPrivilege.READ_WRITE,
matcher.getAccessPrivilege(address1, hostname1));
Assertions.assertEquals(AccessPrivilege.NONE, matcher.getAccessPrivilege(address2, hostname1));
}
@Test
public void testRegexIPRO() {
NfsExports matcher = new NfsExports(CacheSize, ExpirationPeriod,
"192.168.0.[0-9]+");
Assertions.assertEquals(AccessPrivilege.READ_ONLY,
matcher.getAccessPrivilege(address1, hostname1));
Assertions.assertEquals(AccessPrivilege.NONE, matcher.getAccessPrivilege(address2, hostname1));
}
@Test
public void testRegexHostRW() {
NfsExports matcher = new NfsExports(CacheSize, ExpirationPeriod,
"[a-z]+.b.com rw");
Assertions.assertEquals(AccessPrivilege.READ_WRITE,
matcher.getAccessPrivilege(address1, hostname1));
// address1 will hit the cache
Assertions.assertEquals(AccessPrivilege.READ_WRITE,
matcher.getAccessPrivilege(address1, hostname2));
}
@Test
public void testRegexHostRO() {
NfsExports matcher = new NfsExports(CacheSize, ExpirationPeriod,
"[a-z]+.b.com");
Assertions.assertEquals(AccessPrivilege.READ_ONLY,
matcher.getAccessPrivilege(address1, hostname1));
// address1 will hit the cache
Assertions.assertEquals(AccessPrivilege.READ_ONLY,
matcher.getAccessPrivilege(address1, hostname2));
}
@Test
public void testRegexGrouping() {
NfsExports matcher = new NfsExports(CacheSize, ExpirationPeriod,
"192.168.0.(12|34)");
Assertions.assertEquals(AccessPrivilege.READ_ONLY,
matcher.getAccessPrivilege(address1, hostname1));
// address1 will hit the cache
Assertions.assertEquals(AccessPrivilege.READ_ONLY,
matcher.getAccessPrivilege(address1, hostname2));
matcher = new NfsExports(CacheSize, ExpirationPeriod, "\\w*.a.b.com");
Assertions.assertEquals(AccessPrivilege.READ_ONLY,
matcher.getAccessPrivilege("1.2.3.4", "web.a.b.com"));
// address "1.2.3.4" will hit the cache
Assertions.assertEquals(AccessPrivilege.READ_ONLY,
matcher.getAccessPrivilege("1.2.3.4", "email.a.b.org"));
}
@Test
public void testMultiMatchers() throws Exception {
long shortExpirationPeriod = 1 * 1000 * 1000 * 1000; // 1s
NfsExports matcher = new NfsExports(CacheSize, shortExpirationPeriod,
"192.168.0.[0-9]+;[a-z]+.b.com rw");
Assertions.assertEquals(AccessPrivilege.READ_ONLY,
matcher.getAccessPrivilege(address1, hostname2));
Assertions.assertEquals(AccessPrivilege.READ_ONLY,
matcher.getAccessPrivilege(address1, address1));
Assertions.assertEquals(AccessPrivilege.READ_ONLY,
matcher.getAccessPrivilege(address1, hostname1));
Assertions.assertEquals(AccessPrivilege.READ_WRITE,
matcher.getAccessPrivilege(address2, hostname1));
// address2 will hit the cache
Assertions.assertEquals(AccessPrivilege.READ_WRITE,
matcher.getAccessPrivilege(address2, hostname2));
Thread.sleep(1000);
// no cache for address2 now
AccessPrivilege ap;
long startNanos = System.nanoTime();
do {
ap = matcher.getAccessPrivilege(address2, address2);
if (ap == AccessPrivilege.NONE) {
break;
}
Thread.sleep(500);
} while ((System.nanoTime() - startNanos) / NanosPerMillis < 5000);
Assertions.assertEquals(AccessPrivilege.NONE, ap);
}
@Test
public void testInvalidHost() {
Assertions.assertThrows(IllegalArgumentException.class, () -> {
NfsExports matcher = new NfsExports(CacheSize, ExpirationPeriod, "foo#bar");
});
}
@Test
public void testInvalidSeparator() {
Assertions.assertThrows(IllegalArgumentException.class, () -> {
NfsExports matcher = new NfsExports(CacheSize, ExpirationPeriod, "foo ro : bar rw");
});
}
}
|
TestNfsExports
|
java
|
apache__camel
|
components/camel-file/src/main/java/org/apache/camel/component/file/GenericFileComponent.java
|
{
"start": 1359,
"end": 1417
}
|
class ____ component. To be extended.
*/
public abstract
|
file
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/jpa/query/ScalarResultNativeQueryTest.java
|
{
"start": 1266,
"end": 2037
}
|
class ____ {
@Id
private Integer id;
@SuppressWarnings("unused")
@Column(name = "age")
private int age;
public Person() {
}
public Person(Integer id, int age) {
this.id = id;
this.age = age;
}
}
@Test
public void shouldApplyConfiguredTypeForProjectionOfScalarValue(EntityManagerFactoryScope scope) {
scope.inTransaction( entityManager -> entityManager.persist( new Person( 1, 29 ) ) );
scope.inTransaction( entityManager -> {
List<String> results = entityManager.createNamedQuery( "personAge", String.class ).getResultList();
assertEquals( 1, results.size() );
assertEquals( "29", results.get( 0 ) );
} );
scope.inTransaction( entityManager -> entityManager.createQuery( "delete from Person" ).executeUpdate() );
}
}
|
Person
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/plugins/AnalysisPlugin.java
|
{
"start": 2553,
"end": 5439
}
|
interface ____ {
/**
* Override to add additional {@link CharFilter}s. See {@link #requiresAnalysisSettings(AnalysisProvider)}
* how to on get the configuration from the index.
*/
default Map<String, AnalysisProvider<CharFilterFactory>> getCharFilters() {
return emptyMap();
}
/**
* Override to add additional {@link TokenFilter}s. See {@link #requiresAnalysisSettings(AnalysisProvider)}
* how to on get the configuration from the index.
*/
default Map<String, AnalysisProvider<TokenFilterFactory>> getTokenFilters() {
return emptyMap();
}
/**
* Override to add additional {@link Tokenizer}s. See {@link #requiresAnalysisSettings(AnalysisProvider)}
* how to on get the configuration from the index.
*/
default Map<String, AnalysisProvider<TokenizerFactory>> getTokenizers() {
return emptyMap();
}
/**
* Override to add additional {@link Analyzer}s. See {@link #requiresAnalysisSettings(AnalysisProvider)}
* how to on get the configuration from the index.
*/
default Map<String, AnalysisProvider<AnalyzerProvider<? extends Analyzer>>> getAnalyzers() {
return emptyMap();
}
/**
* Override to add additional pre-configured {@link Analyzer}s.
*/
default List<PreBuiltAnalyzerProviderFactory> getPreBuiltAnalyzerProviderFactories() {
return emptyList();
}
/**
* Override to add additional pre-configured {@link CharFilter}s.
*/
default List<PreConfiguredCharFilter> getPreConfiguredCharFilters() {
return emptyList();
}
/**
* Override to add additional pre-configured {@link TokenFilter}s.
*/
default List<PreConfiguredTokenFilter> getPreConfiguredTokenFilters() {
return emptyList();
}
/**
* Override to add additional pre-configured {@link Tokenizer}.
*/
default List<PreConfiguredTokenizer> getPreConfiguredTokenizers() {
return emptyList();
}
/**
* Override to add additional hunspell {@link org.apache.lucene.analysis.hunspell.Dictionary}s.
*/
default Map<String, org.apache.lucene.analysis.hunspell.Dictionary> getHunspellDictionaries() {
return emptyMap();
}
/**
* Mark an {@link AnalysisProvider} as requiring the index's settings.
*/
static <T> AnalysisProvider<T> requiresAnalysisSettings(AnalysisProvider<T> provider) {
return new AnalysisProvider<T>() {
@Override
public T get(IndexSettings indexSettings, Environment environment, String name, Settings settings) throws IOException {
return provider.get(indexSettings, environment, name, settings);
}
@Override
public boolean requiresAnalysisSettings() {
return true;
}
};
}
}
|
AnalysisPlugin
|
java
|
elastic__elasticsearch
|
server/src/test/java/org/elasticsearch/cluster/coordination/MessagesTests.java
|
{
"start": 1085,
"end": 16756
}
|
class ____ extends ESTestCase {
private DiscoveryNode createNode(String id) {
return DiscoveryNodeUtils.create(id);
}
public void testJoinEqualsHashCodeSerialization() {
Join initialJoin = new Join(
createNode(randomAlphaOfLength(10)),
createNode(randomAlphaOfLength(10)),
randomNonNegativeLong(),
randomNonNegativeLong(),
randomNonNegativeLong()
);
EqualsHashCodeTestUtils.checkEqualsAndHashCode(
initialJoin,
join -> copyWriteable(join, writableRegistry(), Join::new),
join -> switch (randomInt(4)) {
case 0 ->
// change sourceNode
new Join(
createNode(randomAlphaOfLength(20)),
join.masterCandidateNode(),
join.term(),
join.lastAcceptedTerm(),
join.lastAcceptedVersion()
);
case 1 ->
// change targetNode
new Join(
join.votingNode(),
createNode(randomAlphaOfLength(20)),
join.term(),
join.lastAcceptedTerm(),
join.lastAcceptedVersion()
);
case 2 ->
// change term
new Join(
join.votingNode(),
join.masterCandidateNode(),
randomValueOtherThan(join.term(), ESTestCase::randomNonNegativeLong),
join.lastAcceptedTerm(),
join.lastAcceptedVersion()
);
case 3 ->
// change last accepted term
new Join(
join.votingNode(),
join.masterCandidateNode(),
join.term(),
randomValueOtherThan(join.lastAcceptedTerm(), ESTestCase::randomNonNegativeLong),
join.lastAcceptedVersion()
);
case 4 ->
// change version
new Join(
join.votingNode(),
join.masterCandidateNode(),
join.term(),
join.lastAcceptedTerm(),
randomValueOtherThan(join.lastAcceptedVersion(), ESTestCase::randomNonNegativeLong)
);
default -> throw new AssertionError();
}
);
}
public void testPublishRequestEqualsHashCode() {
PublishRequest initialPublishRequest = new PublishRequest(randomClusterState());
EqualsHashCodeTestUtils.checkEqualsAndHashCode(
initialPublishRequest,
publishRequest -> new PublishRequest(publishRequest.getAcceptedState()),
in -> new PublishRequest(randomClusterState())
);
}
public void testPublishResponseEqualsHashCodeSerialization() {
PublishResponse initialPublishResponse = new PublishResponse(randomNonNegativeLong(), randomNonNegativeLong());
EqualsHashCodeTestUtils.checkEqualsAndHashCode(
initialPublishResponse,
publishResponse -> copyWriteable(publishResponse, writableRegistry(), PublishResponse::new),
publishResponse -> switch (randomInt(1)) {
case 0 ->
// change term
new PublishResponse(
randomValueOtherThan(publishResponse.getTerm(), ESTestCase::randomNonNegativeLong),
publishResponse.getVersion()
);
case 1 ->
// change version
new PublishResponse(
publishResponse.getTerm(),
randomValueOtherThan(publishResponse.getVersion(), ESTestCase::randomNonNegativeLong)
);
default -> throw new AssertionError();
}
);
}
public void testPublishWithJoinResponseEqualsHashCodeSerialization() {
PublishResponse initialPublishResponse = new PublishResponse(randomNonNegativeLong(), randomNonNegativeLong());
Join initialJoin = new Join(
createNode(randomAlphaOfLength(10)),
createNode(randomAlphaOfLength(10)),
randomNonNegativeLong(),
randomNonNegativeLong(),
randomNonNegativeLong()
);
PublishWithJoinResponse initialPublishWithJoinResponse = new PublishWithJoinResponse(
initialPublishResponse,
randomBoolean() ? Optional.empty() : Optional.of(initialJoin)
);
EqualsHashCodeTestUtils.checkEqualsAndHashCode(
initialPublishWithJoinResponse,
publishWithJoinResponse -> copyWriteable(publishWithJoinResponse, writableRegistry(), PublishWithJoinResponse::new),
publishWithJoinResponse -> switch (randomInt(1)) {
case 0 ->
// change publish response
new PublishWithJoinResponse(
new PublishResponse(randomNonNegativeLong(), randomNonNegativeLong()),
publishWithJoinResponse.getJoin()
);
case 1 -> {
// change optional join
Join newJoin = new Join(
createNode(randomAlphaOfLength(10)),
createNode(randomAlphaOfLength(10)),
randomNonNegativeLong(),
randomNonNegativeLong(),
randomNonNegativeLong()
);
yield new PublishWithJoinResponse(
publishWithJoinResponse.getPublishResponse(),
publishWithJoinResponse.getJoin().isPresent() && randomBoolean() ? Optional.empty() : Optional.of(newJoin)
);
}
default -> throw new AssertionError();
}
);
}
public void testStartJoinRequestEqualsHashCodeSerialization() {
StartJoinRequest initialStartJoinRequest = new StartJoinRequest(createNode(randomAlphaOfLength(10)), randomNonNegativeLong());
EqualsHashCodeTestUtils.checkEqualsAndHashCode(
initialStartJoinRequest,
startJoinRequest -> copyWriteable(startJoinRequest, writableRegistry(), StartJoinRequest::new),
startJoinRequest -> switch (randomInt(1)) {
case 0 ->
// change sourceNode
new StartJoinRequest(createNode(randomAlphaOfLength(20)), startJoinRequest.getTerm());
case 1 ->
// change term
new StartJoinRequest(
startJoinRequest.getMasterCandidateNode(),
randomValueOtherThan(startJoinRequest.getTerm(), ESTestCase::randomNonNegativeLong)
);
default -> throw new AssertionError();
}
);
}
public void testApplyCommitEqualsHashCodeSerialization() {
ApplyCommitRequest initialApplyCommit = new ApplyCommitRequest(
createNode(randomAlphaOfLength(10)),
randomNonNegativeLong(),
randomNonNegativeLong()
);
EqualsHashCodeTestUtils.checkEqualsAndHashCode(
initialApplyCommit,
applyCommit -> copyWriteable(applyCommit, writableRegistry(), ApplyCommitRequest::new),
applyCommit -> switch (randomInt(2)) {
case 0 ->
// change sourceNode
new ApplyCommitRequest(createNode(randomAlphaOfLength(20)), applyCommit.getTerm(), applyCommit.getVersion());
case 1 ->
// change term
new ApplyCommitRequest(
applyCommit.getSourceNode(),
randomValueOtherThan(applyCommit.getTerm(), ESTestCase::randomNonNegativeLong),
applyCommit.getVersion()
);
case 2 ->
// change version
new ApplyCommitRequest(
applyCommit.getSourceNode(),
applyCommit.getTerm(),
randomValueOtherThan(applyCommit.getVersion(), ESTestCase::randomNonNegativeLong)
);
default -> throw new AssertionError();
}
);
}
public void testJoinRequestEqualsHashCodeSerialization() {
Join initialJoin = new Join(
createNode(randomAlphaOfLength(10)),
createNode(randomAlphaOfLength(10)),
randomNonNegativeLong(),
randomNonNegativeLong(),
randomNonNegativeLong()
);
JoinRequest initialJoinRequest = new JoinRequest(
initialJoin.votingNode(),
CompatibilityVersionsUtils.fakeSystemIndicesRandom(),
Set.of(generateRandomStringArray(10, 10, false)),
randomNonNegativeLong(),
randomBoolean() ? Optional.empty() : Optional.of(initialJoin)
);
EqualsHashCodeTestUtils.checkEqualsAndHashCode(
initialJoinRequest,
joinRequest -> copyWriteable(joinRequest, writableRegistry(), JoinRequest::new),
joinRequest -> switch (randomInt(4)) {
case 0 -> {
assumeTrue("Optional join needs to be empty", joinRequest.getOptionalJoin().isEmpty());
yield new JoinRequest(
createNode(randomAlphaOfLength(10)),
joinRequest.getCompatibilityVersions(),
joinRequest.getFeatures(),
joinRequest.getMinimumTerm(),
joinRequest.getOptionalJoin()
);
}
case 1 -> new JoinRequest(
joinRequest.getSourceNode(),
new CompatibilityVersions(
TransportVersionUtils.randomVersion(Set.of(joinRequest.getCompatibilityVersions().transportVersion())),
Map.of()
),
joinRequest.getFeatures(),
joinRequest.getMinimumTerm(),
joinRequest.getOptionalJoin()
);
case 2 -> new JoinRequest(
joinRequest.getSourceNode(),
joinRequest.getCompatibilityVersions(),
randomValueOtherThan(joinRequest.getFeatures(), () -> Set.of(generateRandomStringArray(10, 10, false))),
joinRequest.getMinimumTerm(),
joinRequest.getOptionalJoin()
);
case 3 -> new JoinRequest(
joinRequest.getSourceNode(),
joinRequest.getCompatibilityVersions(),
joinRequest.getFeatures(),
randomValueOtherThan(joinRequest.getMinimumTerm(), ESTestCase::randomNonNegativeLong),
joinRequest.getOptionalJoin()
);
case 4 -> {
// change OptionalJoin
Join newJoin = new Join(
joinRequest.getSourceNode(),
createNode(randomAlphaOfLength(10)),
randomNonNegativeLong(),
randomNonNegativeLong(),
randomNonNegativeLong()
);
yield new JoinRequest(
joinRequest.getSourceNode(),
joinRequest.getCompatibilityVersions(),
joinRequest.getFeatures(),
joinRequest.getMinimumTerm(),
joinRequest.getOptionalJoin().isPresent() && randomBoolean() ? Optional.empty() : Optional.of(newJoin)
);
}
default -> throw new AssertionError();
}
);
}
public ClusterState randomClusterState() {
return CoordinationStateTests.clusterState(
randomNonNegativeLong(),
randomNonNegativeLong(),
createNode(randomAlphaOfLength(10)),
new CoordinationMetadata.VotingConfiguration(Sets.newHashSet(generateRandomStringArray(10, 10, false))),
new CoordinationMetadata.VotingConfiguration(Sets.newHashSet(generateRandomStringArray(10, 10, false))),
randomLong()
);
}
public void testPreVoteRequestEqualsHashCodeSerialization() {
PreVoteRequest initialPreVoteRequest = new PreVoteRequest(createNode(randomAlphaOfLength(10)), randomNonNegativeLong());
EqualsHashCodeTestUtils.checkEqualsAndHashCode(
initialPreVoteRequest,
preVoteRequest -> copyWriteable(preVoteRequest, writableRegistry(), PreVoteRequest::new),
preVoteRequest -> switch (randomInt(1)) {
case 0 -> new PreVoteRequest(createNode(randomAlphaOfLength(10)), preVoteRequest.getCurrentTerm());
case 1 -> new PreVoteRequest(preVoteRequest.getSourceNode(), randomNonNegativeLong());
default -> throw new AssertionError();
}
);
}
public void testPreVoteResponseEqualsHashCodeSerialization() {
long currentTerm = randomNonNegativeLong();
PreVoteResponse initialPreVoteResponse = new PreVoteResponse(
currentTerm,
randomLongBetween(1, currentTerm),
randomNonNegativeLong()
);
EqualsHashCodeTestUtils.checkEqualsAndHashCode(
initialPreVoteResponse,
preVoteResponse -> copyWriteable(preVoteResponse, writableRegistry(), PreVoteResponse::new),
preVoteResponse -> switch (randomInt(2)) {
case 0 -> {
assumeTrue("last-accepted term is Long.MAX_VALUE", preVoteResponse.getLastAcceptedTerm() < Long.MAX_VALUE);
yield new PreVoteResponse(
randomValueOtherThan(
preVoteResponse.getCurrentTerm(),
() -> randomLongBetween(preVoteResponse.getLastAcceptedTerm(), Long.MAX_VALUE)
),
preVoteResponse.getLastAcceptedTerm(),
preVoteResponse.getLastAcceptedVersion()
);
}
case 1 -> {
assumeTrue("current term is 1", 1 < preVoteResponse.getCurrentTerm());
yield new PreVoteResponse(
preVoteResponse.getCurrentTerm(),
randomValueOtherThan(
preVoteResponse.getLastAcceptedTerm(),
() -> randomLongBetween(1, preVoteResponse.getCurrentTerm())
),
preVoteResponse.getLastAcceptedVersion()
);
}
case 2 -> new PreVoteResponse(
preVoteResponse.getCurrentTerm(),
preVoteResponse.getLastAcceptedTerm(),
randomValueOtherThan(preVoteResponse.getLastAcceptedVersion(), ESTestCase::randomNonNegativeLong)
);
default -> throw new AssertionError();
}
);
}
}
|
MessagesTests
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/fixes/SuggestedFixesTest.java
|
{
"start": 77612,
"end": 77672
}
|
class ____ {}
@Anno(a = 1, b = 2)
|
A
|
java
|
apache__logging-log4j2
|
log4j-api/src/main/java/org/apache/logging/log4j/spi/ExtendedLogger.java
|
{
"start": 19959,
"end": 21002
}
|
class ____
* method when location information needs to be logged.
* @param level The logging Level to check.
* @param marker A Marker or null.
* @param message The message format.
* @param p0 the message parameters
* @param p1 the message parameters
* @param p2 the message parameters
* @param p3 the message parameters
* @param p4 the message parameters
* @param p5 the message parameters
* @param p6 the message parameters
* @param p7 the message parameters
* @param p8 the message parameters
* @since 2.6
*/
void logIfEnabled(
String fqcn,
Level level,
Marker marker,
String message,
Object p0,
Object p1,
Object p2,
Object p3,
Object p4,
Object p5,
Object p6,
Object p7,
Object p8);
/**
* Logs a message if the specified level is active.
*
* @param fqcn The fully qualified
|
and
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/annotations/beanvalidation/Display.java
|
{
"start": 361,
"end": 664
}
|
class ____ {
private Integer id;
private String brand;
@Id
@GeneratedValue
public Integer getId() {
return id;
}
public void setId(Integer id) {
this.id = id;
}
@NotNull
public String getBrand() {
return brand;
}
public void setBrand(String brand) {
this.brand = brand;
}
}
|
Display
|
java
|
apache__camel
|
components/camel-salesforce/camel-salesforce-component/src/main/java/org/apache/camel/component/salesforce/api/dto/pubsub/PublishResult.java
|
{
"start": 999,
"end": 1944
}
|
class ____ {
private final com.salesforce.eventbus.protobuf.PublishResult source;
public PublishResult(com.salesforce.eventbus.protobuf.PublishResult source) {
this.source = source;
}
// Replay ID is opaque.
public String getReplayId() {
return PubSubApiClient.base64EncodeByteString(source.getReplayId());
}
public boolean hasError() {
return source.hasError();
}
public Error getError() {
return source.getError();
}
public String getCorrelationKey() {
return source.getCorrelationKey();
}
public com.salesforce.eventbus.protobuf.PublishResult getSource() {
return source;
}
@Override
public String toString() {
return "PublishResult{" +
"hasError=" + hasError() +
",error=" + getError() +
",correlationKey=" + getCorrelationKey() +
"}";
}
}
|
PublishResult
|
java
|
FasterXML__jackson-databind
|
src/test/java/tools/jackson/databind/deser/filter/ProblemHandler2973Test.java
|
{
"start": 507,
"end": 572
}
|
class ____
{
// [databind#2973]
static
|
ProblemHandler2973Test
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/JUnit4TestNotRunTest.java
|
{
"start": 24481,
"end": 24960
}
|
class ____ extends TestSuper {
@Override
public void testToOverride() {}
}
""")
.doTest();
}
@Test
public void underscoreInName_mustBeATest() {
compilationHelper
.addSourceLines(
"T.java",
"""
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
@RunWith(JUnit4.class)
public
|
TestSub
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/component/StructComponentManyToAnyTest.java
|
{
"start": 1656,
"end": 2677
}
|
class ____ {
@BeforeEach
public void setUp(SessionFactoryScope scope){
scope.inTransaction(
session -> {
Book book1 = new Book();
book1.id = 1L;
book1.title = "Hibernate 3";
book1.author = new Author( "Gavin", null );
session.persist( book1 );
Book book2 = new Book();
book2.id = 2L;
book2.title = "Hibernate 6";
book2.author = new Author( "Steve", book1 );
session.persist( book2 );
}
);
}
@AfterEach
public void tearDown(SessionFactoryScope scope){
scope.getSessionFactory().getSchemaManager().truncate();
}
@Test
public void testGet(SessionFactoryScope scope){
scope.inTransaction(
session -> {
Book book = session.createQuery( "from Book b where b.id = 2", Book.class ).getSingleResult();
assertFalse( Hibernate.isInitialized( book.author.getFavoriteBook() ) );
assertEquals( "Gavin", book.author.getFavoriteBook().getAuthor().getName() );
}
);
}
@Entity(name = "Book")
public static
|
StructComponentManyToAnyTest
|
java
|
elastic__elasticsearch
|
modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/DataStreamAutoshardingIT.java
|
{
"start": 4492,
"end": 32581
}
|
class ____ extends ESIntegTestCase {
@Override
protected Collection<Class<? extends Plugin>> nodePlugins() {
return List.of(
DataStreamsPlugin.class,
MockTransportService.TestPlugin.class,
TestAutoshardingPlugin.class,
TestTelemetryPlugin.class
);
}
@Before
public void configureClusterSettings() {
updateClusterSettings(
Settings.builder()
// we want to manually trigger the rollovers in this test suite to be able to assert incrementally the changes in shard
// configurations
.put(DataStreamLifecycleService.DATA_STREAM_LIFECYCLE_POLL_INTERVAL, "30d")
);
}
@After
public void resetClusterSetting() {
updateClusterSettings(Settings.builder().putNull(DataStreamLifecycleService.DATA_STREAM_LIFECYCLE_POLL_INTERVAL));
}
public void testRolloverOnAutoShardCondition() throws Exception {
final String dataStreamName = "logs-es";
putComposableIndexTemplate("my-template", List.of("logs-*"), indexSettings(3, 0).build());
final var createDataStreamRequest = new CreateDataStreamAction.Request(TEST_REQUEST_TIMEOUT, TEST_REQUEST_TIMEOUT, dataStreamName);
assertAcked(client().execute(CreateDataStreamAction.INSTANCE, createDataStreamRequest).actionGet());
indexDocs(dataStreamName, randomIntBetween(100, 200));
{
resetTelemetry();
ClusterState clusterStateBeforeRollover = internalCluster().getCurrentMasterNodeInstance(ClusterService.class).state();
DataStream dataStreamBeforeRollover = clusterStateBeforeRollover.getMetadata().getProject().dataStreams().get(dataStreamName);
String assignedShardNodeId = clusterStateBeforeRollover.routingTable()
.index(dataStreamBeforeRollover.getWriteIndex())
.shard(0)
.primaryShard()
.currentNodeId();
Index firstGenerationIndex = clusterStateBeforeRollover.metadata()
.getProject()
.dataStreams()
.get(dataStreamName)
.getWriteIndex();
IndexMetadata firstGenerationMeta = clusterStateBeforeRollover.getMetadata().getProject().index(firstGenerationIndex);
List<ShardStats> shards = new ArrayList<>(firstGenerationMeta.getNumberOfShards());
for (int i = 0; i < firstGenerationMeta.getNumberOfShards(); i++) {
// the shard stats will yield a write load of 75.0 which will make the auto sharding service recommend an optimal number
// of 5 shards
shards.add(
getShardStats(
firstGenerationMeta,
i,
(long) Math.ceil(75.0 / firstGenerationMeta.getNumberOfShards()),
assignedShardNodeId
)
);
}
mockStatsForIndex(clusterStateBeforeRollover, assignedShardNodeId, firstGenerationMeta, shards);
assertAcked(indicesAdmin().rolloverIndex(new RolloverRequest(dataStreamName, null)).actionGet());
ClusterState clusterStateAfterRollover = internalCluster().getCurrentMasterNodeInstance(ClusterService.class).state();
DataStream dataStream = clusterStateAfterRollover.getMetadata().getProject().dataStreams().get(dataStreamName);
IndexMetadata secondGenerationMeta = clusterStateAfterRollover.metadata().getProject().getIndexSafe(dataStream.getWriteIndex());
// we auto sharded up to 5 shards
assertThat(secondGenerationMeta.getNumberOfShards(), is(5));
IndexMetadata index = clusterStateAfterRollover.metadata().getProject().index(firstGenerationIndex);
Map<String, RolloverInfo> rolloverInfos = index.getRolloverInfos();
assertThat(rolloverInfos.size(), is(1));
List<Condition<?>> metConditions = rolloverInfos.get(dataStreamName).getMetConditions();
assertThat(metConditions.size(), is(1));
assertThat(metConditions.get(0).value(), instanceOf(Integer.class));
int autoShardingRolloverInfo = (int) metConditions.get(0).value();
assertThat(autoShardingRolloverInfo, is(5));
assertTelemetry(MetadataRolloverService.AUTO_SHARDING_METRIC_NAMES.get(AutoShardingType.INCREASE_SHARDS));
}
// let's do another rollover now that will not increase the number of shards because the increase shards cooldown has not lapsed,
// however the rollover will use the existing/previous auto shard configuration and the new generation index will have 5 shards
{
resetTelemetry();
ClusterState clusterStateBeforeRollover = internalCluster().getCurrentMasterNodeInstance(ClusterService.class).state();
DataStream dataStreamBeforeRollover = clusterStateBeforeRollover.getMetadata().getProject().dataStreams().get(dataStreamName);
String assignedShardNodeId = clusterStateBeforeRollover.routingTable()
.index(dataStreamBeforeRollover.getWriteIndex())
.shard(0)
.primaryShard()
.currentNodeId();
IndexMetadata secondGenerationMeta = clusterStateBeforeRollover.metadata()
.getProject()
.index(dataStreamBeforeRollover.getIndices().get(1));
List<ShardStats> shards = new ArrayList<>(secondGenerationMeta.getNumberOfShards());
for (int i = 0; i < secondGenerationMeta.getNumberOfShards(); i++) {
// the shard stats will yield a write load of 100.0 which will make the auto sharding service recommend an optimal number of
// 7 shards
shards.add(
getShardStats(
secondGenerationMeta,
i,
(long) Math.ceil(100.0 / secondGenerationMeta.getNumberOfShards()),
assignedShardNodeId
)
);
}
mockStatsForIndex(clusterStateBeforeRollover, assignedShardNodeId, secondGenerationMeta, shards);
RolloverResponse response = indicesAdmin().rolloverIndex(new RolloverRequest(dataStreamName, null)).actionGet();
assertAcked(response);
Map<String, Boolean> conditionStatus = response.getConditionStatus();
// empty rollover executed
assertThat(conditionStatus.size(), is(0));
ClusterState clusterStateAfterRollover = internalCluster().getCurrentMasterNodeInstance(ClusterService.class).state();
DataStream dataStream = clusterStateAfterRollover.getMetadata().getProject().dataStreams().get(dataStreamName);
IndexMetadata thirdGenerationMeta = clusterStateAfterRollover.metadata().getProject().getIndexSafe(dataStream.getWriteIndex());
// we remained on 5 shards due to the increase shards cooldown
assertThat(thirdGenerationMeta.getNumberOfShards(), is(5));
assertTelemetry(MetadataRolloverService.AUTO_SHARDING_METRIC_NAMES.get(AutoShardingType.COOLDOWN_PREVENTED_INCREASE));
}
{
try {
// eliminate the increase shards cooldown and re-do the rollover should configure the data stream to 7 shards
// this time also add a rollover condition that does NOT match so that we test that it's the auto sharding that triggers
// indeed the rollover
updateClusterSettings(
Settings.builder().put(DataStreamAutoShardingService.DATA_STREAMS_AUTO_SHARDING_INCREASE_SHARDS_COOLDOWN.getKey(), "0s")
);
ClusterState clusterStateBeforeRollover = internalCluster().getCurrentMasterNodeInstance(ClusterService.class).state();
DataStream dataStreamBeforeRollover = clusterStateBeforeRollover.getMetadata()
.getProject()
.dataStreams()
.get(dataStreamName);
String assignedShardNodeId = clusterStateBeforeRollover.routingTable()
.index(dataStreamBeforeRollover.getWriteIndex())
.shard(0)
.primaryShard()
.currentNodeId();
IndexMetadata thirdGenIndex = clusterStateBeforeRollover.metadata()
.getProject()
.index(dataStreamBeforeRollover.getIndices().get(2));
List<ShardStats> shards = new ArrayList<>(thirdGenIndex.getNumberOfShards());
for (int i = 0; i < thirdGenIndex.getNumberOfShards(); i++) {
// the shard stats will yield a write load of 100.0 which will make the auto sharding service recommend an optimal
// number of 7 shards
shards.add(
getShardStats(thirdGenIndex, i, (long) Math.ceil(100.0 / thirdGenIndex.getNumberOfShards()), assignedShardNodeId)
);
}
mockStatsForIndex(clusterStateBeforeRollover, assignedShardNodeId, thirdGenIndex, shards);
RolloverRequest request = new RolloverRequest(dataStreamName, null);
request.setConditions(RolloverConditions.newBuilder().addMaxIndexDocsCondition(1_000_000L).build());
RolloverResponse response = indicesAdmin().rolloverIndex(request).actionGet();
assertAcked(response);
Map<String, Boolean> conditionStatus = response.getConditionStatus();
assertThat(conditionStatus.size(), is(2));
for (Map.Entry<String, Boolean> entry : conditionStatus.entrySet()) {
if (entry.getKey().equals(new MaxDocsCondition(1_000_000L).toString())) {
assertThat(entry.getValue(), is(false));
} else {
assertThat(entry.getKey(), is(new OptimalShardCountCondition(7).toString()));
assertThat(entry.getValue(), is(true));
}
}
ClusterState clusterStateAfterRollover = internalCluster().getCurrentMasterNodeInstance(ClusterService.class).state();
DataStream dataStream = clusterStateAfterRollover.getMetadata().getProject().dataStreams().get(dataStreamName);
IndexMetadata fourthGenerationMeta = clusterStateAfterRollover.metadata()
.getProject()
.getIndexSafe(dataStream.getWriteIndex());
// we auto-sharded up to 7 shards as there was no cooldown period
assertThat(fourthGenerationMeta.getNumberOfShards(), is(7));
} finally {
// reset increase shards cooldown value
updateClusterSettings(
Settings.builder().putNull(DataStreamAutoShardingService.DATA_STREAMS_AUTO_SHARDING_INCREASE_SHARDS_COOLDOWN.getKey())
);
}
}
}
    /**
     * Verifies auto-sharding DOWN on rollover: a 3-shard data stream whose simulated write load
     * warrants only 2 shards (1) keeps 3 shards while the decrease-shards cooldown is in effect,
     * (2) does not roll over on the shard-reduction recommendation alone once the cooldown is
     * zeroed, and (3) shrinks to 2 shards when another rollover condition matches.
     */
    public void testReduceShardsOnRollover() throws IOException {
        final String dataStreamName = "logs-es";
        // start with 3 shards
        putComposableIndexTemplate("my-template", List.of("logs-*"), indexSettings(3, 0).build());
        final var createDataStreamRequest = new CreateDataStreamAction.Request(TEST_REQUEST_TIMEOUT, TEST_REQUEST_TIMEOUT, dataStreamName);
        assertAcked(client().execute(CreateDataStreamAction.INSTANCE, createDataStreamRequest).actionGet());
        indexDocs(dataStreamName, randomIntBetween(100, 200));
        {
            // rollover executes but the reduction in shard number will not be executed due to the reduce shards cooldown
            ClusterState clusterStateBeforeRollover = internalCluster().getCurrentMasterNodeInstance(ClusterService.class).state();
            DataStream dataStreamBeforeRollover = clusterStateBeforeRollover.getMetadata().getProject().dataStreams().get(dataStreamName);
            String assignedShardNodeId = clusterStateBeforeRollover.routingTable()
                .index(dataStreamBeforeRollover.getWriteIndex())
                .shard(0)
                .primaryShard()
                .currentNodeId();
            Index firstGenerationIndex = clusterStateBeforeRollover.metadata()
                .getProject()
                .dataStreams()
                .get(dataStreamName)
                .getWriteIndex();
            IndexMetadata firstGenerationMeta = clusterStateBeforeRollover.getMetadata().getProject().index(firstGenerationIndex);
            List<ShardStats> shards = new ArrayList<>(firstGenerationMeta.getNumberOfShards());
            for (int i = 0; i < firstGenerationMeta.getNumberOfShards(); i++) {
                // the shard stats will yield a write load of 2.0 which will make the auto sharding service recommend an optimal number
                // of 2 shards
                shards.add(getShardStats(firstGenerationMeta, i, i < 2 ? 1 : 0, assignedShardNodeId));
            }
            mockStatsForIndex(clusterStateBeforeRollover, assignedShardNodeId, firstGenerationMeta, shards);
            assertAcked(indicesAdmin().rolloverIndex(new RolloverRequest(dataStreamName, null)).actionGet());
            ClusterState clusterStateAfterRollover = internalCluster().getCurrentMasterNodeInstance(ClusterService.class).state();
            DataStream dataStream = clusterStateAfterRollover.getMetadata().getProject().dataStreams().get(dataStreamName);
            IndexMetadata secondGenerationMeta = clusterStateAfterRollover.metadata().getProject().getIndexSafe(dataStream.getWriteIndex());
            // we kept the number of shards to 3 as the reduce shards cooldown prevented us reducing the number of shards
            assertThat(secondGenerationMeta.getNumberOfShards(), is(3));
        }
        {
            // temporarily disable reduce shards cooldown and test that a rollover that doesn't match ANOTHER condition will not be
            // executed just because we need to reduce the number of shards, and then that rollover when a different condition does
            // indeed match will execute the rollover and the number of shards will be reduced to 2
            try {
                updateClusterSettings(
                    Settings.builder().put(DataStreamAutoShardingService.DATA_STREAMS_AUTO_SHARDING_DECREASE_SHARDS_COOLDOWN.getKey(), "0s")
                );
                ClusterState clusterStateBeforeRollover = internalCluster().getCurrentMasterNodeInstance(ClusterService.class).state();
                DataStream dataStreamBeforeRollover = clusterStateBeforeRollover.getMetadata()
                    .getProject()
                    .dataStreams()
                    .get(dataStreamName);
                String assignedShardNodeId = clusterStateBeforeRollover.routingTable()
                    .index(dataStreamBeforeRollover.getWriteIndex())
                    .shard(0)
                    .primaryShard()
                    .currentNodeId();
                IndexMetadata secondGenerationIndex = clusterStateBeforeRollover.metadata()
                    .getProject()
                    .index(dataStreamBeforeRollover.getIndices().get(1));
                List<ShardStats> shards = new ArrayList<>(secondGenerationIndex.getNumberOfShards());
                for (int i = 0; i < secondGenerationIndex.getNumberOfShards(); i++) {
                    // the shard stats will yield a write load of 2.0 which will make the auto sharding service recommend an
                    // optimal number of 2 shards
                    shards.add(getShardStats(secondGenerationIndex, i, i < 2 ? 1 : 0, assignedShardNodeId));
                }
                mockStatsForIndex(clusterStateBeforeRollover, assignedShardNodeId, secondGenerationIndex, shards);
                RolloverRequest request = new RolloverRequest(dataStreamName, null);
                // adding condition that does NOT match
                request.setConditions(RolloverConditions.newBuilder().addMaxIndexDocsCondition(1_000_000L).build());
                RolloverResponse response = indicesAdmin().rolloverIndex(request).actionGet();
                assertThat(response.isRolledOver(), is(false));
                Map<String, Boolean> conditionStatus = response.getConditionStatus();
                assertThat(conditionStatus.size(), is(1));
                assertThat(conditionStatus.get(new MaxDocsCondition(1_000_000L).toString()), is(false));
                // let's rollover with a condition that does match and test that the number of shards is reduced to 2
                indexDocs(dataStreamName, 100);
                request = new RolloverRequest(dataStreamName, null);
                // this time the max-docs(1) condition DOES match, so the rollover executes and carries the shard reduction
                request.setConditions(RolloverConditions.newBuilder().addMaxIndexDocsCondition(1L).build());
                response = indicesAdmin().rolloverIndex(request).actionGet();
                assertThat(response.isRolledOver(), is(true));
                conditionStatus = response.getConditionStatus();
                assertThat(conditionStatus.size(), is(2));
                for (Map.Entry<String, Boolean> entry : conditionStatus.entrySet()) {
                    if (entry.getKey().equals(new MaxDocsCondition(1L).toString())) {
                        assertThat(conditionStatus.get(new MaxDocsCondition(1L).toString()), is(true));
                    } else {
                        assertThat(conditionStatus.get(new OptimalShardCountCondition(2).toString()), is(true));
                    }
                }
                ClusterState clusterStateAfterRollover = internalCluster().getCurrentMasterNodeInstance(ClusterService.class).state();
                DataStream dataStream = clusterStateAfterRollover.getMetadata().getProject().dataStreams().get(dataStreamName);
                IndexMetadata thirdGenerationMeta = clusterStateAfterRollover.metadata()
                    .getProject()
                    .getIndexSafe(dataStream.getWriteIndex());
                assertThat(thirdGenerationMeta.getNumberOfShards(), is(2));
            } finally {
                // reset increase shards cooldown value
                updateClusterSettings(
                    Settings.builder().putNull(DataStreamAutoShardingService.DATA_STREAMS_AUTO_SHARDING_DECREASE_SHARDS_COOLDOWN.getKey())
                );
            }
        }
    }
    /**
     * Verifies that a lazy rollover does not compute a new auto-sharding decision: after a first
     * rollover auto-shards the stream from 3 to 5 shards, a lazy rollover performed while the stats
     * would warrant 7 shards (and with the increase cooldown disabled) still creates the new write
     * index with the previously decided 5 shards.
     */
    public void testLazyRolloverKeepsPreviousAutoshardingDecision() throws IOException {
        final String dataStreamName = "logs-es";
        putComposableIndexTemplate("my-template", List.of("logs-*"), indexSettings(3, 0).build());
        final var createDataStreamRequest = new CreateDataStreamAction.Request(TEST_REQUEST_TIMEOUT, TEST_REQUEST_TIMEOUT, dataStreamName);
        assertAcked(client().execute(CreateDataStreamAction.INSTANCE, createDataStreamRequest).actionGet());
        indexDocs(dataStreamName, randomIntBetween(100, 200));
        {
            ClusterState clusterStateBeforeRollover = internalCluster().getCurrentMasterNodeInstance(ClusterService.class).state();
            DataStream dataStreamBeforeRollover = clusterStateBeforeRollover.getMetadata().getProject().dataStreams().get(dataStreamName);
            Index firstGenerationIndex = clusterStateBeforeRollover.metadata()
                .getProject()
                .dataStreams()
                .get(dataStreamName)
                .getWriteIndex();
            IndexMetadata firstGenerationMeta = clusterStateBeforeRollover.getMetadata().getProject().index(firstGenerationIndex);
            List<ShardStats> shards = new ArrayList<>(firstGenerationMeta.getNumberOfShards());
            String assignedShardNodeId = clusterStateBeforeRollover.routingTable()
                .index(dataStreamBeforeRollover.getWriteIndex())
                .shard(0)
                .primaryShard()
                .currentNodeId();
            for (int i = 0; i < firstGenerationMeta.getNumberOfShards(); i++) {
                // the shard stats will yield a write load of 75.0 which will make the auto sharding service recommend an optimal number
                // of 5 shards
                shards.add(
                    getShardStats(
                        firstGenerationMeta,
                        i,
                        (long) Math.ceil(75.0 / firstGenerationMeta.getNumberOfShards()),
                        assignedShardNodeId
                    )
                );
            }
            mockStatsForIndex(clusterStateBeforeRollover, assignedShardNodeId, firstGenerationMeta, shards);
            assertAcked(indicesAdmin().rolloverIndex(new RolloverRequest(dataStreamName, null)).actionGet());
            ClusterState clusterStateAfterRollover = internalCluster().getCurrentMasterNodeInstance(ClusterService.class).state();
            DataStream dataStream = clusterStateAfterRollover.getMetadata().getProject().dataStreams().get(dataStreamName);
            IndexMetadata secondGenerationMeta = clusterStateAfterRollover.metadata().getProject().getIndexSafe(dataStream.getWriteIndex());
            // we auto sharded up to 5 shards
            assertThat(secondGenerationMeta.getNumberOfShards(), is(5));
        }
        {
            try {
                // eliminate the increase shards cooldown so there are no potential barriers to another increase shards option (we'll
                // actually also simulate the stats such that an increase to 7 is warranted) and execute a lazy rollover that should not
                // indeed auto shard up, but just keep the existing auto sharding event and create a new index with 5 shards (as dictated
                // by the existing auto sharding event)
                updateClusterSettings(
                    Settings.builder().put(DataStreamAutoShardingService.DATA_STREAMS_AUTO_SHARDING_INCREASE_SHARDS_COOLDOWN.getKey(), "0s")
                );
                ClusterState clusterStateBeforeRollover = internalCluster().getCurrentMasterNodeInstance(ClusterService.class).state();
                DataStream dataStreamBeforeRollover = clusterStateBeforeRollover.getMetadata()
                    .getProject()
                    .dataStreams()
                    .get(dataStreamName);
                String assignedShardNodeId = clusterStateBeforeRollover.routingTable()
                    .index(dataStreamBeforeRollover.getWriteIndex())
                    .shard(0)
                    .primaryShard()
                    .currentNodeId();
                IndexMetadata secondGenIndex = clusterStateBeforeRollover.metadata()
                    .getProject()
                    .index(dataStreamBeforeRollover.getIndices().get(1));
                List<ShardStats> shards = new ArrayList<>(secondGenIndex.getNumberOfShards());
                for (int i = 0; i < secondGenIndex.getNumberOfShards(); i++) {
                    // the shard stats will yield a write load of 100.0 which will make the auto sharding service recommend an optimal
                    // number of 7 shards
                    shards.add(
                        getShardStats(secondGenIndex, i, (long) Math.ceil(100.0 / secondGenIndex.getNumberOfShards()), assignedShardNodeId)
                    );
                }
                mockStatsForIndex(clusterStateBeforeRollover, assignedShardNodeId, secondGenIndex, shards);
                RolloverRequest request = new RolloverRequest(dataStreamName, null);
                request.lazy(true);
                assertAcked(indicesAdmin().rolloverIndex(request).actionGet());
                // index some docs so the rollover is executed
                indexDocs(dataStreamName, 10);
                ClusterState clusterStateAfterRollover = internalCluster().getCurrentMasterNodeInstance(ClusterService.class).state();
                DataStream dataStream = clusterStateAfterRollover.getMetadata().getProject().dataStreams().get(dataStreamName);
                IndexMetadata thirdGenerationIndex = clusterStateAfterRollover.metadata()
                    .getProject()
                    .getIndexSafe(dataStream.getWriteIndex());
                // we kept the number of shards to 5 as we did a lazy rollover
                assertThat(thirdGenerationIndex.getNumberOfShards(), is(5));
            } finally {
                // reset increase shards cooldown value
                updateClusterSettings(
                    Settings.builder().putNull(DataStreamAutoShardingService.DATA_STREAMS_AUTO_SHARDING_INCREASE_SHARDS_COOLDOWN.getKey())
                );
            }
        }
    }
    /**
     * Fabricates {@link ShardStats} for one shard of {@code indexMeta}: a started primary routed to
     * {@code assignedShardNodeId} whose indexing stats report {@code targetWriteLoad} as the recent
     * write load, so the auto-sharding service sees a controlled, deterministic load per shard.
     */
    private static ShardStats getShardStats(IndexMetadata indexMeta, int shardIndex, long targetWriteLoad, String assignedShardNodeId) {
        ShardId shardId = new ShardId(indexMeta.getIndex(), shardIndex);
        Path path = createTempDir().resolve("indices").resolve(indexMeta.getIndexUUID()).resolve(String.valueOf(shardIndex));
        ShardRouting shardRouting = ShardRouting.newUnassigned(
            shardId,
            true,
            RecoverySource.EmptyStoreRecoverySource.INSTANCE,
            new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, null),
            ShardRouting.Role.DEFAULT
        );
        // walk the routing through its lifecycle so the stats describe a STARTED primary
        shardRouting = shardRouting.initialize(assignedShardNodeId, null, ShardRouting.UNAVAILABLE_EXPECTED_SHARD_SIZE);
        shardRouting = shardRouting.moveToStarted(ShardRouting.UNAVAILABLE_EXPECTED_SHARD_SIZE);
        CommonStats stats = new CommonStats();
        stats.docs = new DocsStats(100, 0, randomByteSizeValue().getBytes());
        stats.store = new StoreStats();
        // the final constructor argument carries the write load consumed by auto-sharding
        stats.indexing = new IndexingStats(
            new IndexingStats.Stats(1, 1, 1, 1, 1, 1, 1, 1, 1, false, 1, 234, 234, 1000, 0.123, targetWriteLoad)
        );
        return new ShardStats(shardRouting, new ShardPath(false, path, path, shardId), stats, null, null, null, false, 0);
    }
    /**
     * Installs a composable index template named {@code id} matching {@code patterns}, carrying the
     * given settings and a data-stream template so matching names create data streams.
     */
    static void putComposableIndexTemplate(String id, List<String> patterns, @Nullable Settings settings) throws IOException {
        TransportPutComposableIndexTemplateAction.Request request = new TransportPutComposableIndexTemplateAction.Request(id);
        request.indexTemplate(
            ComposableIndexTemplate.builder()
                .indexPatterns(patterns)
                .template(Template.builder().settings(settings))
                .dataStreamTemplate(new ComposableIndexTemplate.DataStreamTemplate())
                .build()
        );
        client().execute(TransportPutComposableIndexTemplateAction.TYPE, request).actionGet();
    }
    /**
     * Bulk-indexes {@code numDocs} minimal timestamped documents into {@code dataStream}, asserts
     * every item was CREATED into a backing index of that stream, then refreshes the stream.
     */
    static void indexDocs(String dataStream, int numDocs) {
        BulkRequest bulkRequest = new BulkRequest();
        for (int i = 0; i < numDocs; i++) {
            // each doc only needs the mandatory @timestamp field
            String value = DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.formatMillis(System.currentTimeMillis());
            bulkRequest.add(
                new IndexRequest(dataStream).opType(DocWriteRequest.OpType.CREATE)
                    .source(String.format(Locale.ROOT, "{\"%s\":\"%s\"}", DEFAULT_TIMESTAMP_FIELD, value), XContentType.JSON)
            );
        }
        BulkResponse bulkResponse = client().bulk(bulkRequest).actionGet();
        assertThat(bulkResponse.getItems().length, equalTo(numDocs));
        String backingIndexPrefix = DataStream.BACKING_INDEX_PREFIX + dataStream;
        for (BulkItemResponse itemResponse : bulkResponse) {
            assertThat(itemResponse.getFailureMessage(), nullValue());
            assertThat(itemResponse.status(), equalTo(RestStatus.CREATED));
            assertThat(itemResponse.getIndex(), startsWith(backingIndexPrefix));
        }
        // make the docs visible to the subsequent searches/rollovers
        indicesAdmin().refresh(new RefreshRequest(dataStream)).actionGet();
    }
/**
* Test plugin that registers an additional setting.
*/
public static
|
DataStreamAutoshardingIT
|
java
|
elastic__elasticsearch
|
test/yaml-rest-runner/src/test/java/org/elasticsearch/test/rest/yaml/restspec/ClientYamlSuiteRestApiTests.java
|
{
"start": 777,
"end": 11773
}
|
class ____ extends ESTestCase {
    /**
     * Parses the common REST spec ({@code COMMON_SPEC}) and checks that exactly the documented
     * global parameters are recognized, and unknown names are rejected.
     */
    public void testParseCommonSpec() throws IOException {
        XContentParser parser = createParser(YamlXContent.yamlXContent, COMMON_SPEC);
        ClientYamlSuiteRestSpec restSpec = new ClientYamlSuiteRestSpec();
        ClientYamlSuiteRestSpec.parseCommonSpec(parser, restSpec);
        assertTrue(restSpec.isGlobalParameter("pretty"));
        assertTrue(restSpec.isGlobalParameter("human"));
        assertTrue(restSpec.isGlobalParameter("error_trace"));
        assertTrue(restSpec.isGlobalParameter("source"));
        assertTrue(restSpec.isGlobalParameter("filter_path"));
        assertFalse(restSpec.isGlobalParameter("unknown"));
    }
    /**
     * Parses {@code REST_SPEC_API} (the index API spec) and checks that
     * {@code getBestMatchingPaths} picks the path(s) whose placeholders best cover the supplied
     * path-parameter names — a single best path when unambiguous, several equally-good candidates
     * otherwise (e.g. {index}+{type} matches three distinct paths).
     */
    public void testPathMatching() throws IOException {
        XContentParser parser = createParser(YamlXContent.yamlXContent, REST_SPEC_API);
        ClientYamlSuiteRestApi restApi = new ClientYamlSuiteRestApiParser().parse("index.json", parser);
        {
            // no params: only the param-free path can match
            List<ClientYamlSuiteRestApi.Path> paths = restApi.getBestMatchingPaths(Collections.emptySet());
            assertEquals(1, paths.size());
            assertEquals("/_doc", paths.get(0).path());
        }
        {
            // query-only param does not influence path selection
            List<ClientYamlSuiteRestApi.Path> paths = restApi.getBestMatchingPaths(Collections.singleton("wait_for_active_shards"));
            assertEquals(1, paths.size());
            assertEquals("/_doc", paths.get(0).path());
        }
        {
            List<ClientYamlSuiteRestApi.Path> paths = restApi.getBestMatchingPaths(Collections.singleton("index"));
            assertEquals(1, paths.size());
            assertEquals("/{index}/_doc", paths.get(0).path());
        }
        {
            List<ClientYamlSuiteRestApi.Path> paths = restApi.getBestMatchingPaths(Set.of("index", "id"));
            assertEquals(1, paths.size());
            assertEquals("/{index}/_doc/{id}", paths.get(0).path());
        }
        {
            // three paths use exactly {index} and {type}: all are returned
            List<ClientYamlSuiteRestApi.Path> paths = restApi.getBestMatchingPaths(Set.of("index", "type"));
            assertEquals(3, paths.size());
            assertEquals("/{index}/_mapping/{type}", paths.get(0).path());
            assertEquals("/{index}/{type}", paths.get(1).path());
            assertEquals("/{index}/_mappings/{type}", paths.get(2).path());
        }
        {
            List<ClientYamlSuiteRestApi.Path> paths = restApi.getBestMatchingPaths(Set.of("index", "type", "id"));
            assertEquals(1, paths.size());
            assertEquals("/{index}/{type}/{id}", paths.get(0).path());
        }
    }
private static final String COMMON_SPEC = """
{
"documentation" : {
"url": "Parameters that are accepted by all API endpoints.",
"documentation": "https://www.elastic.co/guide/en/elasticsearch/reference/current/common-options.html"
},
"params": {
"pretty": {
"type": "boolean",
"description": "Pretty format the returned JSON response.",
"default": false
},
"human": {
"type": "boolean",
"description": "Return human readable values for statistics.",
"default": true
},
"error_trace": {
"type": "boolean",
"description": "Include the stack trace of returned errors.",
"default": false
},
"source": {
"type": "string",
"description": "The URL-encoded request definition. Useful for libraries that do not accept a request body\
for non-POST requests."
},
"filter_path": {
"type": "list",
"description": "A comma-separated list of filters used to reduce the response."
}
}
}
""";
private static final String REST_SPEC_API = """
{
"index":{
"documentation":{
"url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/docs-index_.html",
"description":"Creates or updates a document in an index."
},
"stability":"stable",
"visibility": "public",
"headers": { "accept": ["application/json"] },
"url":{
"paths":[
{
"path":"/_doc",
"methods":[
"PUT"
],
"parts":{
}
},
{
"path":"/{index}/_mapping/{type}",
"methods":[
"PUT"
],
"parts":{
"index":{
"type":"string",
"required":true,
"description":"The name of the index"
},
"type":{
"type":"string",
"description":"The type of the document"
}
}
},
{
"path":"/{index}/_mappings/{type}",
"methods":[
"PUT"
],
"parts":{
"index":{
"type":"string",
"required":true,
"description":"The name of the index"
},
"type":{
"type":"string",
"description":"The type of the document"
}
}
},
{
"path":"/{index}/_doc/{id}",
"methods":[
"PUT"
],
"parts":{
"id":{
"type":"string",
"description":"Document ID"
},
"index":{
"type":"string",
"required":true,
"description":"The name of the index"
}
}
},
{
"path":"/{index}/_doc",
"methods":[
"POST"
],
"parts":{
"index":{
"type":"string",
"required":true,
"description":"The name of the index"
}
}
},
{
"path":"/{index}/{type}",
"methods":[
"POST"
],
"parts":{
"index":{
"type":"string",
"required":true,
"description":"The name of the index"
},
"type":{
"type":"string",
"description":"The type of the document",
"deprecated":true
}
},
"deprecated":{
"version":"7.0.0",
"description":"Specifying types in urls has been deprecated"
}
},
{
"path":"/{index}/{type}/{id}",
"methods":[
"PUT"
],
"parts":{
"id":{
"type":"string",
"description":"Document ID"
},
"index":{
"type":"string",
"required":true,
"description":"The name of the index"
},
"type":{
"type":"string",
"description":"The type of the document",
"deprecated":true
}
},
"deprecated":{
"version":"7.0.0",
"description":"Specifying types in urls has been deprecated"
}
}
]
},
"params":{
"wait_for_active_shards":{
"type":"string",
"description":"Sets the number of shard copies that must be active before proceeding with the index operation.\
Defaults to 1, meaning the primary shard only. Set to `all` for all shard copies, otherwise set to any non-negative value\
less than or equal to the total number of copies for the shard (number of replicas + 1)"
},
"op_type":{
"type":"enum",
"options":[
"index",
"create"
],
"default":"index",
"description":"Explicit operation type"
},
"refresh":{
"type":"enum",
"options":[
"true",
"false",
"wait_for"
],
"description":"If `true` then refresh the affected shards to make this operation visible to search, if `wait_for`\
then wait for a refresh to make this operation visible to search, if `false` (the default) then do nothing with refreshes."
},
"routing":{
"type":"string",
"description":"Specific routing value"
},
"timeout":{
"type":"time",
"description":"Explicit operation timeout"
},
"version":{
"type":"number",
"description":"Explicit version number for concurrency control"
},
"version_type":{
"type":"enum",
"options":[
"internal",
"external",
"external_gte",
"force"
],
"description":"Specific version type"
},
"if_seq_no":{
"type":"number",
"description":"only perform the index operation if the last operation that has changed the document has the specified\
sequence number"
},
"if_primary_term":{
"type":"number",
"description":"only perform the index operation if the last operation that has changed the document has the specified\
primary term"
},
"pipeline":{
"type":"string",
"description":"The pipeline id to preprocess incoming documents with"
}
},
"body":{
"description":"The document",
"required":true
}
}
}
""";
}
|
ClientYamlSuiteRestApiTests
|
java
|
eclipse-vertx__vert.x
|
vertx-core/src/main/java/io/vertx/core/dns/DnsException.java
|
{
"start": 728,
"end": 1189
}
|
class ____ extends VertxException {
private static final String ERROR_MESSAGE_PREFIX = "DNS query error occurred: ";
private final DnsResponseCode code;
public DnsException(DnsResponseCode code) {
super(ERROR_MESSAGE_PREFIX + code, true);
this.code = code;
}
/**
* The {@link DnsResponseCode} which caused this {@link io.vertx.core.dns.DnsException} to be created.
*/
public DnsResponseCode code() {
return code;
}
}
|
DnsException
|
java
|
apache__hadoop
|
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/statistics/IOStatisticsLogging.java
|
{
"start": 9809,
"end": 10312
}
|
class ____ {
private final IOStatisticsSource source;
private SourceToString(@Nullable IOStatisticsSource source) {
this.source = source;
}
@Override
public String toString() {
return source != null
? ioStatisticsSourceToString(source)
: IOStatisticsBinding.NULL_SOURCE;
}
}
/**
* Stringifier of statistics: low cost to instantiate and every
* toString/logging will re-evaluate the statistics.
*/
private static final
|
SourceToString
|
java
|
apache__camel
|
components/camel-kubernetes/src/generated/java/org/apache/camel/component/kubernetes/persistent_volumes_claims/KubernetesPersistentVolumesClaimsComponentConfigurer.java
|
{
"start": 763,
"end": 2996
}
|
/**
 * Generated property configurer for {@code KubernetesPersistentVolumesClaimsComponent}:
 * maps option names (case-insensitively when requested) to typed setters/getters.
 */
class KubernetesPersistentVolumesClaimsComponentConfigurer extends PropertyConfigurerSupport implements GeneratedPropertyConfigurer, PropertyConfigurerGetter {

    @Override
    public boolean configure(CamelContext camelContext, Object obj, String name, Object value, boolean ignoreCase) {
        KubernetesPersistentVolumesClaimsComponent target = (KubernetesPersistentVolumesClaimsComponent) obj;
        switch (ignoreCase ? name.toLowerCase() : name) {
        case "autowiredenabled":
        case "autowiredEnabled": target.setAutowiredEnabled(property(camelContext, boolean.class, value)); return true;
        case "kubernetesclient":
        case "kubernetesClient": target.setKubernetesClient(property(camelContext, io.fabric8.kubernetes.client.KubernetesClient.class, value)); return true;
        case "lazystartproducer":
        case "lazyStartProducer": target.setLazyStartProducer(property(camelContext, boolean.class, value)); return true;
        default: return false;
        }
    }

    @Override
    public String[] getAutowiredNames() {
        return new String[]{"kubernetesClient"};
    }

    @Override
    public Class<?> getOptionType(String name, boolean ignoreCase) {
        switch (ignoreCase ? name.toLowerCase() : name) {
        case "autowiredenabled":
        case "autowiredEnabled": return boolean.class;
        case "kubernetesclient":
        case "kubernetesClient": return io.fabric8.kubernetes.client.KubernetesClient.class;
        case "lazystartproducer":
        case "lazyStartProducer": return boolean.class;
        default: return null;
        }
    }

    @Override
    public Object getOptionValue(Object obj, String name, boolean ignoreCase) {
        KubernetesPersistentVolumesClaimsComponent target = (KubernetesPersistentVolumesClaimsComponent) obj;
        switch (ignoreCase ? name.toLowerCase() : name) {
        case "autowiredenabled":
        case "autowiredEnabled": return target.isAutowiredEnabled();
        case "kubernetesclient":
        case "kubernetesClient": return target.getKubernetesClient();
        case "lazystartproducer":
        case "lazyStartProducer": return target.isLazyStartProducer();
        default: return null;
        }
    }
}
|
KubernetesPersistentVolumesClaimsComponentConfigurer
|
java
|
quarkusio__quarkus
|
independent-projects/arc/runtime/src/main/java/io/quarkus/arc/impl/Reflections.java
|
{
"start": 10701,
"end": 11778
}
|
/**
 * Immutable cache key identifying a field by its declaring class and field name.
 * The hash code is precomputed once in the constructor since keys are hashed repeatedly.
 */
class FieldKey {

        final Class<?> clazz;
        final String fieldName;
        // cached at construction; both components may be null
        final int hashCode;

        public FieldKey(Class<?> clazz, String fieldName) {
            this.clazz = clazz;
            this.fieldName = fieldName;
            final int prime = 31;
            int result = 1;
            result = prime * result + ((clazz == null) ? 0 : clazz.hashCode());
            result = prime * result + ((fieldName == null) ? 0 : fieldName.hashCode());
            this.hashCode = result;
        }

        @Override
        public int hashCode() {
            return hashCode;
        }

        @Override
        public boolean equals(Object obj) {
            if (this == obj) {
                return true;
            }
            if (obj == null) {
                return false;
            }
            if (getClass() != obj.getClass()) {
                return false;
            }
            FieldKey other = (FieldKey) obj;
            return Objects.equals(clazz, other.clazz) && Objects.equals(fieldName, other.fieldName);
        }
    }
}
|
FieldKey
|
java
|
mockito__mockito
|
mockito-core/src/test/java/org/mockito/MockitoTest.java
|
{
"start": 17579,
"end": 17749
}
|
/** Minimal fixture class exposing one static and one instance accessor with fixed values. */
class Dummy {

        /** @return the constant string {@code "value"} */
        public static String getValue() {
            return "value";
        }

        /** @return the constant {@code 42} */
        public int getAnswer() {
            return 42;
        }
    }
}
|
Dummy
|
java
|
apache__hadoop
|
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/partition/TotalOrderPartitioner.java
|
{
"start": 11756,
"end": 15238
}
|
class ____
{
TrieNode content;
CarriedTrieNodeRef() {
content = null;
}
}
/**
* Given a sorted set of cut points, build a trie that will find the correct
* partition quickly.
* @param splits the list of cut points
* @param lower the lower bound of partitions 0..numPartitions-1
* @param upper the upper bound of partitions 0..numPartitions-1
* @param prefix the prefix that we have already checked against
* @param maxDepth the maximum depth we will build a trie for
* @return the trie node that will divide the splits correctly
*/
  private TrieNode buildTrie(BinaryComparable[] splits, int lower,
      int upper, byte[] prefix, int maxDepth) {
    // delegate with a fresh carried-ref so leaf-node reuse starts from an empty state
    return buildTrieRec
         (splits, lower, upper, prefix, maxDepth, new CarriedTrieNodeRef());
  }
/**
* This is the core of buildTrie. The interface, and stub, above, just adds
* an empty CarriedTrieNodeRef.
*
* We build trie nodes in depth first order, which is also in key space
* order. Every leaf node is referenced as a slot in a parent internal
* node. If two adjacent slots [in the DFO] hold leaf nodes that have
* no split point, then they are not separated by a split point either,
* because there's no place in key space for that split point to exist.
*
* When that happens, the leaf nodes would be semantically identical, and
* we reuse the object. A single CarriedTrieNodeRef "ref" lives for the
* duration of the tree-walk. ref carries a potentially reusable, unsplit
* leaf node for such reuse until a leaf node with a split arises, which
* breaks the chain until we need to make a new unsplit leaf node.
*
* Note that this use of CarriedTrieNodeRef means that for internal nodes,
* for internal nodes if this code is modified in any way we still need
* to make or fill in the subnodes in key space order.
*/
  private TrieNode buildTrieRec(BinaryComparable[] splits, int lower,
      int upper, byte[] prefix, int maxDepth, CarriedTrieNodeRef ref) {
    final int depth = prefix.length;
    // We generate leaves for a single split point as well as for
    // no split points.
    if (depth >= maxDepth || lower >= upper - 1) {
      // If we have two consecutive requests for an unsplit trie node, we
      // can deliver the same one the second time.
      if (lower == upper && ref.content != null) {
        return ref.content;
      }
      TrieNode result = LeafTrieNodeFactory(depth, splits, lower, upper);
      // carry the node only when it covers zero split points (lower == upper)
      ref.content = lower == upper ? result : null;
      return result;
    }
    InnerTrieNode result = new InnerTrieNode(depth);
    byte[] trial = Arrays.copyOf(prefix, prefix.length + 1);
    // append an extra byte on to the prefix
    int currentBound = lower;
    // bytes 0x00..0xFE: for each, advance currentBound past every split that
    // still starts with prefix+ch (i.e. compares below prefix+(ch+1))
    for(int ch = 0; ch < 0xFF; ++ch) {
      trial[depth] = (byte) (ch + 1);
      lower = currentBound;
      while (currentBound < upper) {
        if (splits[currentBound].compareTo(trial, 0, trial.length) >= 0) {
          break;
        }
        currentBound += 1;
      }
      trial[depth] = (byte) ch;
      result.child[0xFF & ch]
        = buildTrieRec(splits, lower, currentBound, trial, maxDepth, ref);
    }
    // pick up the rest
    trial[depth] = (byte)0xFF;
    // 0xFF is handled outside the loop (ch + 1 would not fit in the probe byte);
    // it takes every remaining split up to upper
    result.child[0xFF]
      = buildTrieRec(splits, lower, currentBound, trial, maxDepth, ref);
    return result;
  }
}
|
CarriedTrieNodeRef
|
java
|
spring-projects__spring-framework
|
spring-core/src/test/java/org/springframework/core/annotation/AnnotationsScannerTests.java
|
{
"start": 22851,
"end": 22983
}
|
class ____ {
@TestAnnotation2
@TestInheritedAnnotation2
public void method() {
}
}
@TestAnnotation1
static
|
SingleSuperclass
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/where/annotations/LazyManyToManyNonUniqueIdWhereTest.java
|
{
"start": 1799,
"end": 9170
}
|
class ____ {
@BeforeAll
public void createSchema(SessionFactoryScope factoryScope) {
factoryScope.inTransaction( session -> session.doWork( connection -> {
final Dialect dialect = session.getDialect();
try ( final Statement statement = connection.createStatement() ) {
statement.executeUpdate( dialect.getDropTableString( "MATERIAL_RATINGS" ) );
statement.executeUpdate( dialect.getDropTableString( "BUILDING_RATINGS" ) );
statement.executeUpdate( dialect.getDropTableString( "ASSOCIATION_TABLE" ) );
statement.executeUpdate( dialect.getDropTableString( "MAIN_TABLE" ) );
statement.executeUpdate( """
create table MAIN_TABLE(
ID integer not null,
NAME varchar(255) not null,
CODE varchar(10) not null,
primary key (ID, CODE)
)""" );
statement.executeUpdate( """
create table ASSOCIATION_TABLE(
MAIN_ID integer not null,
MAIN_CODE varchar(10) not null,
ASSOCIATION_ID int not null,
ASSOCIATION_CODE varchar(10) not null,
primary key (MAIN_ID, MAIN_CODE, ASSOCIATION_ID, ASSOCIATION_CODE)
)""" );
statement.executeUpdate("""
create table MATERIAL_RATINGS(
MATERIAL_ID integer not null,
RATING_ID integer not null,
primary key (MATERIAL_ID, RATING_ID)
)""" );
statement.executeUpdate( """
create table BUILDING_RATINGS(
BUILDING_ID integer not null,
RATING_ID integer not null,
primary key (BUILDING_ID, RATING_ID)
)""" );
statement.executeUpdate( "insert into MAIN_TABLE(ID, NAME, CODE) VALUES( 1, 'plastic', 'MATERIAL' )" );
statement.executeUpdate( "insert into MAIN_TABLE(ID, NAME, CODE) VALUES( 1, 'house', 'BUILDING' )" );
statement.executeUpdate( "insert into MAIN_TABLE(ID, NAME, CODE) VALUES( 1, 'high', 'RATING' )" );
statement.executeUpdate( "insert into MAIN_TABLE(ID, NAME, CODE) VALUES( 2, 'medium', 'RATING' )" );
statement.executeUpdate( "insert into MAIN_TABLE(ID, NAME, CODE) VALUES( 3, 'low', 'RATING' )" );
statement.executeUpdate( "insert into MAIN_TABLE(ID, NAME, CODE) VALUES( 1, 'small', 'SIZE' )" );
statement.executeUpdate( "insert into MAIN_TABLE(ID, NAME, CODE) VALUES( 2, 'medium', 'SIZE' )" );
statement.executeUpdate( "insert into ASSOCIATION_TABLE(MAIN_ID, MAIN_CODE, ASSOCIATION_ID, ASSOCIATION_CODE) " +
"VALUES( 1, 'MATERIAL', 1, 'RATING' )" );
statement.executeUpdate( "insert into ASSOCIATION_TABLE(MAIN_ID, MAIN_CODE, ASSOCIATION_ID, ASSOCIATION_CODE) " +
"VALUES( 1, 'MATERIAL', 2, 'RATING' )" );
statement.executeUpdate( "insert into ASSOCIATION_TABLE(MAIN_ID, MAIN_CODE, ASSOCIATION_ID, ASSOCIATION_CODE) " +
"VALUES( 1, 'MATERIAL', 3, 'RATING' )" );
statement.executeUpdate( "insert into ASSOCIATION_TABLE(MAIN_ID, MAIN_CODE, ASSOCIATION_ID, ASSOCIATION_CODE) " +
"VALUES( 1, 'MATERIAL', 2, 'SIZE' )" );
statement.executeUpdate( "insert into ASSOCIATION_TABLE(MAIN_ID, MAIN_CODE, ASSOCIATION_ID, ASSOCIATION_CODE) " +
"VALUES( 1, 'BUILDING', 1, 'RATING' )" );
statement.executeUpdate( "insert into ASSOCIATION_TABLE(MAIN_ID, MAIN_CODE, ASSOCIATION_ID, ASSOCIATION_CODE) " +
"VALUES( 1, 'BUILDING', 1, 'SIZE' )" );
statement.executeUpdate( "insert into MATERIAL_RATINGS(MATERIAL_ID, RATING_ID) VALUES( 1, 1 )" );
statement.executeUpdate( "insert into BUILDING_RATINGS(BUILDING_ID, RATING_ID) VALUES( 1, 1 )" );
statement.executeUpdate( "insert into BUILDING_RATINGS(BUILDING_ID, RATING_ID) VALUES( 1, 2 )" );
statement.executeUpdate( "insert into BUILDING_RATINGS(BUILDING_ID, RATING_ID) VALUES( 1, 3 )" );
}
} ) );
}
@AfterAll
public void dropSchema(SessionFactoryScope factoryScope) {
factoryScope.dropData();
}
@Test
@JiraKey( value = "HHH-12875")
public void testInitializeFromUniqueAssociationTable(SessionFactoryScope factoryScope) {
factoryScope.inTransaction( (session) -> {
var material = session.find( Material.class, 1 );
assertEquals( "plastic", material.getName() );
// Material#ratings is mapped with lazy="true"
assertFalse( Hibernate.isInitialized( material.getRatings() ) );
assertEquals( 1, material.getRatings().size() );
assertTrue( Hibernate.isInitialized( material.getRatings() ) );
final Rating rating = material.getRatings().iterator().next();
assertEquals( "high", rating.getName() );
var building = session.find( Building.class, 1 );
assertEquals( "house", building.getName() );
// Building#ratings is mapped with lazy="true"
assertFalse( Hibernate.isInitialized( building.getMediumOrHighRatings() ) );
checkMediumOrHighRatings( building.getMediumOrHighRatings() );
} );
}
@Test
@JiraKey( value = "HHH-12875")
public void testInitializeFromNonUniqueAssociationTable(SessionFactoryScope factoryScope) {
factoryScope.inTransaction( (session) -> {
var material = session.find( Material.class, 1 );
assertEquals( "plastic", material.getName() );
// Material#mediumOrHighRatingsFromCombined is mapped with lazy="true"
assertFalse( Hibernate.isInitialized( material.getMediumOrHighRatingsFromCombined() ) );
checkMediumOrHighRatings( material.getMediumOrHighRatingsFromCombined() );
Rating highRating = null;
for ( Rating rating : material.getMediumOrHighRatingsFromCombined() ) {
if ( "high".equals( rating.getName() ) ) {
highRating = rating;
}
}
assertNotNull( highRating );
// Material#sizesFromCombined is mapped with lazy="true"
assertFalse( Hibernate.isInitialized( material.getSizesFromCombined() ) );
assertEquals( 1, material.getSizesFromCombined().size() );
assertTrue( Hibernate.isInitialized( material.getSizesFromCombined() ) );
final Size size = material.getSizesFromCombined().iterator().next();
assertEquals( "medium", size.getName() );
var building = session.find( Building.class, 1 );
// building.ratingsFromCombined is mapped with lazy="true"
assertFalse( Hibernate.isInitialized( building.getRatingsFromCombined() ) );
assertEquals( 1, building.getRatingsFromCombined().size() );
assertTrue( Hibernate.isInitialized( building.getRatingsFromCombined() ) );
assertSame( highRating, building.getRatingsFromCombined().iterator().next() );
// Building#sizesFromCombined is mapped with lazy="true"
assertFalse( Hibernate.isInitialized( building.getSizesFromCombined() ) );
assertEquals( 1, building.getSizesFromCombined().size() );
assertTrue( Hibernate.isInitialized( building.getSizesFromCombined() ) );
assertEquals( "small", building.getSizesFromCombined().iterator().next().getName() );
} );
}
private void checkMediumOrHighRatings(List<Rating> mediumOrHighRatings) {
assertEquals( 2, mediumOrHighRatings.size() );
final Iterator<Rating> iterator = mediumOrHighRatings.iterator();
final Rating firstRating = iterator.next();
final Rating secondRating = iterator.next();
if ( "high".equals( firstRating.getName() ) ) {
assertEquals( "medium", secondRating.getName() );
}
else if ( "medium".equals( firstRating.getName() ) ) {
assertEquals( "high", secondRating.getName() );
}
else {
fail( "unexpected rating" );
}
}
@Entity( name = "Material" )
@Table( name = "MAIN_TABLE" )
@SQLRestriction( "CODE = 'MATERIAL'" )
public static
|
LazyManyToManyNonUniqueIdWhereTest
|
java
|
elastic__elasticsearch
|
x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transform/TransformRegistry.java
|
{
"start": 694,
"end": 3288
}
|
class ____ {
private final Map<
String,
TransformFactory<? extends Transform, ? extends Transform.Result, ? extends ExecutableTransform<?, ?>>> factories;
public TransformRegistry(
Map<String, TransformFactory<? extends Transform, ? extends Transform.Result, ? extends ExecutableTransform<?, ?>>> factories
) {
Map<String, TransformFactory<? extends Transform, ? extends Transform.Result, ? extends ExecutableTransform<?, ?>>> map =
new HashMap<>(factories);
map.put(ChainTransform.TYPE, new ChainTransformFactory(this));
this.factories = Collections.unmodifiableMap(map);
}
public TransformFactory<? extends Transform, ? extends Transform.Result, ? extends ExecutableTransform<?, ?>> factory(String type) {
return factories.get(type);
}
public ExecutableTransform<? extends Transform, ? extends Transform.Result> parse(String watchId, XContentParser parser)
throws IOException {
String type = null;
XContentParser.Token token;
ExecutableTransform<? extends Transform, ? extends Transform.Result> transform = null;
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
type = parser.currentName();
} else if (type != null) {
transform = parse(watchId, type, parser);
}
}
return transform;
}
private ExecutableTransform<? extends Transform, ? extends Transform.Result> parse(String watchId, String type, XContentParser parser)
throws IOException {
TransformFactory<? extends Transform, ? extends Transform.Result, ? extends ExecutableTransform<?, ?>> factory = factories.get(
type
);
if (factory == null) {
throw new ElasticsearchParseException("could not parse transform for watch [{}], unknown transform type [{}]", watchId, type);
}
return factory.parseExecutable(watchId, parser);
}
public Transform parseTransform(String watchId, String type, XContentParser parser) throws IOException {
TransformFactory<? extends Transform, ? extends Transform.Result, ? extends ExecutableTransform<?, ?>> factory = factories.get(
type
);
if (factory == null) {
throw new ElasticsearchParseException("could not parse transform for watch [{}], unknown transform type [{}]", watchId, type);
}
return factory.parseTransform(watchId, parser);
}
}
|
TransformRegistry
|
java
|
reactor__reactor-core
|
reactor-core/src/main/java/reactor/core/publisher/InnerProducer.java
|
{
"start": 1104,
"end": 1406
}
|
interface ____<O>
extends Scannable, Subscription {
@Nullable CoreSubscriber<? super O> actual();
@Override
default @Nullable Object scanUnsafe(Attr key) {
if (key == Attr.ACTUAL) {
return actual();
}
if (key == InternalProducerAttr.INSTANCE) return true;
return null;
}
}
|
InnerProducer
|
java
|
apache__hadoop
|
hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/scale/ITestS3AHugeFilesStorageClass.java
|
{
"start": 2131,
"end": 4901
}
|
class ____ extends AbstractSTestS3AHugeFiles {
private static final Logger LOG = LoggerFactory.getLogger(ITestS3AHugeFilesStorageClass.class);
@Override
protected Configuration createScaleConfiguration() {
Configuration conf = super.createScaleConfiguration();
skipIfStorageClassTestsDisabled(conf);
disableFilesystemCaching(conf);
removeBaseAndBucketOverrides(conf, STORAGE_CLASS);
conf.set(STORAGE_CLASS, STORAGE_CLASS_REDUCED_REDUNDANCY);
return conf;
}
@Override
protected String getBlockOutputBufferName() {
return Constants.FAST_UPLOAD_BUFFER_ARRAY;
}
@Test
@Override
public void test_010_CreateHugeFile() throws IOException {
super.test_010_CreateHugeFile();
assertStorageClass(getPathOfFileToCreate());
}
@Test
@Override
public void test_030_postCreationAssertions() throws Throwable {
super.test_030_postCreationAssertions();
assertStorageClass(getPathOfFileToCreate());
}
@Override
@Test
public void test_040_PositionedReadHugeFile() throws Throwable {
skipQuietly("PositionedReadHugeFile");
}
@Test
@Override
public void test_050_readHugeFile() throws Throwable {
skipQuietly("readHugeFile");
}
@Test
@Override
public void test_090_verifyRenameSourceEncryption() throws IOException {
skipQuietly("verifyRenameSourceEncryption");
}
@Test
@Override
public void test_100_renameHugeFile() throws Throwable {
Path hugefile = getHugefile();
Path hugefileRenamed = getHugefileRenamed();
assumeHugeFileExists();
describe("renaming %s to %s", hugefile, hugefileRenamed);
S3AFileSystem fs = getFileSystem();
FileStatus status = fs.getFileStatus(hugefile);
long size = status.getLen();
fs.delete(hugefileRenamed, false);
ContractTestUtils.NanoTimer timer = new ContractTestUtils.NanoTimer();
fs.rename(hugefile, hugefileRenamed);
long mb = Math.max(size / _1MB, 1);
timer.end("time to rename file of %d MB", mb);
LOG.info("Time per MB to rename = {} nS", toHuman(timer.nanosPerOperation(mb)));
bandwidth(timer, size);
FileStatus destFileStatus = fs.getFileStatus(hugefileRenamed);
assertEquals(size, destFileStatus.getLen());
assertStorageClass(hugefileRenamed);
}
@Test
@Override
public void test_110_verifyRenameDestEncryption() throws IOException {
skipQuietly("verifyRenameDestEncryption");
}
private void skipQuietly(String text) {
describe("Skipping: %s", text);
}
protected void assertStorageClass(Path hugeFile) throws IOException {
String actual = getS3AInternals().getObjectMetadata(hugeFile).storageClassAsString();
assertTrue(STORAGE_CLASS_REDUCED_REDUNDANCY.equalsIgnoreCase(actual),
"Storage
|
ITestS3AHugeFilesStorageClass
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.