language stringclasses 1 value | repo stringclasses 60 values | path stringlengths 22 294 | class_span dict | source stringlengths 13 1.16M | target stringlengths 1 113 |
|---|---|---|---|---|---|
java | grpc__grpc-java | okhttp/src/main/java/io/grpc/okhttp/OkHttpChannelBuilder.java | {
"start": 3504,
"end": 4042
} | class ____ extends ForwardingChannelBuilder2<OkHttpChannelBuilder> {
private static final Logger log = Logger.getLogger(OkHttpChannelBuilder.class.getName());
public static final int DEFAULT_FLOW_CONTROL_WINDOW = 65535;
private final ManagedChannelImplBuilder managedChannelImplBuilder;
private final ChannelCredentials channelCredentials;
private TransportTracer.Factory transportTracerFactory = TransportTracer.getDefaultFactory();
/** Identifies the negotiation used for starting up HTTP/2. */
private | OkHttpChannelBuilder |
java | spring-projects__spring-framework | spring-core/src/main/java/org/springframework/aot/hint/ResourcePatternHints.java | {
"start": 1588,
"end": 4220
} | class ____ {
private final Set<ResourcePatternHint> includes = new LinkedHashSet<>();
Builder() {
}
/**
* Include resources matching the specified glob patterns.
* @param reachableType the type that should be reachable for this hint to apply
* @param includes the include patterns (see {@link ResourcePatternHint} documentation)
* @return {@code this}, to facilitate method chaining
*/
public Builder includes(@Nullable TypeReference reachableType, String... includes) {
Arrays.stream(includes)
.map(this::expandToIncludeDirectories)
.flatMap(List::stream)
.map(include -> new ResourcePatternHint(include, reachableType))
.forEach(this.includes::add);
return this;
}
/**
* Expand the supplied include pattern into multiple patterns that include
* all parent directories for the ultimate resource or resources.
* <p>This is necessary to support classpath scanning within a GraalVM
* native image.
* @see <a href="https://github.com/spring-projects/spring-framework/issues/29403">gh-29403</a>
*/
private List<String> expandToIncludeDirectories(String includePattern) {
// Resource in root or no explicit subdirectories?
if (!includePattern.contains("/")) {
// Include the root directory as well as the pattern
return List.of("/", includePattern);
}
List<String> includePatterns = new ArrayList<>();
// Ensure the root directory and original pattern are always included
includePatterns.add("/");
includePatterns.add(includePattern);
StringBuilder path = new StringBuilder();
for (String pathElement : includePattern.split("/")) {
if (pathElement.isEmpty()) {
// Skip empty path elements
continue;
}
if (pathElement.contains("*")) {
// Stop at the first encountered wildcard, since we cannot reliably reason
// any further about the directory structure below this path element.
break;
}
if (!path.isEmpty()) {
path.append("/");
}
path.append(pathElement);
includePatterns.add(path.toString());
}
return includePatterns;
}
/**
* Include resources matching the specified glob patterns.
* @param includes the include patterns (see {@link ResourcePatternHint} documentation)
* @return {@code this}, to facilitate method chaining
*/
public Builder includes(String... includes) {
return includes(null, includes);
}
/**
* Create {@link ResourcePatternHints} based on the state of this
* builder.
* @return resource pattern hints
*/
ResourcePatternHints build() {
return new ResourcePatternHints(this);
}
}
}
| Builder |
java | spring-projects__spring-framework | spring-core/src/main/java/org/springframework/core/metrics/DefaultApplicationStartup.java | {
"start": 1840,
"end": 2006
} | class ____ implements StartupStep.Tags {
@Override
public Iterator<StartupStep.Tag> iterator() {
return Collections.emptyIterator();
}
}
}
}
| DefaultTags |
java | apache__camel | components/camel-google/camel-google-functions/src/generated/java/org/apache/camel/component/google/functions/GoogleCloudFunctionsEndpointUriFactory.java | {
"start": 526,
"end": 2362
} | class ____ extends org.apache.camel.support.component.EndpointUriFactorySupport implements EndpointUriFactory {
private static final String BASE = ":functionName";
private static final Set<String> PROPERTY_NAMES;
private static final Set<String> SECRET_PROPERTY_NAMES;
private static final Map<String, String> MULTI_VALUE_PREFIXES;
static {
Set<String> props = new HashSet<>(8);
props.add("client");
props.add("functionName");
props.add("lazyStartProducer");
props.add("location");
props.add("operation");
props.add("pojoRequest");
props.add("project");
props.add("serviceAccountKey");
PROPERTY_NAMES = Collections.unmodifiableSet(props);
SECRET_PROPERTY_NAMES = Collections.emptySet();
MULTI_VALUE_PREFIXES = Collections.emptyMap();
}
@Override
public boolean isEnabled(String scheme) {
return "google-functions".equals(scheme);
}
@Override
public String buildUri(String scheme, Map<String, Object> properties, boolean encode) throws URISyntaxException {
String syntax = scheme + BASE;
String uri = syntax;
Map<String, Object> copy = new HashMap<>(properties);
uri = buildPathParameter(syntax, uri, "functionName", null, true, copy);
uri = buildQueryParameters(uri, copy, encode);
return uri;
}
@Override
public Set<String> propertyNames() {
return PROPERTY_NAMES;
}
@Override
public Set<String> secretPropertyNames() {
return SECRET_PROPERTY_NAMES;
}
@Override
public Map<String, String> multiValuePrefixes() {
return MULTI_VALUE_PREFIXES;
}
@Override
public boolean isLenientProperties() {
return false;
}
}
| GoogleCloudFunctionsEndpointUriFactory |
java | FasterXML__jackson-databind | src/test/java/tools/jackson/databind/tofix/ObjectIdWithUnwrapping1298Test.java | {
"start": 776,
"end": 1094
} | class ____ {
public List<Parent> parents = new ArrayList<>();
public void addParent(Parent parent) {
parents.add(parent);
}
}
@JsonIdentityInfo(generator = ObjectIdGenerators.PropertyGenerator.class, property = "id", scope = Parent.class)
public static final | ListOfParents |
java | apache__dubbo | dubbo-remoting/dubbo-remoting-api/src/main/java/org/apache/dubbo/remoting/transport/AbstractChannel.java | {
"start": 1099,
"end": 1997
} | class ____ extends AbstractPeer implements Channel {
public AbstractChannel(URL url, ChannelHandler handler) {
super(url, handler);
}
@Override
public void send(Object message, boolean sent) throws RemotingException {
if (isClosed()) {
throw new RemotingException(
this,
"Failed to send message "
+ (message == null ? "" : message.getClass().getName()) + ":"
+ PayloadDropper.getRequestWithoutData(message)
+ ", cause: Channel closed. channel: " + getLocalAddress() + " -> " + getRemoteAddress());
}
}
@Override
public String toString() {
return getLocalAddress() + " -> " + getRemoteAddress();
}
@Override
protected void setUrl(URL url) {
super.setUrl(url);
}
}
| AbstractChannel |
java | apache__flink | flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/plan/reuse/SinkReuser.java | {
"start": 2707,
"end": 8034
} | class ____ {
private final boolean isStreamingMode;
public SinkReuser(boolean isStreamingMode) {
this.isStreamingMode = isStreamingMode;
}
public List<RelNode> reuseDuplicatedSink(List<RelNode> relNodes) {
// Find all sinks
List<Sink> allSinkNodes =
relNodes.stream()
.filter(node -> node instanceof Sink)
.map(node -> (Sink) node)
.collect(Collectors.toList());
List<ReusableSinkGroup> reusableSinkGroups = groupReusableSink(allSinkNodes);
Set<Sink> reusedSinkNodes = reuseSinkAndAddUnion(reusableSinkGroups);
// Remove all unused sink nodes
return relNodes.stream()
.filter(root -> !(root instanceof Sink) || reusedSinkNodes.contains(root))
.collect(Collectors.toList());
}
private Set<Sink> reuseSinkAndAddUnion(List<ReusableSinkGroup> reusableSinkGroups) {
final Set<Sink> reusedSinkNodes = Collections.newSetFromMap(new IdentityHashMap<>());
reusableSinkGroups.forEach(
group -> {
List<Sink> originalSinks = group.originalSinks;
if (originalSinks.size() <= 1) {
Preconditions.checkState(originalSinks.size() == 1);
reusedSinkNodes.add(originalSinks.get(0));
return;
}
List<RelNode> allSinkInputs = new ArrayList<>();
for (Sink sinkNode : originalSinks) {
allSinkInputs.add(sinkNode.getInput());
}
// Use the first sink node as the final reused sink node
Sink reusedSink = originalSinks.get(0);
Union unionForReusedSinks;
if (isStreamingMode) {
unionForReusedSinks =
new StreamPhysicalUnion(
reusedSink.getCluster(),
group.inputTraitSet,
allSinkInputs,
true,
// use sink input row type
reusedSink.getRowType());
} else {
unionForReusedSinks =
new BatchPhysicalUnion(
reusedSink.getCluster(),
group.inputTraitSet,
allSinkInputs,
true,
// use sink input row type
reusedSink.getRowType());
}
reusedSink.replaceInput(0, unionForReusedSinks);
reusedSinkNodes.add(reusedSink);
});
return reusedSinkNodes;
}
/**
* Grouping sinks that can be reused with each other.
*
* @param allSinkNodes in the plan.
* @return a list contains all grouped sink.
*/
private List<ReusableSinkGroup> groupReusableSink(List<Sink> allSinkNodes) {
List<ReusableSinkGroup> reusableSinkGroups = new ArrayList<>();
for (Sink currentSinkNode : allSinkNodes) {
Optional<ReusableSinkGroup> targetGroup =
reusableSinkGroups.stream()
.filter(
reusableSinkGroup ->
reusableSinkGroup.canBeReused(currentSinkNode))
.findFirst();
if (targetGroup.isPresent()) {
targetGroup.get().originalSinks.add(currentSinkNode);
} else {
// If the current sink cannot be reused with any existing groups, create a new
// group.
reusableSinkGroups.add(new ReusableSinkGroup(currentSinkNode));
}
}
return reusableSinkGroups;
}
private String getDigest(Sink sink) {
List<String> digest = new ArrayList<>();
digest.add(sink.contextResolvedTable().getIdentifier().asSummaryString());
int[][] targetColumns = sink.targetColumns();
if (targetColumns != null && targetColumns.length > 0) {
digest.add(
"targetColumns=["
+ Arrays.stream(targetColumns)
.map(Arrays::toString)
.collect(Collectors.joining(","))
+ "]");
}
String fieldTypes =
sink.getRowType().getFieldList().stream()
.map(f -> f.getType().toString())
.collect(Collectors.joining(", "));
digest.add("fieldTypes=[" + fieldTypes + "]");
if (!sink.hints().isEmpty()) {
digest.add("hints=" + RelExplainUtil.hintsToString(sink.hints()));
}
if (isStreamingMode) {
digest.add("upsertMaterialize=" + ((StreamPhysicalSink) sink).upsertMaterialize());
}
return digest.toString();
}
private | SinkReuser |
java | spring-projects__spring-security | config/src/test/java/org/springframework/security/config/annotation/web/configurers/HeadersConfigurerTests.java | {
"start": 30463,
"end": 30876
} | class ____ {
@Bean
SecurityFilterChain filterChain(HttpSecurity http) throws Exception {
// @formatter:off
http
.headers((headers) -> headers
.defaultsDisabled()
.xssProtection((xss) -> xss
.headerValue(XXssProtectionHeaderWriter.HeaderValue.ENABLED_MODE_BLOCK)));
// @formatter:on
return http.build();
}
}
@EnableWebSecurity
static | XssProtectionValueEnabledModeBlockConfig |
java | micronaut-projects__micronaut-core | module-info-runtime/src/main/java/io/micronaut/module/info/runtime/MicronautRuntimeModule.java | {
"start": 1236,
"end": 3700
} | class ____ {
private final MicronautModuleInfo info;
private final MicronautRuntimeModule parent;
private final List<MicronautRuntimeModule> children;
public MicronautRuntimeModule(MicronautModuleInfo info,
MicronautRuntimeModule parent,
List<MicronautRuntimeModule> children) {
this.info = info;
this.parent = parent;
this.children = children;
}
public MicronautModuleInfo getInfo() {
return info;
}
/**
* The id for this module. It is recommended to use "groupId:artifactId".
* Do NOT include version information in the id.
* @return the id for this module.
*/
public String getId() {
return info.getId();
}
/**
* A human-readable name for this module.
* @return the name of this module
*/
public String getName() {
return info.getName();
}
/**
* A description of this module.
* @return the description
*/
public Optional<String> getDescription() {
return info.getDescription();
}
/**
* The version of this module.
* @return the version
*/
public String getVersion() {
return info.getVersion();
}
/**
* Returns the Maven coordinates for this module,
* if it can be represented so.
* @return the Maven coordinates
*/
public Optional<MavenCoordinates> getMavenCoordinates() {
return info.getMavenCoordinates();
}
/**
* Returns the parent module, if any.
* @return the parent module
*/
public Optional<MicronautRuntimeModule> getParent() {
return Optional.ofNullable(parent);
}
/**
* Returns the child modules.
*
* @return the child modules
*/
public List<MicronautRuntimeModule> getChildren() {
return Collections.unmodifiableList(children);
}
/**
* A set of tags assigned to this module.
* @return the set of tags for this module
*/
public Set<String> getTags() {
return info.getTags();
}
/**
* Returns the root module descriptor.
* @return the root descriptor
*/
public MicronautRuntimeModule getRoot() {
if (parent == null) {
return this;
}
return parent.getRoot();
}
@Override
public String toString() {
return getId();
}
}
| MicronautRuntimeModule |
java | apache__flink | flink-runtime/src/test/java/org/apache/flink/runtime/rest/MultipartUploadExtension.java | {
"start": 3168,
"end": 10155
} | class ____ implements CustomExtension {
private static final Logger LOG = LoggerFactory.getLogger(MultipartUploadExtension.class);
private final Supplier<Path> tmpDirectorySupplier;
private RestServerEndpoint serverEndpoint;
protected String serverAddress;
protected InetSocketAddress serverSocketAddress;
protected MultipartMixedHandler mixedHandler;
protected MultipartJsonHandler jsonHandler;
protected MultipartFileHandler fileHandler;
protected File file1;
protected File file2;
private Path configuredUploadDir;
private BiConsumerWithException<HandlerRequest<?>, RestfulGateway, RestHandlerException>
fileUploadVerifier;
public MultipartUploadExtension(Supplier<Path> tmpDirectorySupplier) {
this.tmpDirectorySupplier = tmpDirectorySupplier;
}
@Override
public void before(ExtensionContext context) throws Exception {
Path tmpDirectory = tmpDirectorySupplier.get();
Configuration config = new Configuration();
config.set(RestOptions.BIND_PORT, "0");
config.set(RestOptions.ADDRESS, "localhost");
// set this to a lower value on purpose to test that files larger than the content limit are
// still accepted
config.set(RestOptions.SERVER_MAX_CONTENT_LENGTH, 1024 * 1024);
configuredUploadDir = TempDirUtils.newFolder(tmpDirectory).toPath();
config.set(WebOptions.UPLOAD_DIR, configuredUploadDir.toString());
RestfulGateway mockRestfulGateway = new TestingRestfulGateway();
final GatewayRetriever<RestfulGateway> mockGatewayRetriever =
() -> CompletableFuture.completedFuture(mockRestfulGateway);
file1 = TempDirUtils.newFile(tmpDirectory);
try (RandomAccessFile rw = new RandomAccessFile(file1, "rw")) {
// magic value that reliably reproduced https://github.com/netty/netty/issues/11668
rw.setLength(5043444);
}
file2 = TempDirUtils.newFile(tmpDirectory);
Files.write(file2.toPath(), "world".getBytes(ConfigConstants.DEFAULT_CHARSET));
mixedHandler = new MultipartMixedHandler(mockGatewayRetriever);
jsonHandler = new MultipartJsonHandler(mockGatewayRetriever);
fileHandler = new MultipartFileHandler(mockGatewayRetriever);
serverEndpoint =
TestRestServerEndpoint.builder(config)
.withHandler(mixedHandler)
.withHandler(jsonHandler)
.withHandler(fileHandler)
.buildAndStart();
serverAddress = serverEndpoint.getRestBaseUrl();
serverSocketAddress = serverEndpoint.getServerAddress();
this.setFileUploadVerifier(
(request, restfulGateway) -> {
// the default verifier checks for identiy (i.e. same name and content) of all
// uploaded files
assertUploadedFilesEqual(request, getFilesToUpload());
});
}
public static void assertUploadedFilesEqual(HandlerRequest<?> request, Collection<File> files)
throws IOException {
List<Path> expectedFiles = files.stream().map(File::toPath).collect(Collectors.toList());
List<Path> uploadedFiles =
request.getUploadedFiles().stream().map(File::toPath).collect(Collectors.toList());
assertThat(uploadedFiles).hasSameSizeAs(expectedFiles);
List<Path> expectedList = new ArrayList<>(expectedFiles);
List<Path> actualList = new ArrayList<>(uploadedFiles);
expectedList.sort(Comparator.comparing(Path::toString));
actualList.sort(Comparator.comparing(Path::toString));
for (int x = 0; x < expectedList.size(); x++) {
Path expected = expectedList.get(x);
Path actual = actualList.get(x);
assertThat(actual.getFileName()).hasToString(expected.getFileName().toString());
byte[] originalContent = Files.readAllBytes(expected);
byte[] receivedContent = Files.readAllBytes(actual);
assertThat(receivedContent).isEqualTo(originalContent);
}
}
public void setFileUploadVerifier(
BiConsumerWithException<
HandlerRequest<? extends RequestBody>, RestfulGateway, Exception>
verifier) {
this.fileUploadVerifier =
(request, restfulGateway) -> {
try {
verifier.accept(request, restfulGateway);
} catch (Exception e) {
// return 505 to differentiate from common BAD_REQUEST responses in this
// test
throw new RestHandlerException(
"Test verification failed.",
HttpResponseStatus.HTTP_VERSION_NOT_SUPPORTED,
e);
}
};
}
public Collection<File> getFilesToUpload() {
return Arrays.asList(file1, file2);
}
public String getServerAddress() {
return serverAddress;
}
public InetSocketAddress getServerSocketAddress() {
return serverSocketAddress;
}
public MultipartMixedHandler getMixedHandler() {
return mixedHandler;
}
public MultipartFileHandler getFileHandler() {
return fileHandler;
}
public MultipartJsonHandler getJsonHandler() {
return jsonHandler;
}
public Path getUploadDirectory() {
return configuredUploadDir;
}
public void resetState() {
mixedHandler.lastReceivedRequest = null;
jsonHandler.lastReceivedRequest = null;
}
@Override
public void after(ExtensionContext context) throws Exception {
if (serverEndpoint != null) {
try {
serverEndpoint.close();
} catch (Exception e) {
LOG.warn("Could not properly shutdown RestServerEndpoint.", e);
}
serverEndpoint = null;
}
}
public void assertUploadDirectoryIsEmpty() throws IOException {
Path actualUploadDir;
try (Stream<Path> containedFiles = Files.list(configuredUploadDir)) {
List<Path> files = containedFiles.collect(Collectors.toList());
Preconditions.checkArgument(
1 == files.size(),
"Directory structure in rest upload directory has changed. Test must be adjusted");
actualUploadDir = files.get(0);
}
try (Stream<Path> containedFiles = Files.list(actualUploadDir)) {
assertThat(containedFiles).withFailMessage("Not all files were cleaned up.").isEmpty();
}
}
/**
* Handler that accepts a mixed request consisting of a {@link TestRequestBody} and {@link
* #file1} and {@link #file2}.
*/
public | MultipartUploadExtension |
java | apache__maven | its/core-it-suite/src/test/resources/mng-8750-new-scopes/compile-only-test/src/main/java/org/apache/maven/its/mng8750/CompileOnlyExample.java | {
"start": 1138,
"end": 2573
} | class ____ {
/**
* Method that uses a compile-only dependency.
* This should compile successfully but the dependency won't be available at runtime.
*/
public String useCompileOnlyDep() {
CompileOnlyDep dep = new CompileOnlyDep();
return "Used compile-only dependency: " + dep.getMessage();
}
/**
* Method that uses a regular compile dependency.
* This should compile successfully and the dependency will be available at runtime.
*/
public String useCompileDep() {
CompileDep dep = new CompileDep();
return "Used compile dependency: " + dep.getMessage();
}
/**
* Main method for testing.
*/
public static void main(String[] args) {
CompileOnlyExample example = new CompileOnlyExample();
// This will work during compilation
System.out.println(example.useCompileDep());
// This will also work during compilation but fail at runtime
// if compile-only dependency is not in runtime classpath
try {
System.out.println(example.useCompileOnlyDep());
System.out.println("ERROR: Compile-only dependency should not be available at runtime!");
} catch (NoClassDefFoundError e) {
System.out.println(
"Runtime classpath verification: PASSED - compile-only dependency not available at runtime");
}
}
}
| CompileOnlyExample |
java | elastic__elasticsearch | x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/api/filtering/ApiFilteringActionFilterTests.java | {
"start": 6181,
"end": 6956
} | class ____ extends ApiFilteringActionFilter<TestResponse> {
ExceptionTestFilter(ThreadContext threadContext) {
super(threadContext, "test.exception.action", TestResponse.class);
}
@Override
protected TestResponse filterResponse(TestResponse response) {
throw new RuntimeException("Throwing expected exception");
}
}
private ThreadContext getTestThreadContext(boolean isOperator) {
Settings settings;
if (isOperator) {
settings = Settings.builder().put("request.headers._security_privilege_category", "operator").build();
} else {
settings = Settings.EMPTY;
}
return new ThreadContext(settings);
}
private static | ExceptionTestFilter |
java | processing__processing4 | build/shared/tools/MovieMaker/src/ch/randelshofer/gui/datatransfer/PlainTextTransferable.java | {
"start": 747,
"end": 2697
} | class ____ extends AbstractTransferable {
private String plainText;
public PlainTextTransferable(String plainText) {
this(getDefaultFlavors(), plainText);
}
public PlainTextTransferable(DataFlavor flavor, String plainText) {
this(new DataFlavor[] { flavor }, plainText);
}
public PlainTextTransferable(DataFlavor[] flavors, String plainText) {
super(flavors);
this.plainText = plainText;
}
public Object getTransferData(DataFlavor flavor) throws UnsupportedFlavorException, IOException {
if (! isDataFlavorSupported(flavor)) {
throw new UnsupportedFlavorException(flavor);
}
plainText = (plainText == null) ? "" : plainText;
if (String.class.equals(flavor.getRepresentationClass())) {
return plainText;
} else if (Reader.class.equals(flavor.getRepresentationClass())) {
return new StringReader(plainText);
} else if (InputStream.class.equals(flavor.getRepresentationClass())) {
String charsetName = flavor.getParameter("charset");
return new ByteArrayInputStream(plainText.getBytes(charsetName==null?"UTF-8":charsetName));
//return new StringBufferInputStream(plainText);
} // fall through to unsupported
throw new UnsupportedFlavorException(flavor);
}
protected static DataFlavor[] getDefaultFlavors() {
try {
return new DataFlavor[] {
new DataFlavor("text/plain;class=java.lang.String"),
new DataFlavor("text/plain;class=java.io.Reader"),
new DataFlavor("text/plain;charset=unicode;class=java.io.InputStream")
};
} catch (ClassNotFoundException cle) {
InternalError ie = new InternalError(
"error initializing PlainTextTransferable");
ie.initCause(cle);
throw ie;
}
}
}
| PlainTextTransferable |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/index/mapper/DocCountFieldMapper.java | {
"start": 5054,
"end": 6253
} | class ____ extends SourceLoader.DocValuesBasedSyntheticFieldLoader {
private PostingsEnum postings;
private boolean hasValue;
@Override
public DocValuesLoader docValuesLoader(LeafReader leafReader, int[] docIdsInLeaf) throws IOException {
postings = leafLookup(leafReader);
if (postings == null) {
hasValue = false;
return null;
}
return docId -> {
if (docId < postings.docID()) {
return hasValue = false;
}
if (docId == postings.docID()) {
return hasValue = true;
}
return hasValue = docId == postings.advance(docId);
};
}
@Override
public boolean hasValue() {
return hasValue;
}
@Override
public void write(XContentBuilder b) throws IOException {
if (hasValue == false) {
return;
}
b.field(NAME, postings.freq());
}
@Override
public String fieldName() {
return NAME;
}
}
}
| SyntheticFieldLoader |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/TypeParameterShadowingTest.java | {
"start": 5728,
"end": 6075
} | class ____<T, D> {
<T, D> void something(T t) {
T other = t;
java.util.List<T> ts = new java.util.ArrayList<T>();
D d = null;
}
}
""")
.addOutputLines(
"out/Test.java",
"""
package foo.bar;
| Test |
java | quarkusio__quarkus | integration-tests/opentelemetry-reactive/src/test/java/io/quarkus/it/opentelemetry/reactive/OpenTelemetryWithSpanAtStartupTest.java | {
"start": 3383,
"end": 4116
} | class ____ implements QuarkusTestResourceLifecycleManager {
WireMockServer wireMockServer;
@Override
public Map<String, String> start() {
wireMockServer = new WireMockServer(WIREMOCK_PORT);
wireMockServer.stubFor(
WireMock.get(WireMock.urlMatching("/stub"))
.willReturn(ok()));
wireMockServer.start();
return Map.of(STARTUP_BEAN_ENABLED_PROPERTY, Boolean.TRUE.toString());
}
@Override
public synchronized void stop() {
if (wireMockServer != null) {
wireMockServer.stop();
wireMockServer = null;
}
}
}
}
| MyWireMockResource |
java | spring-projects__spring-boot | core/spring-boot-autoconfigure-processor/src/test/java/org/springframework/boot/autoconfigureprocessor/TestMethodConfiguration.java | {
"start": 805,
"end": 968
} | class ____ {
@TestConditionalOnClass(name = "java.io.InputStream", value = OutputStream.class)
public Object method() {
return null;
}
}
| TestMethodConfiguration |
java | google__error-prone | core/src/test/java/com/google/errorprone/ErrorProneCompilerIntegrationTest.java | {
"start": 18717,
"end": 19310
} | class ____ extends BugChecker implements ReturnTreeMatcher {
@Override
public Description matchReturn(ReturnTree tree, VisitorState state) {
throw new NullPointerException();
}
}
@Test
public void crashSourcePosition() {
compiler =
compilerBuilder.report(ScannerSupplier.fromBugCheckerClasses(CrashOnReturn.class)).build();
Result exitCode =
compiler.compile(
Arrays.asList(
forSourceLines(
"test/Test.java",
"""
package Test;
| CrashOnReturn |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestSetTimes.java | {
"start": 1975,
"end": 13720
} | class ____ {
static final long seed = 0xDEADBEEFL;
static final int blockSize = 8192;
static final int fileSize = 16384;
static final int numDatanodes = 1;
static final SimpleDateFormat dateForm = new SimpleDateFormat("yyyy-MM-dd HH:mm");
Random myrand = new Random();
Path hostsFile;
Path excludeFile;
private FSDataOutputStream writeFile(FileSystem fileSys, Path name, int repl)
throws IOException {
FSDataOutputStream stm = fileSys.create(name, true, fileSys.getConf()
.getInt(CommonConfigurationKeys.IO_FILE_BUFFER_SIZE_KEY, 4096),
(short) repl, blockSize);
byte[] buffer = new byte[fileSize];
Random rand = new Random(seed);
rand.nextBytes(buffer);
stm.write(buffer);
return stm;
}
private void cleanupFile(FileSystem fileSys, Path name) throws IOException {
assertTrue(fileSys.exists(name));
fileSys.delete(name, true);
assertTrue(!fileSys.exists(name));
}
private void printDatanodeReport(DatanodeInfo[] info) {
System.out.println("-------------------------------------------------");
for (int i = 0; i < info.length; i++) {
System.out.println(info[i].getDatanodeReport());
System.out.println();
}
}
/**
* Tests mod & access time in DFS.
*/
@Test
public void testTimes() throws IOException {
Configuration conf = new HdfsConfiguration();
final int MAX_IDLE_TIME = 2000; // 2s
conf.setInt("ipc.client.connection.maxidletime", MAX_IDLE_TIME);
conf.setInt(DFSConfigKeys.DFS_NAMENODE_HEARTBEAT_RECHECK_INTERVAL_KEY, 1000);
conf.setInt(DFSConfigKeys.DFS_HEARTBEAT_INTERVAL_KEY, 1);
MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf)
.numDataNodes(numDatanodes)
.build();
cluster.waitActive();
final int nnport = cluster.getNameNodePort();
InetSocketAddress addr = new InetSocketAddress("localhost",
cluster.getNameNodePort());
DFSClient client = new DFSClient(addr, conf);
DatanodeInfo[] info = client.datanodeReport(DatanodeReportType.LIVE);
assertEquals(numDatanodes, info.length, "Number of Datanodes ");
FileSystem fileSys = cluster.getFileSystem();
int replicas = 1;
assertTrue(fileSys instanceof DistributedFileSystem);
try {
//
// create file and record atime/mtime
//
System.out.println("Creating testdir1 and testdir1/test1.dat.");
Path dir1 = new Path("testdir1");
Path file1 = new Path(dir1, "test1.dat");
FSDataOutputStream stm = writeFile(fileSys, file1, replicas);
FileStatus stat = fileSys.getFileStatus(file1);
long atimeBeforeClose = stat.getAccessTime();
String adate = dateForm.format(new Date(atimeBeforeClose));
System.out.println("atime on " + file1 + " before close is " +
adate + " (" + atimeBeforeClose + ")");
assertTrue(atimeBeforeClose != 0);
stm.close();
stat = fileSys.getFileStatus(file1);
long atime1 = stat.getAccessTime();
long mtime1 = stat.getModificationTime();
adate = dateForm.format(new Date(atime1));
String mdate = dateForm.format(new Date(mtime1));
System.out.println("atime on " + file1 + " is " + adate +
" (" + atime1 + ")");
System.out.println("mtime on " + file1 + " is " + mdate +
" (" + mtime1 + ")");
assertTrue(atime1 != 0);
// check setting negative value for atime and mtime.
fileSys.setTimes(file1, -2, -2);
// The values shouldn't change.
stat = fileSys.getFileStatus(file1);
assertEquals(mtime1, stat.getModificationTime());
assertEquals(atime1, stat.getAccessTime());
//
// record dir times
//
stat = fileSys.getFileStatus(dir1);
long mdir1 = stat.getAccessTime();
assertTrue(mdir1 == 0);
// set the access time to be one day in the past
long atime2 = atime1 - (24L * 3600L * 1000L);
fileSys.setTimes(file1, -1, atime2);
// check new access time on file
stat = fileSys.getFileStatus(file1);
long atime3 = stat.getAccessTime();
String adate3 = dateForm.format(new Date(atime3));
System.out.println("new atime on " + file1 + " is " +
adate3 + " (" + atime3 + ")");
assertTrue(atime2 == atime3);
assertTrue(mtime1 == stat.getModificationTime());
// set the modification time to be 1 hour in the past
long mtime2 = mtime1 - (3600L * 1000L);
fileSys.setTimes(file1, mtime2, -1);
// check new modification time on file
stat = fileSys.getFileStatus(file1);
long mtime3 = stat.getModificationTime();
String mdate3 = dateForm.format(new Date(mtime3));
System.out.println("new mtime on " + file1 + " is " +
mdate3 + " (" + mtime3 + ")");
assertTrue(atime2 == stat.getAccessTime());
assertTrue(mtime2 == mtime3);
long mtime4 = Time.now() - (3600L * 1000L);
long atime4 = Time.now();
fileSys.setTimes(dir1, mtime4, atime4);
// check new modification time on file
stat = fileSys.getFileStatus(dir1);
assertTrue(mtime4 == stat.getModificationTime(), "Not matching the modification times");
assertTrue(atime4 == stat.getAccessTime(), "Not matching the access times");
Path nonExistingDir = new Path(dir1, "/nonExistingDir/");
try {
fileSys.setTimes(nonExistingDir, mtime4, atime4);
fail("Expecting FileNotFoundException");
} catch (FileNotFoundException e) {
assertTrue(e.getMessage().contains(
"File/Directory " + nonExistingDir.toString() + " does not exist."));
}
// shutdown cluster and restart
cluster.shutdown();
try {Thread.sleep(2*MAX_IDLE_TIME);} catch (InterruptedException e) {}
cluster = new MiniDFSCluster.Builder(conf).nameNodePort(nnport)
.format(false)
.build();
cluster.waitActive();
fileSys = cluster.getFileSystem();
// verify that access times and modification times persist after a
// cluster restart.
System.out.println("Verifying times after cluster restart");
stat = fileSys.getFileStatus(file1);
assertTrue(atime2 == stat.getAccessTime());
assertTrue(mtime3 == stat.getModificationTime());
cleanupFile(fileSys, file1);
cleanupFile(fileSys, dir1);
} catch (IOException e) {
info = client.datanodeReport(DatanodeReportType.ALL);
printDatanodeReport(info);
throw e;
} finally {
fileSys.close();
cluster.shutdown();
}
}
/**
* Tests mod time change at close in DFS.
*/
@Test
public void testTimesAtClose() throws IOException {
Configuration conf = new HdfsConfiguration();
final int MAX_IDLE_TIME = 2000; // 2s
int replicas = 1;
// parameter initialization
conf.setInt("ipc.client.connection.maxidletime", MAX_IDLE_TIME);
conf.setInt(DFSConfigKeys.DFS_NAMENODE_HEARTBEAT_RECHECK_INTERVAL_KEY, 1000);
conf.setInt(DFSConfigKeys.DFS_HEARTBEAT_INTERVAL_KEY, 1);
conf.setInt(DFSConfigKeys.DFS_DATANODE_HANDLER_COUNT_KEY, 50);
MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf)
.numDataNodes(numDatanodes)
.build();
cluster.waitActive();
InetSocketAddress addr = new InetSocketAddress("localhost",
cluster.getNameNodePort());
DFSClient client = new DFSClient(addr, conf);
DatanodeInfo[] info = client.datanodeReport(DatanodeReportType.LIVE);
assertEquals(numDatanodes, info.length, "Number of Datanodes ");
FileSystem fileSys = cluster.getFileSystem();
assertTrue(fileSys instanceof DistributedFileSystem);
try {
// create a new file and write to it
Path file1 = new Path("/simple.dat");
FSDataOutputStream stm = writeFile(fileSys, file1, replicas);
System.out.println("Created and wrote file simple.dat");
FileStatus statBeforeClose = fileSys.getFileStatus(file1);
long mtimeBeforeClose = statBeforeClose.getModificationTime();
String mdateBeforeClose = dateForm.format(new Date(
mtimeBeforeClose));
System.out.println("mtime on " + file1 + " before close is "
+ mdateBeforeClose + " (" + mtimeBeforeClose + ")");
assertTrue(mtimeBeforeClose != 0);
//close file after writing
stm.close();
System.out.println("Closed file.");
FileStatus statAfterClose = fileSys.getFileStatus(file1);
long mtimeAfterClose = statAfterClose.getModificationTime();
String mdateAfterClose = dateForm.format(new Date(mtimeAfterClose));
System.out.println("mtime on " + file1 + " after close is "
+ mdateAfterClose + " (" + mtimeAfterClose + ")");
assertTrue(mtimeAfterClose != 0);
assertTrue(mtimeBeforeClose != mtimeAfterClose);
cleanupFile(fileSys, file1);
} catch (IOException e) {
info = client.datanodeReport(DatanodeReportType.ALL);
printDatanodeReport(info);
throw e;
} finally {
fileSys.close();
cluster.shutdown();
}
}
/**
* Test that when access time updates are not needed, the FSNamesystem
* write lock is not taken by getBlockLocations.
* Regression test for HDFS-3981.
*/
@Test
@Timeout(value = 60)
public void testGetBlockLocationsOnlyUsesReadLock() throws IOException {
Configuration conf = new HdfsConfiguration();
conf.setInt(DFSConfigKeys.DFS_NAMENODE_ACCESSTIME_PRECISION_KEY, 100*1000);
MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf)
.numDataNodes(0)
.build();
ReentrantReadWriteLock spyLock =
NameNodeAdapterMockitoUtil.spyOnFsLock(cluster.getNamesystem());
try {
// Create empty file in the FSN.
Path p = new Path("/empty-file");
DFSTestUtil.createFile(cluster.getFileSystem(), p, 0, (short)1, 0L);
// getBlockLocations() should not need the write lock, since we just created
// the file (and thus its access time is already within the 100-second
// accesstime precision configured above).
MockitoUtil.doThrowWhenCallStackMatches(
new AssertionError("Should not need write lock"),
".*getBlockLocations.*")
.when(spyLock).writeLock();
cluster.getFileSystem().getFileBlockLocations(p, 0, 100);
} finally {
cluster.shutdown();
}
}
/**
* Test whether atime can be set explicitly even when the atime support is
* disabled.
*/
@Test
public void testAtimeUpdate() throws Exception {
Configuration conf = new HdfsConfiguration();
conf.setInt(DFSConfigKeys.DFS_NAMENODE_ACCESSTIME_PRECISION_KEY, 0);
MiniDFSCluster cluster = null;
FileSystem fs = null;
try {
cluster = new MiniDFSCluster.Builder(conf)
.numDataNodes(0)
.build();
fs = cluster.getFileSystem();
// Create an empty file
Path p = new Path("/testAtimeUpdate");
DFSTestUtil.createFile(cluster.getFileSystem(), p, 0, (short)1, 0L);
fs.setTimes(p, -1L, 123456L);
Assertions.assertEquals(123456L, fs.getFileStatus(p).getAccessTime());
} finally {
if (cluster != null) {
cluster.shutdown();
}
}
}
public static void main(String[] args) throws Exception {
new TestSetTimes().testTimes();
}
}
| TestSetTimes |
java | elastic__elasticsearch | server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregationBuilderTests.java | {
"start": 1123,
"end": 7252
} | class ____ extends BaseAggregationTestCase<CompositeAggregationBuilder> {
private DateHistogramValuesSourceBuilder randomDateHistogramSourceBuilder() {
DateHistogramValuesSourceBuilder histo = new DateHistogramValuesSourceBuilder(randomAlphaOfLengthBetween(5, 10));
if (randomBoolean()) {
histo.field(randomAlphaOfLengthBetween(1, 20));
} else {
histo.script(new Script(randomAlphaOfLengthBetween(10, 20)));
}
if (randomBoolean()) {
histo.calendarInterval(
randomFrom(DateHistogramInterval.days(1), DateHistogramInterval.minutes(1), DateHistogramInterval.weeks(1))
);
} else {
histo.fixedInterval(
randomFrom(
new DateHistogramInterval(randomNonNegativeLong() + "ms"),
DateHistogramInterval.days(10),
DateHistogramInterval.hours(10)
)
);
}
if (randomBoolean()) {
histo.timeZone(randomZone());
}
if (randomBoolean()) {
histo.missingBucket(true);
}
histo.missingOrder(randomFrom(MissingOrder.values()));
return histo;
}
private GeoTileGridValuesSourceBuilder randomGeoTileGridValuesSourceBuilder() {
GeoTileGridValuesSourceBuilder geoTile = new GeoTileGridValuesSourceBuilder(randomAlphaOfLengthBetween(5, 10));
if (randomBoolean()) {
geoTile.precision(randomIntBetween(0, GeoTileUtils.MAX_ZOOM));
}
if (randomBoolean()) {
geoTile.geoBoundingBox(GeoBoundingBoxTests.randomBBox());
}
return geoTile;
}
private TermsValuesSourceBuilder randomTermsSourceBuilder() {
TermsValuesSourceBuilder terms = new TermsValuesSourceBuilder(randomAlphaOfLengthBetween(5, 10));
if (randomBoolean()) {
terms.field(randomAlphaOfLengthBetween(1, 20));
} else {
terms.script(new Script(randomAlphaOfLengthBetween(10, 20)));
}
terms.order(randomFrom(SortOrder.values()));
if (randomBoolean()) {
terms.missingBucket(true);
}
terms.missingOrder(randomFrom(MissingOrder.values()));
return terms;
}
private HistogramValuesSourceBuilder randomHistogramSourceBuilder() {
HistogramValuesSourceBuilder histo = new HistogramValuesSourceBuilder(randomAlphaOfLengthBetween(5, 10));
if (randomBoolean()) {
histo.field(randomAlphaOfLengthBetween(1, 20));
} else {
histo.script(new Script(randomAlphaOfLengthBetween(10, 20)));
}
if (randomBoolean()) {
histo.missingBucket(true);
}
histo.missingOrder(randomFrom(MissingOrder.values()));
histo.interval(randomDoubleBetween(Math.nextUp(0), Double.MAX_VALUE, false));
return histo;
}
@Override
protected CompositeAggregationBuilder createTestAggregatorBuilder() {
int numSources = randomIntBetween(1, 10);
List<CompositeValuesSourceBuilder<?>> sources = new ArrayList<>();
for (int i = 0; i < numSources; i++) {
int type = randomIntBetween(0, 3);
switch (type) {
case 0 -> sources.add(randomTermsSourceBuilder());
case 1 -> sources.add(randomDateHistogramSourceBuilder());
case 2 -> sources.add(randomHistogramSourceBuilder());
case 3 -> sources.add(randomGeoTileGridValuesSourceBuilder());
default -> throw new AssertionError("wrong branch");
}
}
return new CompositeAggregationBuilder(randomAlphaOfLength(10), sources);
}
public void testSupportsParallelCollection() {
assertTrue(
new CompositeAggregationBuilder(randomAlphaOfLength(10), Collections.singletonList(randomDateHistogramSourceBuilder()))
.supportsParallelCollection(null)
);
assertTrue(
new CompositeAggregationBuilder(randomAlphaOfLength(10), Collections.singletonList(randomHistogramSourceBuilder()))
.supportsParallelCollection(null)
);
CompositeAggregationBuilder builder = new CompositeAggregationBuilder(
randomAlphaOfLength(10),
Collections.singletonList(randomGeoTileGridValuesSourceBuilder())
);
assertTrue(builder.supportsParallelCollection(null));
builder.subAggregation(new TermsAggregationBuilder("name") {
@Override
public boolean supportsParallelCollection(ToLongFunction<String> fieldCardinalityResolver) {
return false;
}
});
assertFalse(builder.supportsParallelCollection(null));
assertFalse(
new CompositeAggregationBuilder(randomAlphaOfLength(10), Collections.singletonList(new TermsValuesSourceBuilder("name")))
.supportsParallelCollection(field -> -1)
);
assertTrue(
new CompositeAggregationBuilder(randomAlphaOfLength(10), Collections.singletonList(new TermsValuesSourceBuilder("name")))
.supportsParallelCollection(field -> randomIntBetween(0, 50))
);
assertFalse(
new CompositeAggregationBuilder(randomAlphaOfLength(10), Collections.singletonList(new TermsValuesSourceBuilder("name")))
.supportsParallelCollection(field -> randomIntBetween(51, 100))
);
assertFalse(
new CompositeAggregationBuilder(
randomAlphaOfLength(10),
Collections.singletonList(new TermsValuesSourceBuilder("name").script(new Script("id")))
).supportsParallelCollection(field -> randomIntBetween(-1, 100))
);
assertFalse(
new CompositeAggregationBuilder(
randomAlphaOfLength(10),
List.of(randomDateHistogramSourceBuilder(), new TermsValuesSourceBuilder("name"))
).supportsParallelCollection(field -> randomIntBetween(51, 100))
);
}
}
| CompositeAggregationBuilderTests |
java | quarkusio__quarkus | independent-projects/tools/devtools-common/src/main/java/io/quarkus/devtools/project/update/rewrite/operations/UpdateDependencyVersionOperation.java | {
"start": 209,
"end": 1430
} | class ____ implements RewriteOperation {
private final String groupId;
private final String artifactId;
private final String newVersion;
public UpdateDependencyVersionOperation(String groupId, String artifactId, String newVersion) {
this.groupId = groupId;
this.artifactId = artifactId;
this.newVersion = newVersion;
}
@Override
public Map<String, Object> single(BuildTool buildTool) {
switch (buildTool) {
case GRADLE_KOTLIN_DSL:
case GRADLE:
return Map.of("org.openrewrite.gradle.UpgradeDependencyVersion",
Map.of(
"groupId", groupId,
"artifactId", artifactId,
"newVersion", newVersion));
case MAVEN:
return Map.of("org.openrewrite.maven.UpgradeDependencyVersion",
Map.of(
"groupId", groupId,
"artifactId", artifactId,
"newVersion", newVersion));
default:
return Map.of();
}
}
}
| UpdateDependencyVersionOperation |
java | apache__flink | flink-formats/flink-avro/src/test/java/org/apache/flink/formats/avro/typeutils/AvroUnionLogicalSerializerTest.java | {
"start": 1176,
"end": 1939
} | class ____ extends SerializerTestBase<UnionLogicalType> {
@Override
protected TypeSerializer<UnionLogicalType> createSerializer() {
return new AvroSerializer<>(UnionLogicalType.class);
}
@Override
protected int getLength() {
return -1;
}
@Override
protected Class<UnionLogicalType> getTypeClass() {
return UnionLogicalType.class;
}
@Override
protected UnionLogicalType[] getTestData() {
final Random rnd = new Random();
final UnionLogicalType[] data = new UnionLogicalType[20];
for (int i = 0; i < data.length; i++) {
data[i] = new UnionLogicalType(Instant.ofEpochMilli(rnd.nextLong()));
}
return data;
}
}
| AvroUnionLogicalSerializerTest |
java | spring-projects__spring-boot | module/spring-boot-micrometer-metrics/src/main/java/org/springframework/boot/micrometer/metrics/autoconfigure/export/appoptics/AppOpticsMetricsExportAutoConfiguration.java | {
"start": 2384,
"end": 3056
} | class ____ {
private final AppOpticsProperties properties;
AppOpticsMetricsExportAutoConfiguration(AppOpticsProperties properties) {
this.properties = properties;
}
@Bean
@ConditionalOnMissingBean
AppOpticsConfig appOpticsConfig() {
return new AppOpticsPropertiesConfigAdapter(this.properties);
}
@Bean
@ConditionalOnMissingBean
AppOpticsMeterRegistry appOpticsMeterRegistry(AppOpticsConfig config, Clock clock) {
return AppOpticsMeterRegistry.builder(config)
.clock(clock)
.httpClient(
new HttpUrlConnectionSender(this.properties.getConnectTimeout(), this.properties.getReadTimeout()))
.build();
}
}
| AppOpticsMetricsExportAutoConfiguration |
java | apache__camel | core/camel-api/src/main/java/org/apache/camel/spi/ThreadPoolFactory.java | {
"start": 1334,
"end": 2672
} | interface ____ {
/**
* Service factory key.
*/
String FACTORY = "thread-pool-factory";
/**
* Creates a new cached thread pool
* <p/>
* The cached thread pool is a term from the JDK from the method
* {@link java.util.concurrent.Executors#newCachedThreadPool()}.
* <p/>
* Typically, it will have no size limit (this is why it is handled separately)
*
* @param threadFactory factory for creating threads
* @return the created thread pool
*/
ExecutorService newCachedThreadPool(ThreadFactory threadFactory);
/**
* Create a thread pool using the given thread pool profile
*
* @param profile parameters of the thread pool
* @param threadFactory factory for creating threads
* @return the created thread pool
*/
ExecutorService newThreadPool(ThreadPoolProfile profile, ThreadFactory threadFactory);
/**
* Create a scheduled thread pool using the given thread pool profile
*
* @param profile parameters of the thread pool
* @param threadFactory factory for creating threads
* @return the created thread pool
*/
ScheduledExecutorService newScheduledThreadPool(ThreadPoolProfile profile, ThreadFactory threadFactory);
}
| ThreadPoolFactory |
java | reactor__reactor-core | reactor-core/src/test/java/reactor/core/publisher/FluxDelayUntilTest.java | {
"start": 1140,
"end": 7748
} | class ____ {
@Test
public void testFluxEmptyAndPublisherVoid() {
Publisher<Void> voidPublisher = Mono.fromRunnable(() -> { });
StepVerifier.create(Flux.<String>empty().delayUntil(a -> voidPublisher))
.verifyComplete();
}
@Test
public void testFlux1AndPublisherVoid() {
Publisher<Void> voidPublisher = Mono.fromRunnable(() -> { });
StepVerifier.create(Flux.just("foo").delayUntil(a -> voidPublisher))
.expectNext("foo")
.verifyComplete();
}
@Test
public void testFlux2AndPublisherVoid() {
Publisher<Void> voidPublisher = Mono.fromRunnable(() -> { });
StepVerifier.create(Flux.just("foo", "bar").delayUntil(a -> voidPublisher))
.expectNext("foo", "bar")
.verifyComplete();
}
@Test
public void testFlux2DoesntReorderViaDelays() {
StepVerifier.withVirtualTime(() ->
Flux.just(100, 200, 300)
.delayUntil(v -> Mono.delay(Duration.ofMillis(400 - v)))
)
.expectSubscription()
.expectNoEvent(Duration.ofMillis(300))
.expectNext(100)
.expectNoEvent(Duration.ofMillis(200))
.expectNext(200)
.expectNoEvent(Duration.ofMillis(100))
.expectNext(300)
.verifyComplete();
}
@Test
public void triggerSequenceWithDelays() {
StepVerifier.withVirtualTime(() -> Flux.just("foo", "bar")
.delayUntil(a -> Flux.just(1, 2, 3).hide().delayElements(Duration.ofMillis(500))))
.expectSubscription()
.expectNoEvent(Duration.ofMillis(1400))
.thenAwait(Duration.ofMillis(100))
.expectNext("foo")
.expectNoEvent(Duration.ofMillis(1500))
.expectNext("bar")
.verifyComplete();
}
@Test
public void triggerSequenceHasMultipleValuesNotCancelled() {
AtomicBoolean triggerCancelled = new AtomicBoolean();
StepVerifier.create(Flux.just("foo")
.delayUntil(
a -> Flux.just(1, 2, 3).hide()
.doOnCancel(() -> triggerCancelled.set(true))))
.expectNext("foo")
.verifyComplete();
assertThat(triggerCancelled.get()).isFalse();
}
@Test
public void triggerSequenceHasSingleValueNotCancelled() {
AtomicBoolean triggerCancelled = new AtomicBoolean();
StepVerifier.create(Flux.just("foo")
.delayUntil(
a -> Mono.just(1)
.doOnCancel(() -> triggerCancelled.set(true))))
.expectNext("foo")
.verifyComplete();
assertThat(triggerCancelled.get()).isFalse();
}
@Test
public void triggerSequenceDoneFirst() {
StepVerifier.withVirtualTime(() -> Mono.delay(Duration.ofSeconds(2))
.flatMapMany(Flux::just)
.delayUntil(a -> Mono.just("foo")))
.expectSubscription()
.expectNoEvent(Duration.ofSeconds(2))
.expectNext(0L)
.verifyComplete();
}
@Test
public void sourceHasError() {
StepVerifier.create(Flux.<String>error(new IllegalStateException("boom"))
.delayUntil(a -> Mono.just("foo")))
.verifyErrorMessage("boom");
}
@Test
public void triggerHasError() {
StepVerifier.create(Flux.just("foo")
.delayUntil(a -> Mono.<String>error(new IllegalStateException("boom"))))
.verifyErrorMessage("boom");
}
@Test
public void sourceAndTriggerHaveErrorsNotDelayed() {
StepVerifier.create(Flux.<String>error(new IllegalStateException("boom1"))
.delayUntil(a -> Mono.<Integer>error(new IllegalStateException("boom2"))))
.verifyErrorMessage("boom1");
}
@Test
public void testAPIDelayUntil() {
StepVerifier.withVirtualTime(() -> Flux.just("foo")
.delayUntil(a -> Mono.delay(Duration.ofSeconds(2))))
.expectSubscription()
.expectNoEvent(Duration.ofSeconds(2))
.expectNext("foo")
.verifyComplete();
}
@Test
public void testAPIDelayUntilErrorsImmediately() {
IllegalArgumentException boom = new IllegalArgumentException("boom");
StepVerifier.create(Flux.error(boom)
.delayUntil(a -> Mono.delay(Duration.ofSeconds(2))))
.expectErrorMessage("boom")
.verify(Duration.ofMillis(200)); //at least, less than 2s
}
@Test
public void testAPIchainingCumulatesDelaysAfterValueGenerated() {
AtomicInteger generator1Used = new AtomicInteger();
AtomicInteger generator2Used = new AtomicInteger();
Function<String, Mono<Long>> generator1 = a -> {
generator1Used.incrementAndGet();
return Mono.delay(Duration.ofMillis(400));
};
Function<Object, Mono<Long>> generator2 = a -> {
generator2Used.incrementAndGet();
return Mono.delay(Duration.ofMillis(800));
};
StepVerifier.withVirtualTime(() -> Flux.just("foo")
.delayElements(Duration.ofSeconds(3))
.delayUntil(generator1)
.delayUntil(generator2))
.expectSubscription()
.expectNoEvent(Duration.ofMillis(2900))
.then(() -> assertThat(generator1Used.get()).isZero())
.then(() -> assertThat(generator2Used.get()).isZero())
.expectNoEvent(Duration.ofMillis(100))
.then(() -> assertThat(generator1Used).hasValue(1))
.then(() -> assertThat(generator2Used).hasValue(0))
.expectNoEvent(Duration.ofMillis(400))
.then(() -> assertThat(generator2Used).hasValue(1))
.expectNoEvent(Duration.ofMillis(800))
.expectNext("foo")
.verifyComplete();
}
@Test
public void immediateCancel() {
AtomicReference<String> value = new AtomicReference<>();
AtomicReference<Throwable> error = new AtomicReference<>();
Disposable s = Flux.just("foo", "bar")
.delayUntil(v -> Mono.just(1))
.subscribeWith(new LambdaSubscriber<>(value::set, error::set, () -> {}, Subscription::cancel));
assertThat(value.get()).isNull();
assertThat(error.get()).isNull(); //would be a NPE if trigger array wasn't pre-initialized
}
@Test
public void isAlias() {
assertThat(Flux.range(1, 10).delayUntil(a -> Mono.empty()))
.isInstanceOf(FluxConcatMapNoPrefetch.class);
}
}
| FluxDelayUntilTest |
java | apache__flink | flink-table/flink-table-runtime/src/main/java/org/apache/flink/table/runtime/dataview/StateMapView.java | {
"start": 1686,
"end": 2830
} | class ____<N, EK, EV> extends MapView<EK, EV> implements StateDataView<N> {
@Override
public Map<EK, EV> getMap() {
final Map<EK, EV> map = new HashMap<>();
try {
entries().forEach(entry -> map.put(entry.getKey(), entry.getValue()));
} catch (Exception e) {
throw new RuntimeException("Unable to collect map.", e);
}
return map;
}
@Override
public void setMap(Map<EK, EV> map) {
clear();
try {
putAll(map);
} catch (TableRuntimeException e) {
throw e;
} catch (Exception e) {
throw new RuntimeException("Unable to replace map.", e);
}
}
/**
* {@link StateMapViewWithKeysNotNull} is a {@link MapView} which implemented using state
* backend which map keys shouldn't be null. This is the default implementation for {@link
* StateMapView}.
*
* @param <N> the type of namespace
* @param <EK> the external type of the {@link MapView} key
* @param <EV> the external type of the {@link MapView} value
*/
private abstract static | StateMapView |
java | elastic__elasticsearch | server/src/test/java/org/elasticsearch/action/ingest/SimulatePipelineRequestTests.java | {
"start": 946,
"end": 2451
} | class ____ extends ESTestCase {
public void testSerialization() throws IOException {
SimulatePipelineRequest request = jsonSimulatePipelineRequest(new BytesArray(""));
// Sometimes we set an id
if (randomBoolean()) {
request.setId(randomAlphaOfLengthBetween(1, 10));
}
// Sometimes we explicitly set a boolean (with whatever value)
if (randomBoolean()) {
request.setVerbose(randomBoolean());
}
BytesStreamOutput out = new BytesStreamOutput();
request.writeTo(out);
StreamInput streamInput = out.bytes().streamInput();
SimulatePipelineRequest otherRequest = new SimulatePipelineRequest(streamInput);
assertThat(otherRequest.getId(), equalTo(request.getId()));
assertThat(otherRequest.isVerbose(), equalTo(request.isVerbose()));
}
public void testSerializationWithXContent() throws IOException {
SimulatePipelineRequest request = jsonSimulatePipelineRequest("{}");
assertEquals(XContentType.JSON, request.getXContentType());
BytesStreamOutput output = new BytesStreamOutput();
request.writeTo(output);
StreamInput in = StreamInput.wrap(output.bytes().toBytesRef().bytes);
SimulatePipelineRequest serialized = new SimulatePipelineRequest(in);
assertEquals(XContentType.JSON, serialized.getXContentType());
assertEquals("{}", serialized.getSource().utf8ToString());
}
}
| SimulatePipelineRequestTests |
java | elastic__elasticsearch | x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/GetInfluencersActionRequestTests.java | {
"start": 616,
"end": 2284
} | class ____ extends AbstractXContentSerializingTestCase<Request> {
@Override
protected Request doParseInstance(XContentParser parser) {
return GetInfluencersAction.Request.parseRequest(null, parser);
}
@Override
protected Request createTestInstance() {
Request request = new Request(randomAlphaOfLengthBetween(1, 20));
if (randomBoolean()) {
String start = randomBoolean() ? randomAlphaOfLengthBetween(1, 20) : String.valueOf(randomNonNegativeLong());
request.setStart(start);
}
if (randomBoolean()) {
String end = randomBoolean() ? randomAlphaOfLengthBetween(1, 20) : String.valueOf(randomNonNegativeLong());
request.setEnd(end);
}
if (randomBoolean()) {
request.setInfluencerScore(randomDouble());
}
if (randomBoolean()) {
request.setExcludeInterim(randomBoolean());
}
if (randomBoolean()) {
request.setSort(randomAlphaOfLengthBetween(1, 20));
}
if (randomBoolean()) {
request.setDescending(randomBoolean());
}
if (randomBoolean()) {
int from = randomInt(10000);
int size = randomInt(10000);
request.setPageParams(new PageParams(from, size));
}
return request;
}
@Override
protected Request mutateInstance(Request instance) {
return null;// TODO implement https://github.com/elastic/elasticsearch/issues/25929
}
@Override
protected Writeable.Reader<Request> instanceReader() {
return Request::new;
}
}
| GetInfluencersActionRequestTests |
java | apache__camel | components/camel-kubernetes/src/test/java/org/apache/camel/component/kubernetes/consumer/integration/events/KubernetesEventsConsumerLabelsIT.java | {
"start": 1630,
"end": 2526
} | class ____ extends KubernetesConsumerTestSupport {
@Test
public void labelsTest() throws Exception {
result.expectedBodiesReceived("Event e4 " + ns2 + " ADDED");
createEvent(ns2, "e1", null);
createEvent(ns2, "e2", Map.of("otherKey", "otherValue"));
createEvent(ns1, "e3", LABELS);
createEvent(ns2, "e4", LABELS);
result.assertIsSatisfied();
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
fromF("kubernetes-events://%s?oauthToken=%s&namespace=%s&labelKey=%s&labelValue=%s",
host, authToken, ns2, "testkey", "testvalue")
.process(new KubernetesProcessor())
.to(result);
}
};
}
}
| KubernetesEventsConsumerLabelsIT |
java | apache__avro | lang/java/avro/src/test/java/org/apache/avro/TestDataFileMeta.java | {
"start": 1314,
"end": 2771
} | class ____ {
@TempDir
public File DIR;
@Test
public void useReservedMeta() throws IOException {
try (DataFileWriter<?> w = new DataFileWriter<>(new GenericDatumWriter<>())) {
assertThrows(AvroRuntimeException.class, () -> w.setMeta("avro.foo", "bar"));
}
}
@Test
public void useMeta() throws IOException {
File f = new File(DIR, "testDataFileMeta.avro");
try (DataFileWriter<?> w = new DataFileWriter<>(new GenericDatumWriter<>())) {
w.setMeta("hello", "bar");
w.create(Schema.create(Type.NULL), f);
}
try (DataFileStream<Void> r = new DataFileStream<>(new FileInputStream(f), new GenericDatumReader<>())) {
assertTrue(r.getMetaKeys().contains("hello"));
assertEquals("bar", r.getMetaString("hello"));
}
}
@Test
public void useMetaAfterCreate() throws IOException {
try (DataFileWriter<?> w = new DataFileWriter<>(new GenericDatumWriter<>())) {
w.create(Schema.create(Type.NULL), new ByteArrayOutputStream());
assertThrows(AvroRuntimeException.class, () -> w.setMeta("foo", "bar"));
}
}
@Test
public void blockSizeSetInvalid() {
int exceptions = 0;
for (int i = -1; i < 33; i++) {
// 33 invalid, one valid
try {
new DataFileWriter<>(new GenericDatumWriter<>()).setSyncInterval(i);
} catch (IllegalArgumentException iae) {
exceptions++;
}
}
assertEquals(33, exceptions);
}
}
| TestDataFileMeta |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/mapping/inheritance/ManyToOneInheritanceSubTypeTest.java | {
"start": 6862,
"end": 6975
} | class ____ {
@Id
@GeneratedValue
private Integer id;
}
@Entity( name = "UnionA" )
public static | UnionEntity |
java | spring-projects__spring-framework | spring-orm/src/main/java/org/springframework/orm/jpa/EntityManagerRuntimeHints.java | {
"start": 1234,
"end": 3538
} | class ____ implements RuntimeHintsRegistrar {
private static final String HIBERNATE_SESSION_FACTORY_CLASS_NAME = "org.hibernate.SessionFactory";
private static final String ENTITY_MANAGER_FACTORY_CLASS_NAME = "jakarta.persistence.EntityManagerFactory";
// Up to Hibernate 7.0
private static final String QUERY_SQM_IMPL_CLASS_NAME = "org.hibernate.query.sqm.internal.QuerySqmImpl";
// As of Hibernate 7.1
private static final String SQM_QUERY_IMPL_CLASS_NAME = "org.hibernate.query.sqm.internal.SqmQueryImpl";
private static final String NATIVE_QUERY_IMPL_CLASS_NAME = "org.hibernate.query.sql.internal.NativeQueryImpl";
@Override
public void registerHints(RuntimeHints hints, @Nullable ClassLoader classLoader) {
if (ClassUtils.isPresent(HIBERNATE_SESSION_FACTORY_CLASS_NAME, classLoader)) {
hints.proxies().registerJdkProxy(TypeReference.of(HIBERNATE_SESSION_FACTORY_CLASS_NAME),
TypeReference.of(EntityManagerFactoryInfo.class));
hints.proxies().registerJdkProxy(TypeReference.of("org.hibernate.Session"),
TypeReference.of(EntityManagerProxy.class));
}
if (ClassUtils.isPresent(ENTITY_MANAGER_FACTORY_CLASS_NAME, classLoader)) {
hints.reflection().registerType(TypeReference.of(ENTITY_MANAGER_FACTORY_CLASS_NAME), builder -> {
builder.onReachableType(SharedEntityManagerCreator.class).withMethod("getCriteriaBuilder",
Collections.emptyList(), ExecutableMode.INVOKE);
builder.onReachableType(SharedEntityManagerCreator.class).withMethod("getMetamodel",
Collections.emptyList(), ExecutableMode.INVOKE);
});
}
try {
Class<?> clazz = ClassUtils.forName(QUERY_SQM_IMPL_CLASS_NAME, classLoader);
hints.proxies().registerJdkProxy(ClassUtils.getAllInterfacesForClass(clazz, classLoader));
}
catch (ClassNotFoundException ignored) {
}
try {
Class<?> clazz = ClassUtils.forName(SQM_QUERY_IMPL_CLASS_NAME, classLoader);
hints.proxies().registerJdkProxy(ClassUtils.getAllInterfacesForClass(clazz, classLoader));
}
catch (ClassNotFoundException ignored) {
}
try {
Class<?> clazz = ClassUtils.forName(NATIVE_QUERY_IMPL_CLASS_NAME, classLoader);
hints.proxies().registerJdkProxy(ClassUtils.getAllInterfacesForClass(clazz, classLoader));
}
catch (ClassNotFoundException ignored) {
}
}
}
| EntityManagerRuntimeHints |
java | spring-projects__spring-framework | spring-context/src/main/java/org/springframework/context/event/EventListener.java | {
"start": 4421,
"end": 6244
} | class ____ interface
* for all given event classes.
*/
@AliasFor("value")
Class<?>[] classes() default {};
/**
* Spring Expression Language (SpEL) expression used for making the event
* handling conditional.
* <p>The event will be handled if the expression evaluates to boolean
* {@code true} or one of the following strings: {@code "true"}, {@code "on"},
* {@code "yes"}, or {@code "1"}.
* <p>The default expression is {@code ""}, meaning the event is always handled.
* <p>The SpEL expression will be evaluated against a dedicated context that
* provides the following metadata:
* <ul>
* <li>{@code #root.event} or {@code event} for references to the
* {@link ApplicationEvent}</li>
* <li>{@code #root.args} or {@code args} for references to the method
* arguments array</li>
* <li>Method arguments can be accessed by index. For example, the first
* argument can be accessed via {@code #root.args[0]}, {@code args[0]},
* {@code #a0}, or {@code #p0}.</li>
* <li>Method arguments can be accessed by name (with a preceding hash tag)
* if parameter names are available in the compiled byte code.</li>
* </ul>
*/
String condition() default "";
/**
* Whether the event should be handled by default, without any special
* pre-conditions such as an active transaction. Declared here for overriding
* in composed annotations such as {@code TransactionalEventListener}.
* @since 6.2
*/
boolean defaultExecution() default true;
/**
* An optional identifier for the listener, defaulting to the fully-qualified
* signature of the declaring method (for example, "mypackage.MyClass.myMethod()").
* @since 5.3.5
* @see SmartApplicationListener#getListenerId()
* @see ApplicationEventMulticaster#removeApplicationListeners(Predicate)
*/
String id() default "";
}
| or |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/inheritance/discriminator/JoinedInheritanceEagerTest.java | {
"start": 3724,
"end": 3896
} | class ____ {
@Id
private Long id;
public BaseEntity() {
}
public BaseEntity(Long id) {
this.id = id;
}
}
@Entity(name = "EntityA")
public static | BaseEntity |
java | apache__kafka | tools/src/main/java/org/apache/kafka/tools/AclCommand.java | {
"start": 21551,
"end": 34301
} | class ____ extends CommandDefaultOptions {
private final OptionSpec<String> bootstrapServerOpt;
private final OptionSpec<String> bootstrapControllerOpt;
private final OptionSpec<String> commandConfigOpt;
private final OptionSpec<String> topicOpt;
private final OptionSpecBuilder clusterOpt;
private final OptionSpec<String> groupOpt;
private final OptionSpec<String> transactionalIdOpt;
private final OptionSpecBuilder idempotentOpt;
private final OptionSpec<String> delegationTokenOpt;
private final OptionSpec<PatternType> resourcePatternType;
private final OptionSpecBuilder addOpt;
private final OptionSpecBuilder removeOpt;
private final OptionSpecBuilder listOpt;
private final OptionSpec<String> operationsOpt;
private final OptionSpec<String> allowPrincipalsOpt;
private final OptionSpec<String> denyPrincipalsOpt;
private final OptionSpec<String> listPrincipalsOpt;
private final OptionSpec<String> allowHostsOpt;
private final OptionSpec<String> denyHostsOpt;
private final OptionSpecBuilder producerOpt;
private final OptionSpecBuilder consumerOpt;
private final OptionSpecBuilder forceOpt;
private final OptionSpec<String> userPrincipalOpt;
public AclCommandOptions(String[] args) {
super(args);
bootstrapServerOpt = parser.accepts("bootstrap-server", "A list of host/port pairs to use for establishing the connection to the Kafka cluster." +
" This list should be in the form host1:port1,host2:port2,... This config is required for acl management using admin client API.")
.withRequiredArg()
.describedAs("server to connect to")
.ofType(String.class);
bootstrapControllerOpt = parser.accepts("bootstrap-controller", "A list of host/port pairs to use for establishing the connection to the Kafka cluster." +
" This list should be in the form host1:port1,host2:port2,... This config is required for acl management using admin client API.")
.withRequiredArg()
.describedAs("controller to connect to")
.ofType(String.class);
commandConfigOpt = parser.accepts("command-config", "A property file containing configs to be passed to Admin Client.")
.withOptionalArg()
.describedAs("command-config")
.ofType(String.class);
topicOpt = parser.accepts("topic", "topic to which ACLs should be added or removed. " +
"A value of '*' indicates ACL should apply to all topics.")
.withRequiredArg()
.describedAs("topic")
.ofType(String.class);
clusterOpt = parser.accepts("cluster", "Add/Remove cluster ACLs.");
groupOpt = parser.accepts("group", "Consumer Group to which the ACLs should be added or removed. " +
"A value of '*' indicates the ACLs should apply to all groups.")
.withRequiredArg()
.describedAs("group")
.ofType(String.class);
transactionalIdOpt = parser.accepts("transactional-id", "The transactionalId to which ACLs should " +
"be added or removed. A value of '*' indicates the ACLs should apply to all transactionalIds.")
.withRequiredArg()
.describedAs("transactional-id")
.ofType(String.class);
idempotentOpt = parser.accepts("idempotent", "Enable idempotence for the producer. This should be " +
"used in combination with the --producer option. Note that idempotence is enabled automatically if " +
"the producer is authorized to a particular transactional-id.");
delegationTokenOpt = parser.accepts("delegation-token", "Delegation token to which ACLs should be added or removed. " +
"A value of '*' indicates ACL should apply to all tokens.")
.withRequiredArg()
.describedAs("delegation-token")
.ofType(String.class);
resourcePatternType = parser.accepts("resource-pattern-type", "The type of the resource pattern or pattern filter. " +
"When adding acls, this should be a specific pattern type, e.g. 'literal' or 'prefixed'. " +
"When listing or removing acls, a specific pattern type can be used to list or remove acls from specific resource patterns, " +
"or use the filter values of 'any' or 'match', where 'any' will match any pattern type, but will match the resource name exactly, " +
"where as 'match' will perform pattern matching to list or remove all acls that affect the supplied resource(s). " +
"WARNING: 'match', when used in combination with the '--remove' switch, should be used with care.")
.withRequiredArg()
.ofType(String.class)
.withValuesConvertedBy(new PatternTypeConverter())
.defaultsTo(PatternType.LITERAL);
addOpt = parser.accepts("add", "Indicates you are trying to add ACLs.");
removeOpt = parser.accepts("remove", "Indicates you are trying to remove ACLs.");
listOpt = parser.accepts("list", "List ACLs for the specified resource, use --topic <topic> or --group <group> or --cluster to specify a resource.");
operationsOpt = parser.accepts("operation", "Operation that is being allowed or denied. Valid operation names are: " + NL +
AclEntry.ACL_OPERATIONS.stream().map(o -> "\t" + SecurityUtils.operationName(o)).collect(Collectors.joining(NL)) + NL)
.withRequiredArg()
.ofType(String.class)
.defaultsTo(SecurityUtils.operationName(AclOperation.ALL));
allowPrincipalsOpt = parser.accepts("allow-principal", "principal is in principalType:name format." +
" Note that principalType must be supported by the Authorizer being used." +
" For example, User:'*' is the wild card indicating all users.")
.withRequiredArg()
.describedAs("allow-principal")
.ofType(String.class);
denyPrincipalsOpt = parser.accepts("deny-principal", "principal is in principalType:name format. " +
"By default anyone not added through --allow-principal is denied access. " +
"You only need to use this option as negation to already allowed set. " +
"Note that principalType must be supported by the Authorizer being used. " +
"For example if you wanted to allow access to all users in the system but not test-user you can define an ACL that " +
"allows access to User:'*' and specify --deny-principal=User:test@EXAMPLE.COM. " +
"AND PLEASE REMEMBER DENY RULES TAKES PRECEDENCE OVER ALLOW RULES.")
.withRequiredArg()
.describedAs("deny-principal")
.ofType(String.class);
listPrincipalsOpt = parser.accepts("principal", "List ACLs for the specified principal. principal is in principalType:name format." +
" Note that principalType must be supported by the Authorizer being used. Multiple --principal option can be passed.")
.withOptionalArg()
.describedAs("principal")
.ofType(String.class);
allowHostsOpt = parser.accepts("allow-host", "Host from which principals listed in --allow-principal will have access. " +
"If you have specified --allow-principal then the default for this option will be set to '*' which allows access from all hosts.")
.withRequiredArg()
.describedAs("allow-host")
.ofType(String.class);
denyHostsOpt = parser.accepts("deny-host", "Host from which principals listed in --deny-principal will be denied access. " +
"If you have specified --deny-principal then the default for this option will be set to '*' which denies access from all hosts.")
.withRequiredArg()
.describedAs("deny-host")
.ofType(String.class);
producerOpt = parser.accepts("producer", "Convenience option to add/remove ACLs for producer role. " +
"This will generate ACLs that allows WRITE,DESCRIBE and CREATE on topic.");
consumerOpt = parser.accepts("consumer", "Convenience option to add/remove ACLs for consumer role. " +
"This will generate ACLs that allows READ,DESCRIBE on topic and READ on group.");
forceOpt = parser.accepts("force", "Assume Yes to all queries and do not prompt.");
userPrincipalOpt = parser.accepts("user-principal", "Specifies a user principal as a resource in relation with the operation. For instance " +
"one could grant CreateTokens or DescribeTokens permission on a given user principal.")
.withRequiredArg()
.describedAs("user-principal")
.ofType(String.class);
try {
options = parser.parse(args);
} catch (OptionException e) {
CommandLineUtils.printUsageAndExit(parser, e.getMessage());
}
checkArgs();
}
void checkArgs() {
CommandLineUtils.maybePrintHelpOrVersion(this, "This tool helps to manage acls on kafka.");
if (options.has(bootstrapServerOpt) && options.has(bootstrapControllerOpt)) {
CommandLineUtils.printUsageAndExit(parser, "Only one of --bootstrap-server or --bootstrap-controller must be specified");
}
if (!options.has(bootstrapServerOpt) && !options.has(bootstrapControllerOpt)) {
CommandLineUtils.printUsageAndExit(parser, "One of --bootstrap-server or --bootstrap-controller must be specified");
}
List<AbstractOptionSpec<?>> mutuallyExclusiveOptions = List.of(addOpt, removeOpt, listOpt);
long mutuallyExclusiveOptionsCount = mutuallyExclusiveOptions.stream()
.filter(abstractOptionSpec -> options.has(abstractOptionSpec))
.count();
if (mutuallyExclusiveOptionsCount != 1) {
CommandLineUtils.printUsageAndExit(parser, "Command must include exactly one action: --list, --add, --remove. ");
}
CommandLineUtils.checkInvalidArgs(parser, options, listOpt, producerOpt, consumerOpt, allowHostsOpt, allowPrincipalsOpt, denyHostsOpt, denyPrincipalsOpt);
//when --producer or --consumer is specified , user should not specify operations as they are inferred and we also disallow --deny-principals and --deny-hosts.
CommandLineUtils.checkInvalidArgs(parser, options, producerOpt, operationsOpt, denyPrincipalsOpt, denyHostsOpt);
CommandLineUtils.checkInvalidArgs(parser, options, consumerOpt, operationsOpt, denyPrincipalsOpt, denyHostsOpt);
if (options.has(listPrincipalsOpt) && !options.has(listOpt)) {
CommandLineUtils.printUsageAndExit(parser, "The --principal option is only available if --list is set");
}
if (options.has(producerOpt) && !options.has(topicOpt)) {
CommandLineUtils.printUsageAndExit(parser, "With --producer you must specify a --topic");
}
if (options.has(idempotentOpt) && !options.has(producerOpt)) {
CommandLineUtils.printUsageAndExit(parser, "The --idempotent option is only available if --producer is set");
}
if (options.has(consumerOpt) && (!options.has(topicOpt) || !options.has(groupOpt) || (!options.has(producerOpt) && (options.has(clusterOpt) || options.has(transactionalIdOpt))))) {
CommandLineUtils.printUsageAndExit(parser, "With --consumer you must specify a --topic and a --group and no --cluster or --transactional-id option should be specified.");
}
}
}
static | AclCommandOptions |
java | grpc__grpc-java | services/src/main/java/io/grpc/protobuf/services/BinaryLogProviderImpl.java | {
"start": 958,
"end": 3103
} | class ____ extends BinaryLogProvider {
// avoid using 0 because proto3 long fields default to 0 when unset
private static final AtomicLong counter = new AtomicLong(1);
private final BinlogHelper.Factory factory;
private final BinaryLogSink sink;
public BinaryLogProviderImpl() throws IOException {
this(new TempFileSink(), System.getenv("GRPC_BINARY_LOG_CONFIG"));
}
/**
* Deprecated and will be removed in a future version of gRPC.
*/
@Deprecated
@SuppressWarnings("InlineMeSuggester") // Only called internally; don't care
public BinaryLogProviderImpl(BinaryLogSink sink) throws IOException {
this(sink, System.getenv("GRPC_BINARY_LOG_CONFIG"));
}
/**
* Creates an instance.
* @param sink ownership is transferred to this class.
* @param configStr config string to parse to determine logged methods and msg size limits.
* @throws IOException if initialization failed.
*/
public BinaryLogProviderImpl(BinaryLogSink sink, String configStr) throws IOException {
this.sink = Preconditions.checkNotNull(sink);
try {
factory = new BinlogHelper.FactoryImpl(sink, configStr);
} catch (RuntimeException e) {
sink.close();
// parsing the conf string may throw if it is blank or contains errors
throw new IOException(
"Can not initialize. The env variable GRPC_BINARY_LOG_CONFIG must be valid.", e);
}
}
@Nullable
@Override
public ServerInterceptor getServerInterceptor(String fullMethodName) {
BinlogHelper helperForMethod = factory.getLog(fullMethodName);
if (helperForMethod == null) {
return null;
}
return helperForMethod.getServerInterceptor(counter.getAndIncrement());
}
@Nullable
@Override
public ClientInterceptor getClientInterceptor(
String fullMethodName, CallOptions callOptions) {
BinlogHelper helperForMethod = factory.getLog(fullMethodName);
if (helperForMethod == null) {
return null;
}
return helperForMethod.getClientInterceptor(counter.getAndIncrement());
}
@Override
public void close() throws IOException {
sink.close();
}
}
| BinaryLogProviderImpl |
java | hibernate__hibernate-orm | hibernate-envers/src/test/java/org/hibernate/orm/test/envers/integration/onetoone/bidirectional/ids/BiMulIdRefEdEntity.java | {
"start": 501,
"end": 2102
} | class ____ {
@Id
private Integer id1;
@Id
private Integer id2;
@Audited
private String data;
@Audited
@OneToOne(mappedBy = "reference")
private BiMulIdRefIngEntity referencing;
public BiMulIdRefEdEntity() {
}
public BiMulIdRefEdEntity(Integer id1, Integer id2, String data) {
this.id1 = id1;
this.id2 = id2;
this.data = data;
}
public Integer getId1() {
return id1;
}
public void setId1(Integer id1) {
this.id1 = id1;
}
public Integer getId2() {
return id2;
}
public void setId2(Integer id2) {
this.id2 = id2;
}
public String getData() {
return data;
}
public void setData(String data) {
this.data = data;
}
public BiMulIdRefIngEntity getReferencing() {
return referencing;
}
public void setReferencing(BiMulIdRefIngEntity referencing) {
this.referencing = referencing;
}
public boolean equals(Object o) {
if ( this == o ) {
return true;
}
if ( !(o instanceof BiMulIdRefEdEntity) ) {
return false;
}
BiMulIdRefEdEntity that = (BiMulIdRefEdEntity) o;
if ( data != null ? !data.equals( that.getData() ) : that.getData() != null ) {
return false;
}
if ( id1 != null ? !id1.equals( that.getId1() ) : that.getId1() != null ) {
return false;
}
if ( id2 != null ? !id2.equals( that.getId2() ) : that.getId2() != null ) {
return false;
}
return true;
}
public int hashCode() {
int result;
result = (id1 != null ? id1.hashCode() : 0);
result = 31 * result + (id2 != null ? id2.hashCode() : 0);
result = 31 * result + (data != null ? data.hashCode() : 0);
return result;
}
}
| BiMulIdRefEdEntity |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/java/org/apache/hadoop/hdfs/server/federation/store/StateStoreUtils.java | {
"start": 3056,
"end": 3165
} | class ____ the data record.
* @param clazz Class of the data record to check.
* @return Name of the base | of |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSParametersProvider.java | {
"start": 11384,
"end": 11732
} | class ____ extends BooleanParam {
/**
* Parameter name.
*/
public static final String NAME = HttpFSFileSystem.OVERWRITE_PARAM;
/**
* Constructor.
*/
public OverwriteParam() {
super(NAME, true);
}
}
/**
* Class for owner parameter.
*/
@InterfaceAudience.Private
public static | OverwriteParam |
java | apache__camel | components/camel-zipfile/src/test/java/org/apache/camel/processor/aggregate/zipfile/ZipSplitAggregateTransactedIssueTest.java | {
"start": 1545,
"end": 4622
} | class ____ extends CamelTestSupport {
private static final Logger LOG = LoggerFactory.getLogger(ZipSplitAggregateTransactedIssueTest.class);
String zipArchiveWithTwoFiles
= "UEsDBBQAAAAIAFlrtFDFAfecUAAAAB4BAAALAAAAT3JkZXJzMS54bWyzyS9KSS0qtuPl4oQwQSxOm8wUOxMb/cwUCK+gKD+lNLkEzOG0yUvMTbWDCik42uiD+WB1+kgKbfThxqEZbEqUwU6kG2xGlMHOhA2GsortAFBLAwQUAAAACABBW9hQgBf0tVgAAAAqAQAACwAAAE9yZGVyczIueG1ss8kvSkktKrbj5eKEMEEsTpvMFDtDQ0Mb/cwUCL+gKD+lNLkEzOG0yUvMTbWDCimA1YFFwCr1kZTa6MONRDPcyMiIKMPB6kg13NjYmCjDweoIGQ5lFdsBAFBLAQIfABQAAAAIAFlrtFDFAfecUAAAAB4BAAALACQAAAAAAAAAIAAAAAAAAABPcmRlcnMxLnhtbAoAIAAAAAAAAQAYAAD57I2ZLtYBg97kuHn02gEA+eyNmS7WAVBLAQIfABQAAAAIAEFb2FCAF/S1WAAAACoBAAALACQAAAAAAAAAIAAAAHkAAABPcmRlcnMyLnhtbAoAIAAAAAAAAQAYAAAxPXoJStYBjn3iuHn02gEAMT16CUrWAVBLBQYAAAAAAgACALoAAAD6AAAAAAA=";
@Test
public void testIfAllSplitsAggregated() throws Exception {
MockEndpoint mock = getMockEndpoint("mock:result");
template.sendBody("direct:start", "");
mock.assertIsSatisfied();
// Check if second file was processed in aggregate() method of AggregationStrategy
assertEquals("Orders2.xml", mock.getExchanges().get(0).getMessage().getHeader("CamelFileName", String.class));
}
@Override
protected RouteBuilder createRouteBuilder() throws Exception {
return new RouteBuilder() {
@Override
public void configure() throws Exception {
JdbcDataSource dataSource = new JdbcDataSource();
dataSource.setURL("jdbc:h2:mem:testdb;DB_CLOSE_DELAY=-1");
dataSource.setUser("sa");
dataSource.setPassword("");
DataSourceTransactionManager txManager = new DataSourceTransactionManager(dataSource);
TransactionTemplate transactionTemplate = new TransactionTemplate(txManager);
transactionTemplate.setPropagationBehaviorName("PROPAGATION_REQUIRED");
transactionTemplate.setIsolationLevelName("ISOLATION_READ_COMMITTED");
transactionTemplate.setTimeout(1800);
SpringTransactionPolicy springTransactionPolicy = new SpringTransactionPolicy();
springTransactionPolicy.setTransactionManager(txManager);
springTransactionPolicy.setTransactionTemplate(transactionTemplate);
getContext().getRegistry().bind("transacted", springTransactionPolicy);
getContext().getRegistry().bind("zipSplitter", new ZipSplitter());
from("direct:start").streamCache(false)
.transacted("transacted")
.setBody().simple(zipArchiveWithTwoFiles)
.unmarshal().base64()
.split().ref("zipSplitter").aggregationStrategy(new StringAggregationStrategy())
.log("Splitting ${header.CamelFileName}")
.end()
.to("mock:result");
}
};
}
private static | ZipSplitAggregateTransactedIssueTest |
java | ReactiveX__RxJava | src/main/java/io/reactivex/rxjava3/internal/operators/observable/ObservableConcatWithMaybe.java | {
"start": 1210,
"end": 1661
} | class ____<T> extends AbstractObservableWithUpstream<T, T> {
final MaybeSource<? extends T> other;
public ObservableConcatWithMaybe(Observable<T> source, MaybeSource<? extends T> other) {
super(source);
this.other = other;
}
@Override
protected void subscribeActual(Observer<? super T> observer) {
source.subscribe(new ConcatWithObserver<>(observer, other));
}
static final | ObservableConcatWithMaybe |
java | apache__camel | components/camel-azure/camel-azure-cosmosdb/src/main/java/org/apache/camel/component/azure/cosmosdb/CosmosDbConfigurationOptionsProxy.java | {
"start": 1072,
"end": 1270
} | class ____ {@link CosmosDbConfiguration} and {@link CosmosDbConstants}. Ideally this is responsible to obtain
* the correct configurations options either from configs or exchange headers
*/
public | for |
java | playframework__playframework | core/play/src/main/java/play/Application.java | {
"start": 361,
"end": 2192
} | interface ____ {
/**
* Get the underlying Scala application.
*
* @return the application
* @see Application#asScala() method
* @deprecated Use {@link #asScala()} instead.
*/
@Deprecated
play.api.Application getWrappedApplication();
/**
* Get the application as a Scala application.
*
* @return this application as a Scala application.
* @see play.api.Application
*/
play.api.Application asScala();
/**
* Get the application environment.
*
* @return the environment.
*/
Environment environment();
/**
* Get the application configuration.
*
* @return the configuration
*/
Config config();
/**
* Get the runtime injector for this application. In a runtime dependency injection based
* application, this can be used to obtain components as bound by the DI framework.
*
* @return the injector
*/
Injector injector();
/**
* Get the application path.
*
* @return the application path
*/
default File path() {
return asScala().path();
}
/**
* Get the application classloader.
*
* @return the application classloader
*/
default ClassLoader classloader() {
return asScala().classloader();
}
/**
* Check whether the application is in {@link Mode#DEV} mode.
*
* @return true if the application is in DEV mode
*/
default boolean isDev() {
return asScala().isDev();
}
/**
* Check whether the application is in {@link Mode#PROD} mode.
*
* @return true if the application is in PROD mode
*/
default boolean isProd() {
return asScala().isProd();
}
/**
* Check whether the application is in {@link Mode#TEST} mode.
*
* @return true if the application is in TEST mode
*/
default boolean isTest() {
return asScala().isTest();
}
}
| Application |
java | apache__camel | dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/CassandraEndpointBuilderFactory.java | {
"start": 50813,
"end": 51558
} | class ____ implements logic for converting ResultSet
* into message body ALL, ONE, LIMIT_10, LIMIT_100...
*
* The option is a:
* <code>org.apache.camel.component.cassandra.ResultSetConversionStrategy</code> type.
*
* Group: advanced
*
* @param resultSetConversionStrategy the value to set
* @return the dsl builder
*/
default AdvancedCassandraEndpointBuilder resultSetConversionStrategy(org.apache.camel.component.cassandra.ResultSetConversionStrategy resultSetConversionStrategy) {
doSetProperty("resultSetConversionStrategy", resultSetConversionStrategy);
return this;
}
/**
* To use a custom | that |
java | apache__flink | flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/SlideWithSizeAndSlideOnTimeWithAlias.java | {
"start": 1109,
"end": 1667
} | class ____ extends GroupWindow {
private final Expression size;
private final Expression slide;
SlideWithSizeAndSlideOnTimeWithAlias(
Expression alias, Expression timeField, Expression size, Expression slide) {
super(alias, timeField);
this.size = ApiExpressionUtils.unwrapFromApi(size);
this.slide = ApiExpressionUtils.unwrapFromApi(slide);
}
public Expression getSize() {
return size;
}
public Expression getSlide() {
return slide;
}
}
| SlideWithSizeAndSlideOnTimeWithAlias |
java | spring-projects__spring-framework | spring-test/src/test/java/org/springframework/test/web/servlet/setup/StandaloneMockMvcBuilderTests.java | {
"start": 6495,
"end": 6725
} | class ____ {
@RequestMapping(value="/persons")
public String persons() {
return null;
}
@RequestMapping(value="/forward")
public String forward() {
return "forward:/persons";
}
}
private static | PersonController |
java | apache__flink | flink-tests/src/test/java/org/apache/flink/test/classloading/jar/CustomInputSplitProgram.java | {
"start": 4594,
"end": 5545
} | class ____ implements InputSplitAssigner {
private final List<CustomInputSplit> remainingSplits;
public CustomSplitAssigner(CustomInputSplit[] splits) {
this.remainingSplits = new ArrayList<CustomInputSplit>(Arrays.asList(splits));
}
@Override
public InputSplit getNextInputSplit(String host, int taskId) {
synchronized (this) {
int size = remainingSplits.size();
if (size > 0) {
return remainingSplits.remove(size - 1);
} else {
return null;
}
}
}
@Override
public void returnInputSplit(List<InputSplit> splits, int taskId) {
synchronized (this) {
for (InputSplit split : splits) {
remainingSplits.add((CustomInputSplit) split);
}
}
}
}
}
| CustomSplitAssigner |
java | spring-projects__spring-security | core/src/main/java/org/springframework/security/core/SpringSecurityMessageSource.java | {
"start": 1158,
"end": 1303
} | class ____ only used when the deployment environment has not specified an alternative
* message source.
* </p>
*
* @author Ben Alex
*/
public | is |
java | spring-projects__spring-boot | module/spring-boot-webclient-test/src/test/java/org/springframework/boot/webclient/test/autoconfigure/MockWebServerConfiguration.java | {
"start": 1233,
"end": 1907
} | class ____ implements DisposableBean, WebClientCustomizer {
private final MockWebServer mockWebServer = new MockWebServer();
MockWebServerConfiguration() {
try {
this.mockWebServer.start();
}
catch (IOException ex) {
throw new UncheckedIOException(ex);
}
}
@Override
public void destroy() {
try {
this.mockWebServer.shutdown();
}
catch (IOException ex) {
throw new UncheckedIOException(ex);
}
}
@Override
public void customize(WebClient.Builder webClientBuilder) {
webClientBuilder.baseUrl(this.mockWebServer.url("/").toString());
}
@Bean
MockWebServer mockWebServer() {
return this.mockWebServer;
}
}
| MockWebServerConfiguration |
java | apache__dubbo | dubbo-rpc/dubbo-rpc-api/src/test/java/org/apache/dubbo/rpc/filter/ClassLoaderFilterTest.java | {
"start": 1314,
"end": 2890
} | class ____ {
private ClassLoaderFilter classLoaderFilter = new ClassLoaderFilter();
@Test
void testInvoke() throws Exception {
URL url = URL.valueOf("test://test:11/test?accesslog=true&group=dubbo&version=1.1");
String path = DemoService.class.getResource("/").getPath();
final URLClassLoader cl = new URLClassLoader(new java.net.URL[] {new java.net.URL("file:" + path)}) {
@Override
public Class<?> loadClass(String name) throws ClassNotFoundException {
try {
return findClass(name);
} catch (ClassNotFoundException e) {
return super.loadClass(name);
}
}
};
final Class<?> clazz = cl.loadClass(DemoService.class.getCanonicalName());
Invoker invoker = new MyInvoker(url) {
@Override
public Class getInterface() {
return clazz;
}
@Override
public Result invoke(Invocation invocation) throws RpcException {
Assertions.assertEquals(cl, Thread.currentThread().getContextClassLoader());
return null;
}
};
Invocation invocation = Mockito.mock(Invocation.class);
ServiceModel serviceModel = Mockito.mock(ServiceModel.class);
Mockito.when(serviceModel.getClassLoader()).thenReturn(cl);
Mockito.when(invocation.getServiceModel()).thenReturn(serviceModel);
classLoaderFilter.invoke(invoker, invocation);
}
}
| ClassLoaderFilterTest |
java | elastic__elasticsearch | plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/Ec2NameResolver.java | {
"start": 1169,
"end": 1236
} | class ____ implements CustomNameResolver {
private | Ec2NameResolver |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/DoNotCallCheckerTest.java | {
"start": 6255,
"end": 6503
} | interface ____ {
@DoNotCall
public void f();
}
""")
.addSourceLines(
"B.java",
"""
import com.google.errorprone.annotations.DoNotCall;
public | A |
java | apache__thrift | lib/java/src/main/java/org/apache/thrift/protocol/TMap.java | {
"start": 907,
"end": 1320
} | class ____ {
public TMap() {
this(TType.STOP, TType.STOP, 0);
}
public TMap(byte k, byte v, int s) {
keyType = k;
valueType = v;
size = s;
}
public final byte keyType;
public final byte valueType;
public final int size;
public byte getKeyType() {
return keyType;
}
public byte getValueType() {
return valueType;
}
public int getSize() {
return size;
}
}
| TMap |
java | hibernate__hibernate-orm | hibernate-envers/src/test/java/org/hibernate/orm/test/envers/integration/naming/VersionsJoinTableRangeTestEntity.java | {
"start": 556,
"end": 2045
} | class ____ extends
VersionsJoinTableRangeTestEntitySuperClass {
@Column(name = "val")
private String value;
/**
* Default constructor
*/
public VersionsJoinTableRangeTestEntity() {
super();
}
/**
* @return the value
*/
public String getValue() {
return value;
}
/**
* @param value the value to set
*/
public void setValue(String value) {
this.value = value;
}
@Override
public int hashCode() {
final int prime = 31;
int result = super.hashCode();
result = prime * result + ((value == null) ? 0 : value.hashCode());
return result;
}
@Override
public boolean equals(Object obj) {
if ( this == obj ) {
return true;
}
if ( !super.equals( obj ) ) {
return false;
}
if ( getClass() != obj.getClass() ) {
return false;
}
VersionsJoinTableRangeTestEntity other = (VersionsJoinTableRangeTestEntity) obj;
if ( value == null ) {
if ( other.value != null ) {
return false;
}
}
else if ( !value.equals( other.value ) ) {
return false;
}
return true;
}
@Override
public String toString() {
StringBuilder output = new StringBuilder();
output.append( "VersionsJoinTableRangeComponentTestEntity {" );
output.append( " id = \"" ).append( getId() ).append( "\", " );
output.append( " genericValue = \"" ).append( getGenericValue() ).append(
"\", "
);
output.append( " value = \"" ).append( this.value ).append( "\"}" );
return output.toString();
}
}
| VersionsJoinTableRangeTestEntity |
java | spring-projects__spring-boot | module/spring-boot-cloudfoundry/src/main/java/org/springframework/boot/cloudfoundry/autoconfigure/actuate/endpoint/servlet/CloudFoundryActuatorAutoConfiguration.java | {
"start": 9121,
"end": 10377
} | class ____ {
private static final int FILTER_CHAIN_ORDER = -1;
@Bean
@Order(FILTER_CHAIN_ORDER)
SecurityFilterChain cloudFoundrySecurityFilterChain(HttpSecurity http,
CloudFoundryWebEndpointServletHandlerMapping handlerMapping) {
RequestMatcher cloudFoundryRequest = getRequestMatcher(handlerMapping);
http.csrf((csrf) -> csrf.ignoringRequestMatchers(cloudFoundryRequest));
http.securityMatchers((matches) -> matches.requestMatchers(cloudFoundryRequest))
.authorizeHttpRequests((authorize) -> authorize.anyRequest().permitAll());
return http.build();
}
private RequestMatcher getRequestMatcher(CloudFoundryWebEndpointServletHandlerMapping handlerMapping) {
PathMappedEndpoints endpoints = new PathMappedEndpoints(BASE_PATH, handlerMapping::getAllEndpoints);
List<RequestMatcher> matchers = new ArrayList<>();
endpoints.getAllPaths().forEach((path) -> matchers.add(pathMatcher(path + "/**")));
matchers.add(pathMatcher(BASE_PATH));
matchers.add(pathMatcher(BASE_PATH + "/"));
return new OrRequestMatcher(matchers);
}
private PathPatternRequestMatcher pathMatcher(String path) {
return PathPatternRequestMatcher.withDefaults().matcher(path);
}
}
}
| IgnoredCloudFoundryPathsWebSecurityConfiguration |
java | assertj__assertj-core | assertj-core/src/main/java/org/assertj/core/api/recursive/assertion/RecursiveAssertionConfiguration.java | {
"start": 17687,
"end": 18988
} | class ____ {
* public int primitiveField;
* public String objectField;
* } </code></pre>
* <p>
* By default, the assertion being applied recursively is applied to <code>primitiveField</code> and to
* <code>objectField</code>. If ignoring primitives it set to true, the assertion will only be applied to <code>objectField</code>.
* <p>
* If you elect to assert over primitives then it is your own responsibility as a developer to ensure that your
* {@link Predicate} can handle (boxed) primitive arguments.</p>
*
* @param ignorePrimitiveFields <code>true</code> to avoid asserting over primitives, <code>false</code> to enable.
* @return This builder.
*/
public Builder withIgnorePrimitiveFields(final boolean ignorePrimitiveFields) {
this.ignorePrimitiveFields = ignorePrimitiveFields;
return this;
}
/**
* <p>Choose whether or not, while recursively applying a {@link Predicate} to an object tree, the recursion will dive into
* types defined in the Java Class Library. That is to say, whether or not to recurse into objects whose classes are
* declared in a package starting with java.* or javax.* .</p>
* <p>Consider the following example:</p>
* <pre><code style='java'> | Example |
java | apache__flink | flink-streaming-java/src/test/java/org/apache/flink/streaming/runtime/operators/windowing/ContinuousProcessingTimeTriggerTest.java | {
"start": 3282,
"end": 14045
} | class ____
implements WindowFunction<Integer, WindowedInteger, Byte, TimeWindow> {
@Override
public void apply(
Byte key,
TimeWindow window,
Iterable<Integer> input,
Collector<WindowedInteger> out)
throws Exception {
int sum =
StreamSupport.stream(input.spliterator(), false)
.mapToInt(Integer::intValue)
.sum();
out.collect(new WindowedInteger(window, sum));
}
}
/** Verify ContinuousProcessingTimeTrigger fire. */
@Test
void testProcessingTimeWindowFiring() throws Exception {
    // Continuous trigger that fires every 5ms of processing time.
    ContinuousProcessingTimeTrigger<TimeWindow> continuousTrigger =
            ContinuousProcessingTimeTrigger.of(Duration.ofMillis(5));
    assertThat(continuousTrigger.canMerge()).isTrue();

    ListStateDescriptor<Integer> windowContentsDescriptor =
            new ListStateDescriptor<>(
                    "window-contents",
                    BasicTypeInfo.INT_TYPE_INFO.createSerializer(new SerializerConfigImpl()));

    WindowOperatorFactory<Byte, Integer, Iterable<Integer>, WindowedInteger, TimeWindow>
            operatorFactory =
                    new WindowOperatorFactory<>(
                            TumblingProcessingTimeWindows.of(Duration.ofMillis(10)),
                            new TimeWindow.Serializer(),
                            new NullByteKeySelector<>(),
                            BasicTypeInfo.BYTE_TYPE_INFO.createSerializer(
                                    new SerializerConfigImpl()),
                            windowContentsDescriptor,
                            new InternalIterableWindowFunction<>(
                                    new IntegerSumWindowFunction()),
                            continuousTrigger,
                            0,
                            null);

    KeyedOneInputStreamOperatorTestHarness<Byte, Integer, WindowedInteger> harness =
            new KeyedOneInputStreamOperatorTestHarness<>(
                    operatorFactory,
                    operatorFactory.getKeySelector(),
                    BasicTypeInfo.BYTE_TYPE_INFO);
    ArrayDeque<Object> expected = new ArrayDeque<>();
    harness.open();

    // Two elements land in the tumbling processing-time window [0, 10).
    harness.getProcessingTimeService().setCurrentTime(0);
    harness.processElement(1, NO_TIMESTAMP);
    harness.getProcessingTimeService().setCurrentTime(2);
    harness.processElement(2, NO_TIMESTAMP);

    // First periodic firing at t=5 emits the partial sum 1 + 2 = 3.
    harness.getProcessingTimeService().setCurrentTime(5);
    expected.add(new StreamRecord<>(new WindowedInteger(new TimeWindow(0, 10), 3), 9));
    TestHarnessUtil.assertOutputEquals("Output mismatch", expected, harness.getOutput());

    // A third element arrives in [0, 10) before the window closes.
    harness.getProcessingTimeService().setCurrentTime(7);
    harness.processElement(3, NO_TIMESTAMP);

    // Next periodic firing at t=9 emits the updated sum 3 + 3 = 6.
    harness.getProcessingTimeService().setCurrentTime(9);
    expected.add(new StreamRecord<>(new WindowedInteger(new TimeWindow(0, 10), 6), 9));
    TestHarnessUtil.assertOutputEquals("Output mismatch", expected, harness.getOutput());

    // An element at t=10 belongs to the next window [10, 20).
    harness.getProcessingTimeService().setCurrentTime(10);
    harness.processElement(3, NO_TIMESTAMP);

    // Periodic firing at t=15 emits the sum 3 for window [10, 20).
    harness.getProcessingTimeService().setCurrentTime(15);
    expected.add(new StreamRecord<>(new WindowedInteger(new TimeWindow(10, 20), 3), 19));
    TestHarnessUtil.assertOutputEquals("Output mismatch", expected, harness.getOutput());

    // One more element in [10, 20); firing at t=20 emits 3 + 3 = 6.
    harness.getProcessingTimeService().setCurrentTime(18);
    harness.processElement(3, NO_TIMESTAMP);
    harness.getProcessingTimeService().setCurrentTime(20);
    expected.add(new StreamRecord<>(new WindowedInteger(new TimeWindow(10, 20), 6), 19));
    TestHarnessUtil.assertOutputEquals("Output mismatch", expected, harness.getOutput());
}
@Test
void testEventTimeWindowFiring() throws Exception {
    // A processing-time based continuous trigger can also be attached to
    // event-time windows: elements are assigned to windows by their event
    // timestamps, while partial results are still emitted every 5ms of
    // processing time.
    // NOTE: dropped the redundant "public" modifier to match the
    // package-private JUnit 5 style of the other tests in this class.
    ContinuousProcessingTimeTrigger<TimeWindow> trigger =
            ContinuousProcessingTimeTrigger.of(Duration.ofMillis(5));
    ListStateDescriptor<Integer> stateDesc =
            new ListStateDescriptor<>(
                    "window-contents",
                    BasicTypeInfo.INT_TYPE_INFO.createSerializer(new SerializerConfigImpl()));
    WindowOperatorFactory<Byte, Integer, Iterable<Integer>, WindowedInteger, TimeWindow>
            operator =
                    new WindowOperatorFactory<>(
                            TumblingEventTimeWindows.of(Duration.ofMillis(10)),
                            new TimeWindow.Serializer(),
                            new NullByteKeySelector<>(),
                            BasicTypeInfo.BYTE_TYPE_INFO.createSerializer(
                                    new SerializerConfigImpl()),
                            stateDesc,
                            new InternalIterableWindowFunction<>(
                                    new IntegerSumWindowFunction()),
                            trigger,
                            0,
                            null);
    KeyedOneInputStreamOperatorTestHarness<Byte, Integer, WindowedInteger> testHarness =
            new KeyedOneInputStreamOperatorTestHarness<>(
                    operator, operator.getKeySelector(), BasicTypeInfo.BYTE_TYPE_INFO);
    ArrayDeque<Object> expectedOutput = new ArrayDeque<>();
    testHarness.open();
    testHarness.getProcessingTimeService().setCurrentTime(0);
    // All three elements fall into event-time window [0, 10).
    testHarness.processElement(1, 1);
    testHarness.processElement(2, 3);
    testHarness.processElement(3, 7);
    // Periodic firing at processing time t=5 emits the partial sum 1+2+3=6.
    testHarness.getProcessingTimeService().setCurrentTime(5);
    expectedOutput.add(new StreamRecord<>(new WindowedInteger(new TimeWindow(0, 10), 6), 9));
    TestHarnessUtil.assertOutputEquals(
            "Output mismatch", expectedOutput, testHarness.getOutput());
    // A late-arriving (in processing time) element still joins [0, 10).
    testHarness.processElement(3, 8);
    // Firing at t=10 emits the updated sum 1+2+3+3=9.
    testHarness.getProcessingTimeService().setCurrentTime(10);
    expectedOutput.add(new StreamRecord<>(new WindowedInteger(new TimeWindow(0, 10), 9), 9));
    TestHarnessUtil.assertOutputEquals(
            "Output mismatch", expectedOutput, testHarness.getOutput());
    // An element with event timestamp 12 starts window [10, 20).
    testHarness.processElement(3, 12);
    // Firing at t=15 emits the sum 3 for window [10, 20).
    testHarness.getProcessingTimeService().setCurrentTime(15);
    expectedOutput.add(new StreamRecord<>(new WindowedInteger(new TimeWindow(10, 20), 3), 19));
    TestHarnessUtil.assertOutputEquals(
            "Output mismatch", expectedOutput, testHarness.getOutput());
}
@Test
void testMergingWindows() throws Exception {
// Processing-time session windows merge when their gaps overlap, so this
// test exercises the trigger's behavior across a window merge in addition
// to its periodic firing. The trigger must be mergeable to be usable here.
ContinuousProcessingTimeTrigger<TimeWindow> trigger =
ContinuousProcessingTimeTrigger.of(Duration.ofMillis(5));
assertThat(trigger.canMerge()).isTrue();
ListStateDescriptor<Integer> stateDesc =
new ListStateDescriptor<>(
"window-contents",
BasicTypeInfo.INT_TYPE_INFO.createSerializer(new SerializerConfigImpl()));
WindowOperatorFactory<Byte, Integer, Iterable<Integer>, WindowedInteger, TimeWindow>
operator =
new WindowOperatorFactory<>(
ProcessingTimeSessionWindows.withGap(Duration.ofMillis(10)),
new TimeWindow.Serializer(),
new NullByteKeySelector<>(),
BasicTypeInfo.BYTE_TYPE_INFO.createSerializer(
new SerializerConfigImpl()),
stateDesc,
new InternalIterableWindowFunction<>(
new IntegerSumWindowFunction()),
trigger,
0,
null);
KeyedOneInputStreamOperatorTestHarness<Byte, Integer, WindowedInteger> testHarness =
new KeyedOneInputStreamOperatorTestHarness<>(
operator, operator.getKeySelector(), BasicTypeInfo.BYTE_TYPE_INFO);
ArrayDeque<Object> expectedOutput = new ArrayDeque<>();
testHarness.open();
// First element at t=0 opens session window [0, 10).
testHarness.getProcessingTimeService().setCurrentTime(0);
testHarness.processElement(1, NO_TIMESTAMP);
// Second element at t=2 opens [2, 12), which overlaps [0, 10), so the
// two sessions merge into [0, 12).
// window [2, 12) ==> [0, 12)
testHarness.getProcessingTimeService().setCurrentTime(2);
testHarness.processElement(2, NO_TIMESTAMP);
// Merged timer should still fire: at t=5 the merged window [0, 12)
// emits the partial sum 1 + 2 = 3 (record timestamp = maxTimestamp 11).
testHarness.getProcessingTimeService().setCurrentTime(5);
expectedOutput.add(new StreamRecord<>(new WindowedInteger(new TimeWindow(0, 12), 3), 11));
TestHarnessUtil.assertOutputEquals(
"Output mismatch", expectedOutput, testHarness.getOutput());
// Merged window should work as normal: at t=9 no firing is due yet,
// so the output is unchanged.
testHarness.getProcessingTimeService().setCurrentTime(9);
TestHarnessUtil.assertOutputEquals(
"Output mismatch", expectedOutput, testHarness.getOutput());
// At t=10 the next periodic firing re-emits the (unchanged) sum 3.
testHarness.getProcessingTimeService().setCurrentTime(10);
expectedOutput.add(new StreamRecord<>(new WindowedInteger(new TimeWindow(0, 12), 3), 11));
TestHarnessUtil.assertOutputEquals(
"Output mismatch", expectedOutput, testHarness.getOutput());
// Firing on time: advancing past the session end emits the final result
// for [0, 12) once more.
testHarness.getProcessingTimeService().setCurrentTime(15);
expectedOutput.add(new StreamRecord<>(new WindowedInteger(new TimeWindow(0, 12), 3), 11));
TestHarnessUtil.assertOutputEquals(
"Output mismatch", expectedOutput, testHarness.getOutput());
// Window is dropped already: advancing far beyond the session produces
// no further output.
testHarness.getProcessingTimeService().setCurrentTime(100);
TestHarnessUtil.assertOutputEquals(
"Output mismatch", expectedOutput, testHarness.getOutput());
}
}
| IntegerSumWindowFunction |
java | google__guava | android/guava-tests/test/com/google/common/primitives/ImmutableLongArrayTest.java | {
"start": 2070,
"end": 6696
} | class ____ extends TestCase {
// Test all creation paths very lazily: by assuming asList() works
public void testOf0() {
assertThat(ImmutableLongArray.of().asList()).isEmpty();
}
public void testOf1() {
assertThat(ImmutableLongArray.of(0).asList()).containsExactly(0L);
}
public void testOf2() {
assertThat(ImmutableLongArray.of(0, 1).asList()).containsExactly(0L, 1L).inOrder();
}
public void testOf3() {
assertThat(ImmutableLongArray.of(0, 1, 3).asList()).containsExactly(0L, 1L, 3L).inOrder();
}
public void testOf4() {
assertThat(ImmutableLongArray.of(0, 1, 3, 6).asList())
.containsExactly(0L, 1L, 3L, 6L)
.inOrder();
}
public void testOf5() {
assertThat(ImmutableLongArray.of(0, 1, 3, 6, 10).asList())
.containsExactly(0L, 1L, 3L, 6L, 10L)
.inOrder();
}
public void testOf6() {
assertThat(ImmutableLongArray.of(0, 1, 3, 6, 10, 15).asList())
.containsExactly(0L, 1L, 3L, 6L, 10L, 15L)
.inOrder();
}
public void testOf7() {
assertThat(ImmutableLongArray.of(0, 1, 3, 6, 10, 15, 21).asList())
.containsExactly(0L, 1L, 3L, 6L, 10L, 15L, 21L)
.inOrder();
}
public void testCopyOf_array_empty() {
/*
* We don't guarantee the same-as property, so we aren't obligated to test it. However, it's
* useful in testing - when two things are the same then one can't have bugs the other doesn't.
*/
assertThat(ImmutableLongArray.copyOf(new long[0])).isSameInstanceAs(ImmutableLongArray.of());
}
public void testCopyOf_array_nonempty() {
long[] array = new long[] {0, 1, 3};
ImmutableLongArray iia = ImmutableLongArray.copyOf(array);
array[2] = 2;
assertThat(iia.asList()).containsExactly(0L, 1L, 3L).inOrder();
}
public void testCopyOf_iterable_notCollection_empty() {
Iterable<Long> iterable = iterable(Collections.<Long>emptySet());
assertThat(ImmutableLongArray.copyOf(iterable)).isSameInstanceAs(ImmutableLongArray.of());
}
public void testCopyOf_iterable_notCollection_nonempty() {
List<Long> list = Arrays.asList(0L, 1L, 3L);
ImmutableLongArray iia = ImmutableLongArray.copyOf(iterable(list));
list.set(2, 2L);
assertThat(iia.asList()).containsExactly(0L, 1L, 3L).inOrder();
}
public void testCopyOf_iterable_collection_empty() {
Iterable<Long> iterable = Collections.emptySet();
assertThat(ImmutableLongArray.copyOf(iterable)).isSameInstanceAs(ImmutableLongArray.of());
}
public void testCopyOf_iterable_collection_nonempty() {
List<Long> list = Arrays.asList(0L, 1L, 3L);
ImmutableLongArray iia = ImmutableLongArray.copyOf((Iterable<Long>) list);
list.set(2, 2L);
assertThat(iia.asList()).containsExactly(0L, 1L, 3L).inOrder();
}
public void testCopyOf_collection_empty() {
Collection<Long> iterable = Collections.emptySet();
assertThat(ImmutableLongArray.copyOf(iterable)).isSameInstanceAs(ImmutableLongArray.of());
}
public void testCopyOf_collection_nonempty() {
List<Long> list = Arrays.asList(0L, 1L, 3L);
ImmutableLongArray iia = ImmutableLongArray.copyOf(list);
list.set(2, 2L);
assertThat(iia.asList()).containsExactly(0L, 1L, 3L).inOrder();
}
public void testCopyOf_stream() {
assertThat(ImmutableLongArray.copyOf(LongStream.empty()))
.isSameInstanceAs(ImmutableLongArray.of());
assertThat(ImmutableLongArray.copyOf(LongStream.of(0, 1, 3)).asList())
.containsExactly(0L, 1L, 3L)
.inOrder();
}
public void testBuilder_presize_zero() {
ImmutableLongArray.Builder builder = ImmutableLongArray.builder(0);
builder.add(5L);
ImmutableLongArray array = builder.build();
assertThat(array.asList()).containsExactly(5L);
}
public void testBuilder_presize_negative() {
assertThrows(IllegalArgumentException.class, () -> ImmutableLongArray.builder(-1));
}
/**
* If there's a bug in builder growth, we wouldn't know how to expose it. So, brute force the hell
* out of it for a while and see what happens.
*/
public void testBuilder_bruteForce() {
for (int i = 0; i < reduceIterationsIfGwt(100); i++) {
ImmutableLongArray.Builder builder = ImmutableLongArray.builder(random.nextInt(20));
AtomicLong counter = new AtomicLong(0);
while (counter.get() < 1000) {
BuilderOp op = BuilderOp.randomOp();
op.doIt(builder, counter);
}
ImmutableLongArray iia = builder.build();
for (int j = 0; j < iia.length(); j++) {
assertThat(iia.get(j)).isEqualTo((long) j);
}
}
}
private | ImmutableLongArrayTest |
java | elastic__elasticsearch | server/src/internalClusterTest/java/org/elasticsearch/action/bulk/WriteAckDelayIT.java | {
"start": 918,
"end": 2885
} | class ____ extends ESIntegTestCase {
@Override
protected Settings nodeSettings(int nodeOrdinal, Settings otherSettings) {
Settings.Builder settings = Settings.builder().put(super.nodeSettings(nodeOrdinal, otherSettings));
settings.put(WriteAckDelay.WRITE_ACK_DELAY_INTERVAL.getKey(), TimeValue.timeValueMillis(50));
settings.put(WriteAckDelay.WRITE_ACK_DELAY_RANDOMNESS_BOUND.getKey(), TimeValue.timeValueMillis(20));
return settings.build();
}
public void testIndexWithWriteDelayEnabled() throws Exception {
createIndex("test");
int numOfDocs = randomIntBetween(100, 400);
logger.info("indexing [{}] docs", numOfDocs);
List<IndexRequestBuilder> builders = new ArrayList<>(numOfDocs);
for (int j = 0; j < numOfDocs; j++) {
builders.add(prepareIndex("test").setSource("field", "value_" + j));
}
indexRandom(true, builders);
logger.info("verifying indexed content");
int numOfChecks = randomIntBetween(8, 12);
for (int j = 0; j < numOfChecks; j++) {
try {
logger.debug("running search");
assertResponse(prepareSearch("test"), response -> {
if (response.getHits().getTotalHits().value() != numOfDocs) {
final String message = "Count is "
+ response.getHits().getTotalHits().value()
+ " but "
+ numOfDocs
+ " was expected. "
+ ElasticsearchAssertions.formatShardStatus(response);
logger.error("{}. search response: \n{}", message, response);
fail(message);
}
});
} catch (Exception e) {
logger.error("search failed", e);
throw e;
}
}
}
}
| WriteAckDelayIT |
java | apache__flink | flink-table/flink-table-runtime/src/main/java/org/apache/flink/table/runtime/operators/window/TimeWindow.java | {
"start": 1346,
"end": 4575
} | class ____ extends Window {
private final long start;
private final long end;
public TimeWindow(long start, long end) {
this.start = start;
this.end = end;
}
/**
* Gets the starting timestamp of the window. This is the first timestamp that belongs to this
* window.
*
* @return The starting timestamp of this window.
*/
public long getStart() {
return start;
}
/**
* Gets the end timestamp of this window. The end timestamp is exclusive, meaning it is the
* first timestamp that does not belong to this window any more.
*
* @return The exclusive end timestamp of this window.
*/
public long getEnd() {
return end;
}
/**
* Gets the largest timestamp that still belongs to this window.
*
* <p>This timestamp is identical to {@code getEnd() - 1}.
*
* @return The largest timestamp that still belongs to this window.
* @see #getEnd()
*/
@Override
public long maxTimestamp() {
return end - 1;
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
TimeWindow window = (TimeWindow) o;
return end == window.end && start == window.start;
}
@Override
public int hashCode() {
// inspired from Apache BEAM
// The end values are themselves likely to be arithmetic sequence, which
// is a poor distribution to use for a hashtable, so we
// add a highly non-linear transformation.
return (int) (start + modInverse((int) (end << 1) + 1));
}
/** Compute the inverse of (odd) x mod 2^32. */
private int modInverse(int x) {
// Cube gives inverse mod 2^4, as x^4 == 1 (mod 2^4) for all odd x.
int inverse = x * x * x;
// Newton iteration doubles correct bits at each step.
inverse *= 2 - x * inverse;
inverse *= 2 - x * inverse;
inverse *= 2 - x * inverse;
return inverse;
}
@Override
public String toString() {
return "TimeWindow{" + "start=" + start + ", end=" + end + '}';
}
/** Returns {@code true} if this window intersects the given window. */
public boolean intersects(TimeWindow other) {
return this.start <= other.end && this.end >= other.start;
}
/** Returns the minimal window covers both this window and the given window. */
public TimeWindow cover(TimeWindow other) {
return new TimeWindow(Math.min(start, other.start), Math.max(end, other.end));
}
@Override
public int compareTo(Window o) {
TimeWindow that = (TimeWindow) o;
if (this.start == that.start) {
return Long.compare(this.end, that.end);
} else {
return Long.compare(this.start, that.start);
}
}
// ------------------------------------------------------------------------
// Serializer
// ------------------------------------------------------------------------
/** The serializer used to write the TimeWindow type. */
public static | TimeWindow |
java | FasterXML__jackson-databind | src/test/java/tools/jackson/databind/jsontype/vld/BasicPTVKnownTypesTest.java | {
"start": 848,
"end": 2289
} | class ____ {
public int x;
}
public void testWithJDKBasicsOk() throws Exception
{
Object[] input = new Object[] {
"test", 42, new java.net.URL("http://localhost"),
java.util.UUID.nameUUIDFromBytes("abc".getBytes()),
new Object[] { }
};
String json = DEFAULTING_MAPPER.writeValueAsString(input);
Object result = DEFAULTING_MAPPER.readValue(json, Object.class);
assertEquals(Object[].class, result.getClass());
// but then non-ok case:
json = DEFAULTING_MAPPER.writeValueAsString(new Object[] {
new Dangerous()
});
try {
DEFAULTING_MAPPER.readValue(json, Object.class);
fail("Should not pass");
} catch (InvalidTypeIdException e) {
verifyException(e, "Could not resolve type id 'tools.jackson.");
verifyException(e, "as a subtype of");
}
// and another one within array
json = DEFAULTING_MAPPER.writeValueAsString(new Object[] {
new Dangerous[] { new Dangerous() }
});
try {
DEFAULTING_MAPPER.readValue(json, Object.class);
fail("Should not pass");
} catch (InvalidTypeIdException e) {
verifyException(e, "Could not resolve type id '[Ltools.jackson.");
verifyException(e, "as a subtype of");
}
}
}
| Dangerous |
java | apache__flink | flink-connectors/flink-connector-files/src/main/java/org/apache/flink/connector/file/src/impl/FileSourceReader.java | {
"start": 1552,
"end": 2813
} | class ____<T, SplitT extends FileSourceSplit>
extends SingleThreadMultiplexSourceReaderBase<
RecordAndPosition<T>, T, SplitT, FileSourceSplitState<SplitT>> {
public FileSourceReader(
SourceReaderContext readerContext,
BulkFormat<T, SplitT> readerFormat,
Configuration config) {
super(
() -> new FileSourceSplitReader<>(config, readerFormat),
new FileSourceRecordEmitter<>(),
config,
readerContext);
}
@Override
public void start() {
// we request a split only if we did not get splits during the checkpoint restore
if (getNumberOfCurrentlyAssignedSplits() == 0) {
context.sendSplitRequest();
}
}
@Override
protected void onSplitFinished(Map<String, FileSourceSplitState<SplitT>> finishedSplitIds) {
context.sendSplitRequest();
}
@Override
protected FileSourceSplitState<SplitT> initializedState(SplitT split) {
return new FileSourceSplitState<>(split);
}
@Override
protected SplitT toSplitType(String splitId, FileSourceSplitState<SplitT> splitState) {
return splitState.toFileSourceSplit();
}
}
| FileSourceReader |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/event/service/spi/EventListenerRegistry.java | {
"start": 515,
"end": 1125
} | interface ____ extends Service {
<T> EventListenerGroup<T> getEventListenerGroup(EventType<T> eventType);
void addDuplicationStrategy(DuplicationStrategy strategy);
<T> void setListeners(EventType<T> type, Class<? extends T>... listeners);
<T> void setListeners(EventType<T> type, T... listeners);
<T> void appendListeners(EventType<T> type, Class<? extends T>... listeners);
<T> void appendListeners(EventType<T> type, T... listeners);
<T> void prependListeners(EventType<T> type, Class<? extends T>... listeners);
<T> void prependListeners(EventType<T> type, T... listeners);
}
| EventListenerRegistry |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/bytecode/internal/BytecodeProviderInitiator.java | {
"start": 482,
"end": 2281
} | class ____ implements StandardServiceInitiator<BytecodeProvider> {
/**
* @deprecated Register a {@link BytecodeProvider} through Java {@linkplain java.util.ServiceLoader services}.
*/
@Deprecated( forRemoval = true, since = "6.2" )
public static final String BYTECODE_PROVIDER_NAME_BYTEBUDDY = "bytebuddy";
/**
* Singleton access
*/
public static final StandardServiceInitiator<BytecodeProvider> INSTANCE = new BytecodeProviderInitiator();
@Override
public BytecodeProvider initiateService(Map<String, Object> configurationValues, ServiceRegistryImplementor registry) {
final var bytecodeProviders =
registry.requireService( ClassLoaderService.class )
.loadJavaServices( BytecodeProvider.class );
return getBytecodeProvider( bytecodeProviders );
}
@Override
public Class<BytecodeProvider> getServiceInitiated() {
return BytecodeProvider.class;
}
@Internal
public static BytecodeProvider buildDefaultBytecodeProvider() {
// Use BytecodeProvider's ClassLoader to ensure we can find the service
return getBytecodeProvider( ServiceLoader.load(
BytecodeProvider.class,
BytecodeProvider.class.getClassLoader()
) );
}
@Internal
public static BytecodeProvider getBytecodeProvider(Iterable<BytecodeProvider> bytecodeProviders) {
final var iterator = bytecodeProviders.iterator();
if ( !iterator.hasNext() ) {
// If no BytecodeProvider service is available, default to the "no-op" enhancer
return new org.hibernate.bytecode.internal.none.BytecodeProviderImpl();
}
else {
final var provider = iterator.next();
if ( iterator.hasNext() ) {
throw new IllegalStateException(
"Found multiple BytecodeProvider service registrations, cannot determine which one to use" );
}
return provider;
}
}
}
| BytecodeProviderInitiator |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/ReconfigurationProtocolServerSideUtils.java | {
"start": 1520,
"end": 3156
} | class ____ {
private ReconfigurationProtocolServerSideUtils() {
}
public static ListReconfigurablePropertiesResponseProto
listReconfigurableProperties(
List<String> reconfigurableProperties) {
ListReconfigurablePropertiesResponseProto.Builder builder =
ListReconfigurablePropertiesResponseProto.newBuilder();
builder.addAllName(reconfigurableProperties);
return builder.build();
}
public static GetReconfigurationStatusResponseProto getReconfigurationStatus(
ReconfigurationTaskStatus status) {
GetReconfigurationStatusResponseProto.Builder builder =
GetReconfigurationStatusResponseProto.newBuilder();
builder.setStartTime(status.getStartTime());
if (status.stopped()) {
builder.setEndTime(status.getEndTime());
assert status.getStatus() != null;
for (Map.Entry<PropertyChange, Optional<String>> result : status
.getStatus().entrySet()) {
GetReconfigurationStatusConfigChangeProto.Builder changeBuilder =
GetReconfigurationStatusConfigChangeProto.newBuilder();
PropertyChange change = result.getKey();
changeBuilder.setName(change.prop);
changeBuilder.setOldValue(change.oldVal != null ? change.oldVal : "");
if (change.newVal != null) {
changeBuilder.setNewValue(change.newVal);
}
if (result.getValue().isPresent()) {
// Get full stack trace.
changeBuilder.setErrorMessage(result.getValue().get());
}
builder.addChanges(changeBuilder);
}
}
return builder.build();
}
} | ReconfigurationProtocolServerSideUtils |
java | mockito__mockito | mockito-core/src/main/java/org/mockito/plugins/AnnotationEngine.java | {
"start": 332,
"end": 771
} | interface ____ an extension point that make possible to use a different annotation engine allowing to extend
* or replace mockito default engine.
*
* <p>
* If you are interested then see implementations or source code of {@link org.mockito.MockitoAnnotations#openMocks(Object)}
*
* <p>This plugin mechanism supersedes the {@link org.mockito.configuration.IMockitoConfiguration}
* in regard of switching mockito components.
*/
public | is |
java | alibaba__druid | core/src/main/java/com/alibaba/druid/sql/ast/SQLOrderBy.java | {
"start": 835,
"end": 3593
} | class ____ extends SQLObjectImpl implements SQLReplaceable {
protected final List<SQLSelectOrderByItem> items = new ArrayList<SQLSelectOrderByItem>();
// for postgres
private boolean siblings;
public SQLOrderBy() {
}
public SQLOrderBy(SQLExpr expr) {
SQLSelectOrderByItem item = new SQLSelectOrderByItem(expr);
addItem(item);
}
public SQLOrderBy(SQLExpr expr, SQLOrderingSpecification type) {
SQLSelectOrderByItem item = new SQLSelectOrderByItem(expr, type);
addItem(item);
}
public void addItem(SQLSelectOrderByItem item) {
if (item != null) {
item.setParent(this);
}
this.items.add(item);
}
public void addItem(SQLExpr item) {
addItem(new SQLSelectOrderByItem(item));
}
public List<SQLSelectOrderByItem> getItems() {
return this.items;
}
public boolean isSiblings() {
return this.siblings;
}
public void setSiblings(boolean siblings) {
this.siblings = siblings;
}
protected void accept0(SQLASTVisitor v) {
if (v.visit(this)) {
for (int i = 0; i < this.items.size(); i++) {
final SQLSelectOrderByItem item = this.items.get(i);
item.accept(v);
}
}
v.endVisit(this);
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
SQLOrderBy order = (SQLOrderBy) o;
if (siblings != order.siblings) {
return false;
}
return items.equals(order.items);
}
@Override
public int hashCode() {
int result = items.hashCode();
result = 31 * result + (siblings ? 1 : 0);
return result;
}
public void addItem(SQLExpr expr, SQLOrderingSpecification type) {
SQLSelectOrderByItem item = createItem();
item.setExpr(expr);
item.setType(type);
addItem(item);
}
@Override
public boolean replace(SQLExpr expr, SQLExpr target) {
for (SQLSelectOrderByItem item : items) {
if (item.replace(expr, target)) {
return true;
}
}
return false;
}
protected SQLSelectOrderByItem createItem() {
return new SQLSelectOrderByItem();
}
public SQLOrderBy clone() {
SQLOrderBy x = new SQLOrderBy();
for (SQLSelectOrderByItem item : items) {
SQLSelectOrderByItem item1 = item.clone();
item1.setParent(x);
x.items.add(item1);
}
x.siblings = siblings;
return x;
}
}
| SQLOrderBy |
java | lettuce-io__lettuce-core | src/main/java/io/lettuce/core/api/sync/RedisGeoCommands.java | {
"start": 1272,
"end": 10874
} | interface ____<K, V> {
/**
* Single geo add.
*
* @param key the key of the geo set.
* @param longitude the longitude coordinate according to WGS84.
* @param latitude the latitude coordinate according to WGS84.
* @param member the member to add.
* @return Long integer-reply the number of elements that were added to the set.
*/
Long geoadd(K key, double longitude, double latitude, V member);
/**
* Single geo add.
*
* @param key the key of the geo set.
* @param longitude the longitude coordinate according to WGS84.
* @param latitude the latitude coordinate according to WGS84.
* @param member the member to add.
* @param args additional arguments.
* @return Long integer-reply the number of elements that were added to the set.
* @since 6.1
*/
Long geoadd(K key, double longitude, double latitude, V member, GeoAddArgs args);
/**
* Multi geo add.
*
* @param key the key of the geo set.
* @param lngLatMember triplets of double longitude, double latitude and V member.
* @return Long integer-reply the number of elements that were added to the set.
*/
Long geoadd(K key, Object... lngLatMember);
/**
* Multi geo add.
*
* @param key the key of the geo set.
* @param values {@link io.lettuce.core.GeoValue} values to add.
* @return Long integer-reply the number of elements that were added to the set.
* @since 6.1
*/
Long geoadd(K key, GeoValue<V>... values);
/**
* Multi geo add.
*
* @param key the key of the geo set.
* @param args additional arguments.
* @param lngLatMember triplets of double longitude, double latitude and V member.
* @return Long integer-reply the number of elements that were added to the set.
* @since 6.1
*/
Long geoadd(K key, GeoAddArgs args, Object... lngLatMember);
/**
* Multi geo add.
*
* @param key the key of the geo set.
* @param args additional arguments.
* @param values {@link io.lettuce.core.GeoValue} values to add.
* @return Long integer-reply the number of elements that were added to the set.
* @since 6.1
*/
Long geoadd(K key, GeoAddArgs args, GeoValue<V>... values);
/**
* Retrieve distance between points {@code from} and {@code to}. If one or more elements are missing {@code null} is
* returned. Default in meters by, otherwise according to {@code unit}
*
* @param key the key of the geo set.
* @param from from member.
* @param to to member.
* @param unit distance unit.
* @return distance between points {@code from} and {@code to}. If one or more elements are missing {@code null} is
* returned.
*/
Double geodist(K key, V from, V to, GeoArgs.Unit unit);
/**
* Retrieve Geohash strings representing the position of one or more elements in a sorted set value representing a
* geospatial index.
*
* @param key the key of the geo set.
* @param members the members.
* @return bulk reply Geohash strings in the order of {@code members}. Returns {@code null} if a member is not found.
*/
List<Value<String>> geohash(K key, V... members);
/**
* Get geo coordinates for the {@code members}.
*
* @param key the key of the geo set.
* @param members the members.
* @return a list of {@link GeoCoordinates}s representing the x,y position of each element specified in the arguments. For
* missing elements {@code null} is returned.
*/
List<GeoCoordinates> geopos(K key, V... members);
/**
* Retrieve members selected by distance with the center of {@code longitude} and {@code latitude}.
*
* @param key the key of the geo set.
* @param longitude the longitude coordinate according to WGS84.
* @param latitude the latitude coordinate according to WGS84.
* @param distance radius distance.
* @param unit distance unit.
* @return bulk reply.
*/
Set<V> georadius(K key, double longitude, double latitude, double distance, GeoArgs.Unit unit);
/**
* Retrieve members selected by distance with the center of {@code longitude} and {@code latitude}.
*
* @param key the key of the geo set.
* @param longitude the longitude coordinate according to WGS84.
* @param latitude the latitude coordinate according to WGS84.
* @param distance radius distance.
* @param unit distance unit.
* @param geoArgs args to control the result.
* @return nested multi-bulk reply. The {@link GeoWithin} contains only fields which were requested by {@link GeoArgs}.
*/
List<GeoWithin<V>> georadius(K key, double longitude, double latitude, double distance, GeoArgs.Unit unit, GeoArgs geoArgs);
/**
* Perform a {@link #georadius(Object, double, double, double, GeoArgs.Unit, GeoArgs)} query and store the results in a
* sorted set.
*
* @param key the key of the geo set.
* @param longitude the longitude coordinate according to WGS84.
* @param latitude the latitude coordinate according to WGS84.
* @param distance radius distance.
* @param unit distance unit.
* @param geoRadiusStoreArgs args to store either the resulting elements with their distance or the resulting elements with
* their locations a sorted set.
* @return Long integer-reply the number of elements in the result.
*/
Long georadius(K key, double longitude, double latitude, double distance, GeoArgs.Unit unit,
GeoRadiusStoreArgs<K> geoRadiusStoreArgs);
/**
* Retrieve members selected by distance with the center of {@code member}. The member itself is always contained in the
* results.
*
* @param key the key of the geo set.
* @param member reference member.
* @param distance radius distance.
* @param unit distance unit.
* @return set of members.
*/
Set<V> georadiusbymember(K key, V member, double distance, GeoArgs.Unit unit);
/**
* Retrieve members selected by distance with the center of {@code member}. The member itself is always contained in the
* results.
*
* @param key the key of the geo set.
* @param member reference member.
* @param distance radius distance.
* @param unit distance unit.
* @param geoArgs args to control the result.
* @return nested multi-bulk reply. The {@link GeoWithin} contains only fields which were requested by {@link GeoArgs}.
*/
List<GeoWithin<V>> georadiusbymember(K key, V member, double distance, GeoArgs.Unit unit, GeoArgs geoArgs);
/**
* Perform a {@link #georadiusbymember(Object, Object, double, GeoArgs.Unit, GeoArgs)} query and store the results in a
* sorted set.
*
* @param key the key of the geo set.
* @param member reference member.
* @param distance radius distance.
* @param unit distance unit.
* @param geoRadiusStoreArgs args to store either the resulting elements with their distance or the resulting elements with
* their locations a sorted set.
* @return Long integer-reply the number of elements in the result.
*/
Long georadiusbymember(K key, V member, double distance, GeoArgs.Unit unit, GeoRadiusStoreArgs<K> geoRadiusStoreArgs);
/**
* Retrieve members selected by distance with the center of {@code reference} the search {@code predicate}. Use
* {@link GeoSearch} to create reference and predicate objects.
*
* @param key the key of the geo set.
* @param reference the reference member or longitude/latitude coordinates.
* @param predicate the bounding box or radius to search in.
* @return bulk reply.
* @since 6.1
*/
Set<V> geosearch(K key, GeoSearch.GeoRef<K> reference, GeoSearch.GeoPredicate predicate);
/**
* Retrieve members selected by distance with the center of {@code reference} the search {@code predicate}. Use
* {@link GeoSearch} to create reference and predicate objects.
*
* @param key the key of the geo set.
* @param reference the reference member or longitude/latitude coordinates.
* @param predicate the bounding box or radius to search in.
* @param geoArgs args to control the result.
* @return nested multi-bulk reply. The {@link GeoWithin} contains only fields which were requested by {@link GeoArgs}.
* @since 6.1
*/
List<GeoWithin<V>> geosearch(K key, GeoSearch.GeoRef<K> reference, GeoSearch.GeoPredicate predicate, GeoArgs geoArgs);
/**
* Perform a {@link #geosearch(Object, GeoSearch.GeoRef, GeoSearch.GeoPredicate, GeoArgs)} query and store the results in a
* sorted set.
*
* @param destination the destination where to store results.
* @param key the key of the geo set.
* @param reference the reference member or longitude/latitude coordinates.
* @param predicate the bounding box or radius to search in.
* @param geoArgs args to control the result.
* @param storeDist stores the items in a sorted set populated with their distance from the center of the circle or box, as
* a floating-point number, in the same unit specified for that shape.
* @return Long integer-reply the number of elements in the result.
* @since 6.1
*/
Long geosearchstore(K destination, K key, GeoSearch.GeoRef<K> reference, GeoSearch.GeoPredicate predicate, GeoArgs geoArgs,
boolean storeDist);
}
| RedisGeoCommands |
java | netty__netty | handler/src/main/java/io/netty/handler/timeout/IdleStateEvent.java | {
"start": 2614,
"end": 3021
} | class ____ extends IdleStateEvent {
private final String representation;
DefaultIdleStateEvent(IdleState state, boolean first) {
super(state, first);
this.representation = "IdleStateEvent(" + state + (first ? ", first" : "") + ')';
}
@Override
public String toString() {
return representation;
}
}
}
| DefaultIdleStateEvent |
java | mybatis__mybatis-3 | src/test/java/org/apache/ibatis/executor/BatchExecutorTest.java | {
"start": 773,
"end": 987
} | class ____ extends BaseExecutorTest {
@Test
void dummy() {
}
@Override
protected Executor createExecutor(Transaction transaction) {
return new BatchExecutor(config, transaction);
}
}
| BatchExecutorTest |
java | elastic__elasticsearch | x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/IndexLifecycleRunnerTests.java | {
"start": 55633,
"end": 56819
} | class ____ extends ClusterStateWaitStep {
private RuntimeException exception;
private boolean willComplete;
private long executeCount = 0;
private ToXContentObject expectedInfo = null;
MockClusterStateWaitStep(StepKey key, StepKey nextStepKey) {
super(key, nextStepKey);
}
@Override
public boolean isRetryable() {
return false;
}
public void setException(RuntimeException exception) {
this.exception = exception;
}
public void setWillComplete(boolean willComplete) {
this.willComplete = willComplete;
}
void expectedInfo(ToXContentObject expectedInfo) {
this.expectedInfo = expectedInfo;
}
public long getExecuteCount() {
return executeCount;
}
@Override
public Result isConditionMet(Index index, ProjectState currentState) {
executeCount++;
if (exception != null) {
throw exception;
}
return new Result(willComplete, expectedInfo);
}
}
private static | MockClusterStateWaitStep |
java | mockito__mockito | mockito-integration-tests/inline-mocks-tests/src/test/java/org/mockitoinline/InOrderVerificationTest.java | {
"start": 883,
"end": 7673
} | class ____ {
@Test
public void shouldVerifyStaticMethods() {
try (MockedStatic<StaticContext> mockedStatic = mockStatic(StaticContext.class)) {
// given
InOrder inOrder = inOrder(StaticContext.class);
// when
StaticContext.firstMethod();
StaticContext.secondMethod(0);
// then
inOrder.verify(mockedStatic, StaticContext::firstMethod);
inOrder.verify(mockedStatic, () -> StaticContext.secondMethod(0));
}
}
@Test
public void shouldVerifyStaticAndInstanceMethods() {
try (MockedStatic<StaticContext> mockedStatic = mockStatic(StaticContext.class)) {
// given
StaticContext mocked = mock(StaticContext.class);
InOrder inOrder = inOrder(mocked, StaticContext.class);
// when
StaticContext.firstMethod();
mocked.instanceMethod();
StaticContext.secondMethod(10);
// then
inOrder.verify(mockedStatic, StaticContext::firstMethod);
inOrder.verify(mocked).instanceMethod();
inOrder.verify(mockedStatic, () -> StaticContext.secondMethod(10));
}
}
@Test
public void shouldVerifyStaticMethodsWithSimpleAndWrapperModes() {
try (MockedStatic<StaticContext> mockedStatic = mockStatic(StaticContext.class)) {
// given
InOrder inOrder = inOrder(StaticContext.class);
// when
StaticContext.firstMethod();
StaticContext.firstMethod();
StaticContext.secondMethod(0);
// then
inOrder.verify(mockedStatic, StaticContext::firstMethod, times(2));
inOrder.verify(
mockedStatic, () -> StaticContext.secondMethod(0), timeout(100).atLeastOnce());
}
}
@Test
public void shouldThrowExceptionWhenModeIsUnsupported() {
try (MockedStatic<StaticContext> mockedStatic = mockStatic(StaticContext.class)) {
// given
VerificationMode unsupportedMode = data -> {};
InOrder inOrder = inOrder(StaticContext.class);
// when
StaticContext.firstMethod();
// then
assertThatThrownBy(
() ->
inOrder.verify(
mockedStatic,
StaticContext::firstMethod,
unsupportedMode))
.isInstanceOf(MockitoException.class);
}
}
@Test
public void shouldThrowExceptionWhenOrderIsWrong() {
try (MockedStatic<StaticContext> mockedStatic = mockStatic(StaticContext.class)) {
// given
InOrder inOrder = inOrder(StaticContext.class);
// when
StaticContext.firstMethod();
StaticContext.secondMethod(0);
// then
assertThatThrownBy(
() -> {
inOrder.verify(mockedStatic, () -> StaticContext.secondMethod(0));
inOrder.verify(mockedStatic, StaticContext::firstMethod);
})
.isInstanceOf(VerificationInOrderFailure.class);
}
}
@Test
public void shouldThrowExceptionWhenNoMoreInteractionsInvokedButThereAre() {
try (MockedStatic<StaticContext> mockedStatic = mockStatic(StaticContext.class)) {
// given
InOrder inOrder = inOrder(StaticContext.class);
// when
StaticContext.firstMethod();
StaticContext.secondMethod(0);
// then
inOrder.verify(mockedStatic, StaticContext::firstMethod);
assertThatThrownBy(inOrder::verifyNoMoreInteractions)
.isInstanceOf(VerificationInOrderFailure.class);
}
}
@Test
public void shouldThrowExceptionWhenNoMoreInteractionsInvokedWithoutVerifyingStaticMethods() {
try (MockedStatic<StaticContext> ignored = mockStatic(StaticContext.class)) {
// given
StaticContext mocked = mock(StaticContext.class);
InOrder inOrder = inOrder(StaticContext.class, mocked);
// when
mocked.instanceMethod();
StaticContext.firstMethod();
// then
inOrder.verify(mocked).instanceMethod();
assertThatThrownBy(inOrder::verifyNoMoreInteractions)
.isInstanceOf(VerificationInOrderFailure.class);
}
}
@Test
public void shouldThrowExceptionWhenClassIsNotMocked() {
assertThatThrownBy(() -> inOrder(StaticContext.class))
.isInstanceOf(NotAMockException.class);
}
@Test
public void shouldVerifyStaticMethodsWithoutInterferingWithMocking() {
try (MockedStatic<StaticContext> mockedStatic = mockStatic(StaticContext.class)) {
// given
InOrder inOrder = inOrder(StaticContext.class);
Exception expected = new RuntimeException();
mockedStatic.when(StaticContext::firstMethod).thenThrow(expected);
// when
Assert<?, ?> actual = assertThatThrownBy(StaticContext::firstMethod);
// then
actual.isSameAs(expected);
inOrder.verify(mockedStatic, StaticContext::firstMethod);
inOrder.verifyNoMoreInteractions();
}
}
@Test
public void shouldThrowExceptionWhenVerifyUsingInOrderWithoutValidClass() {
try (MockedStatic<StaticContext> mockedStaticContext = mockStatic(StaticContext.class)) {
try (MockedStatic<AnotherStaticContext> mockedAnotherStaticContext =
mockStatic(AnotherStaticContext.class)) {
// given
InOrder inOrder = inOrder(AnotherStaticContext.class);
// when
mockedAnotherStaticContext
.when(AnotherStaticContext::otherMethod)
.thenReturn("mocked value");
StaticContext.firstMethod();
// then
assertThat(AnotherStaticContext.otherMethod()).isEqualTo("mocked value");
inOrder.verify(mockedAnotherStaticContext, AnotherStaticContext::otherMethod);
assertThatThrownBy(
() ->
inOrder.verify(
mockedStaticContext, StaticContext::firstMethod))
.isInstanceOf(VerificationInOrderFailure.class);
}
}
}
private static | InOrderVerificationTest |
java | quarkusio__quarkus | extensions/resteasy-reactive/rest/deployment/src/main/java/io/quarkus/resteasy/reactive/server/deployment/ResteasyReactiveCDIProcessor.java | {
"start": 10893,
"end": 13885
} | class ____ bean. See https://github.com/quarkusio/quarkus/issues/15028
@BuildStep
void pathInterfaceImpls(Optional<ResourceScanningResultBuildItem> resourceScanningResultBuildItem,
BuildProducer<AdditionalBeanBuildItem> additionalBeanBuildItemBuildProducer) {
if (!resourceScanningResultBuildItem.isPresent()) {
return;
}
ResourceScanningResult resourceScanningResult = resourceScanningResultBuildItem.get().getResult();
Map<DotName, String> pathInterfaces = resourceScanningResult.getPathInterfaces();
List<String> impls = new ArrayList<>();
for (Map.Entry<DotName, String> i : pathInterfaces.entrySet()) {
List<ClassInfo> candidateBeans = new ArrayList<>(1);
for (ClassInfo clazz : resourceScanningResult.getIndex().getAllKnownImplementors(i.getKey())) {
if (!Modifier.isAbstract(clazz.flags())) {
if ((clazz.enclosingClass() == null || Modifier.isStatic(clazz.flags())) &&
clazz.enclosingMethod() == null) {
candidateBeans.add(clazz);
}
}
}
if (candidateBeans.size() == 1) {
impls.add(candidateBeans.get(0).name().toString());
}
}
if (!impls.isEmpty()) {
additionalBeanBuildItemBuildProducer
.produce(AdditionalBeanBuildItem.builder().setUnremovable().addBeanClasses(impls.toArray(new String[0]))
.build());
}
}
@BuildStep
void additionalBeans(List<DynamicFeatureBuildItem> additionalDynamicFeatures,
BuildProducer<ReflectiveClassBuildItem> reflectiveClassBuildItemBuildProducer,
List<JaxrsFeatureBuildItem> featureBuildItems,
BuildProducer<AdditionalBeanBuildItem> additionalBean) {
AdditionalBeanBuildItem.Builder additionalProviders = AdditionalBeanBuildItem.builder();
for (DynamicFeatureBuildItem dynamicFeature : additionalDynamicFeatures) {
if (dynamicFeature.isRegisterAsBean()) {
additionalProviders.addBeanClass(dynamicFeature.getClassName());
} else {
reflectiveClassBuildItemBuildProducer
.produce(ReflectiveClassBuildItem.builder(dynamicFeature.getClassName())
.build());
}
}
for (JaxrsFeatureBuildItem feature : featureBuildItems) {
if (feature.isRegisterAsBean()) {
additionalProviders.addBeanClass(feature.getClassName());
} else {
reflectiveClassBuildItemBuildProducer
.produce(ReflectiveClassBuildItem.builder(feature.getClassName())
.build());
}
}
additionalBean.produce(additionalProviders.setUnremovable().setDefaultScope(DotNames.SINGLETON).build());
}
}
| a |
java | apache__maven | its/core-it-suite/src/test/java/org/apache/maven/it/MavenITmng3877BasedirAlignedModelTest.java | {
"start": 1160,
"end": 3634
} | class ____ extends AbstractMavenIntegrationTestCase {
/**
* Verify that project directories are basedir aligned when inspected by plugins via the MavenProject instance.
*
* @throws Exception in case of failure
*/
@Test
public void testitMNG3877() throws Exception {
File testDir = extractResources("/mng-3877");
Verifier verifier = newVerifier(testDir.getAbsolutePath());
verifier.setAutoclean(false);
verifier.deleteDirectory("target");
verifier.addCliArgument("validate");
verifier.execute();
verifier.verifyErrorFreeLog();
Properties modelProps = verifier.loadProperties("target/model.properties");
assertPathEquals(testDir, "target", modelProps.getProperty("project.build.directory"));
assertPathEquals(testDir, "target/classes", modelProps.getProperty("project.build.outputDirectory"));
assertPathEquals(testDir, "target/test-classes", modelProps.getProperty("project.build.testOutputDirectory"));
assertPathEquals(testDir, "src/main/java", modelProps.getProperty("project.build.sourceDirectory"));
assertPathEquals(testDir, "src/main/java", modelProps.getProperty("project.compileSourceRoots.0"));
assertPathEquals(testDir, "src/test/java", modelProps.getProperty("project.build.testSourceDirectory"));
assertPathEquals(testDir, "src/test/java", modelProps.getProperty("project.testCompileSourceRoots.0"));
assertPathEquals(testDir, "src/main/resources", modelProps.getProperty("project.build.resources.0.directory"));
assertPathEquals(
testDir, "src/test/resources", modelProps.getProperty("project.build.testResources.0.directory"));
assertPathEquals(testDir, "src/main/filters/it.properties", modelProps.getProperty("project.build.filters.0"));
/*
* NOTE: The script source directory is deliberately excluded from the checks due to MNG-3741.
*/
// MNG-3877
assertPathEquals(testDir, "target/site", modelProps.getProperty("project.reporting.outputDirectory"));
}
private void assertPathEquals(File basedir, String expected, String actual) throws IOException {
File actualFile = new File(actual);
assertTrue(actualFile.isAbsolute(), "path not absolute: " + actualFile);
ItUtils.assertCanonicalFileEquals(new File(basedir, expected), actualFile);
}
}
| MavenITmng3877BasedirAlignedModelTest |
java | apache__thrift | lib/java/src/main/java/org/apache/thrift/server/TSaslNonblockingServer.java | {
"start": 12807,
"end": 13919
} | class ____ {
private final List<NetworkThread> networkThreads;
private int accepted = 0;
NetworkThreadPool(int size) throws IOException {
networkThreads = new ArrayList<>(size);
int digits = (int) Math.log10(size) + 1;
String threadNamePattern = "network-thread-%0" + digits + "d";
for (int i = 0; i < size; i++) {
networkThreads.add(new NetworkThread(String.format(threadNamePattern, i)));
}
}
/**
* Round robin new connection among all the network threads.
*
* @param connection incoming connection.
* @return true if the incoming connection is accepted by network thread pool.
*/
boolean acceptNewConnection(TNonblockingTransport connection) {
return networkThreads.get((accepted++) % networkThreads.size()).accept(connection);
}
public void start() {
for (NetworkThread thread : networkThreads) {
thread.start();
}
}
void wakeupAll() {
for (NetworkThread networkThread : networkThreads) {
networkThread.wakeup();
}
}
}
public static | NetworkThreadPool |
java | apache__camel | components/camel-tracing/src/main/java/org/apache/camel/tracing/decorators/AzureServiceBusSpanDecorator.java | {
"start": 1031,
"end": 5379
} | class ____ extends AbstractMessagingSpanDecorator {
static final String SERVICEBUS_CONTENT_TYPE = "contentType";
static final String SERVICEBUS_CORRELATION_ID = "correlationId";
static final String SERVICEBUS_DELIVERY_COUNT = "deliveryCount";
static final String SERVICEBUS_ENQUEUED_SEQUENCE_NUMBER = "enqueuedSequenceNumber";
static final String SERVICEBUS_ENQUEUED_TIME = "enqueuedTime";
static final String SERVICEBUS_EXPIRES_AT = "expiresAt";
static final String SERVICEBUS_PARTITION_KEY = "partitionKey";
static final String SERVICEBUS_REPLY_TO_SESSION_ID = "replyToSessionId";
static final String SERVICEBUS_SESSION_ID = "sessionId";
static final String SERVICEBUS_TIME_TO_LIVE = "ttl";
/**
* Constants copied from {@link org.apache.camel.component.azure.servicebus.ServiceBusConstants}
*/
static final String CONTENT_TYPE = "CamelAzureServiceBusContentType";
static final String CORRELATION_ID = "CamelAzureServiceBusCorrelationId";
static final String DELIVERY_COUNT = "CamelAzureServiceBusDeliveryCount";
static final String ENQUEUED_SEQUENCE_NUMBER = "CamelAzureServiceBusEnqueuedSequenceNumber";
static final String ENQUEUED_TIME = "CamelAzureServiceBusEnqueuedTime";
static final String EXPIRES_AT = "CamelAzureServiceBusExpiresAt";
static final String MESSAGE_ID = "CamelAzureServiceBusMessageId";
static final String SESSION_ID = "CamelAzureServiceBusSessionId";
static final String REPLY_TO_SESSION_ID = "CamelAzureServiceBusReplyToSessionId";
static final String PARTITION_KEY = "CamelAzureServiceBusPartitionKey";
static final String TIME_TO_LIVE = "CamelAzureServiceBusTimeToLive";
@Override
public String getComponent() {
return "azure-servicebus";
}
@Override
public String getComponentClassName() {
return "org.apache.camel.component.azure.servicebus.ServiceBusComponent";
}
@Override
public void pre(SpanAdapter span, Exchange exchange, Endpoint endpoint) {
super.pre(span, exchange, endpoint);
String contentType = exchange.getIn().getHeader(CONTENT_TYPE, String.class);
if (contentType != null) {
span.setTag(SERVICEBUS_CONTENT_TYPE, contentType);
}
String correlationId = exchange.getIn().getHeader(CORRELATION_ID, String.class);
if (correlationId != null) {
span.setTag(SERVICEBUS_CORRELATION_ID, correlationId);
}
Long deliveryCount = exchange.getIn().getHeader(DELIVERY_COUNT, Long.class);
if (deliveryCount != null) {
span.setTag(SERVICEBUS_DELIVERY_COUNT, deliveryCount);
}
Long enqueuedSequenceNumber = exchange.getIn().getHeader(ENQUEUED_SEQUENCE_NUMBER, Long.class);
if (enqueuedSequenceNumber != null) {
span.setTag(SERVICEBUS_ENQUEUED_SEQUENCE_NUMBER, enqueuedSequenceNumber);
}
OffsetDateTime enqueuedTime = exchange.getIn().getHeader(ENQUEUED_TIME, OffsetDateTime.class);
if (enqueuedTime != null) {
span.setTag(SERVICEBUS_ENQUEUED_TIME, enqueuedTime.toString());
}
OffsetDateTime expiresAt = exchange.getIn().getHeader(EXPIRES_AT, OffsetDateTime.class);
if (expiresAt != null) {
span.setTag(SERVICEBUS_EXPIRES_AT, expiresAt.toString());
}
String partitionKey = exchange.getIn().getHeader(PARTITION_KEY, String.class);
if (partitionKey != null) {
span.setTag(SERVICEBUS_PARTITION_KEY, partitionKey);
}
String replyToSessionId = exchange.getIn().getHeader(REPLY_TO_SESSION_ID, String.class);
if (replyToSessionId != null) {
span.setTag(SERVICEBUS_REPLY_TO_SESSION_ID, replyToSessionId);
}
String sessionId = exchange.getIn().getHeader(SESSION_ID, String.class);
if (sessionId != null) {
span.setTag(SERVICEBUS_SESSION_ID, sessionId);
}
Duration timeToLive = exchange.getIn().getHeader(TIME_TO_LIVE, Duration.class);
if (timeToLive != null) {
span.setTag(SERVICEBUS_TIME_TO_LIVE, timeToLive.toString());
}
}
@Override
protected String getMessageId(Exchange exchange) {
return exchange.getIn().getHeader(MESSAGE_ID, String.class);
}
}
| AzureServiceBusSpanDecorator |
java | apache__kafka | metadata/src/main/java/org/apache/kafka/controller/BrokerHeartbeatTracker.java | {
"start": 1142,
"end": 1515
} | class ____ be present only on the active controller.
*
* UNLIKE MOST OF THE KAFKA CONTROLLER, THIS CLASS CAN BE ACCESSED FROM MULTIPLE THREADS.
* Everything in here must be thread-safe. It is intended to be accessed directly from the
* request handler thread pool. This ensures that the heartbeats always get through, even
* if the main controller thread is busy.
*/
| will |
java | quarkusio__quarkus | extensions/qute/deployment/src/main/java/io/quarkus/qute/deployment/QuteProcessor.java | {
"start": 105225,
"end": 106569
} | class ____ annotated with @TemplateData",
implicitClassName);
continue;
}
if (uncontrolled.containsKey(implicitClassName)) {
LOGGER.debugf("Implicit value resolver for %s ignored: %s declared on %s", implicitClassName,
uncontrolled.get(implicitClassName),
uncontrolled.get(implicitClassName).target());
continue;
}
builder.addClass(implicit.getClazz(), implicit.getTemplateData());
}
ValueResolverGenerator generator = builder.build();
generator.generate();
generatedValueResolvers.addAll(generator.getGeneratedTypes());
ExtensionMethodGenerator extensionMethodGenerator = new ExtensionMethodGenerator(index, classOutput);
Map<DotName, Map<String, List<TemplateExtensionMethodBuildItem>>> classToNamespaceExtensions = new HashMap<>();
Map<String, DotName> namespaceToClass = new HashMap<>();
for (TemplateExtensionMethodBuildItem templateExtension : templateExtensionMethods) {
String generatedValueResolverClass = existingValueResolvers.getGeneratedClass(templateExtension.getMethod());
if (generatedValueResolverClass != null) {
// A ValueResolver of a non-application | is |
java | spring-projects__spring-framework | spring-test/src/test/java/org/springframework/test/context/DynamicPropertyRegistrarIntegrationTests.java | {
"start": 5212,
"end": 5485
} | class ____ implements ApiUrlClient {
private final String apiUrl;
ConstructorInjectedService(@Value("${api.url.1}") String apiUrl) {
this.apiUrl = apiUrl;
}
@Override
public String getApiUrl() {
return this.apiUrl;
}
}
static | ConstructorInjectedService |
java | apache__camel | core/camel-core/src/test/java/org/apache/camel/converter/FallbackPromoteTest.java | {
"start": 1017,
"end": 1803
} | class ____ extends ContextTestSupport {
@Test
public void testFallbackPromote() {
MyCoolBean cool = new MyCoolBean();
cool.setCool("Camel rocks");
TypeConverter tc = context.getTypeConverterRegistry().lookup(String.class, MyCoolBean.class);
assertNull(tc, "No regular type converters");
String s = context.getTypeConverter().convertTo(String.class, cool);
assertEquals("This is cool: Camel rocks", s);
cool.setCool("It works");
s = context.getTypeConverter().convertTo(String.class, cool);
assertEquals("This is cool: It works", s);
tc = context.getTypeConverterRegistry().lookup(String.class, MyCoolBean.class);
assertNotNull(tc, "Should have been promoted");
}
}
| FallbackPromoteTest |
java | quarkusio__quarkus | test-framework/jacoco/runtime/src/main/java/io/quarkus/jacoco/runtime/JacocoConfig.java | {
"start": 2451,
"end": 3225
} | class ____ to exclude from the report. May use wildcard
* characters (* and ?). When not specified nothing will be excluded.
* <p>
* For instance:
* <ul>
* <li><code>**/fo/**/*</code> targets all classes under fo and sub packages</li>
* <li><code>**/bar/*</code> targets all classes directly under bar</li>
* <li><code>**/*BAR*.class</code> targets classes that contain BAR in their name regardless of path</li>
* </ul>
*/
public Optional<List<String>> excludes();
/**
* The location of the report files.
* The path can be relative (to the module) or absolute.
*/
@ConfigDocDefault(TARGET_JACOCO_REPORT)
public Optional<String> reportLocation();
}
| files |
java | elastic__elasticsearch | x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/persistence/ChunkedTrainedModelRestorer.java | {
"start": 10794,
"end": 14404
} | interface ____ this exception cannot
* be unwrapped. This is important because the TrainedModelAssignmentNodeService has retry logic when a
* SearchPhaseExecutionException occurs:
* https://github.com/elastic/elasticsearch/blob/main/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentNodeService.java#L219
* This intentionally prevents that code from attempting to retry loading the entire model. If the retry logic here
* fails after the set retries we should not retry loading the entire model to avoid additional strain on the cluster.
*/
throw new ElasticsearchStatusException(
format(
"loading model [%s] failed after [%s] retries. The deployment is now in a failed state, "
+ "the error may be transient please stop the deployment and restart",
modelId,
retries
),
RestStatus.TOO_MANY_REQUESTS,
e
);
}
failureCount++;
logger.debug(format("[%s] searching for model part failed %s times, retrying", modelId, failureCount));
TimeUnit.SECONDS.sleep(sleep.getSeconds());
}
}
}
private static SearchRequestBuilder buildSearchBuilder(Client client, String modelId, String index, int searchSize) {
return client.prepareSearch(index)
.setQuery(
QueryBuilders.constantScoreQuery(
QueryBuilders.boolQuery()
.filter(QueryBuilders.termQuery(TrainedModelConfig.MODEL_ID.getPreferredName(), modelId))
.filter(
QueryBuilders.termQuery(InferenceIndexConstants.DOC_TYPE.getPreferredName(), TrainedModelDefinitionDoc.NAME)
)
)
)
.setSize(searchSize)
.setTrackTotalHits(true)
// First find the latest index
.addSort("_index", SortOrder.DESC)
// Then, sort by doc_num
.addSort(
SortBuilders.fieldSort(TrainedModelDefinitionDoc.DOC_NUM.getPreferredName()).order(SortOrder.ASC).unmappedType("long")
);
}
public static SearchRequest buildSearch(Client client, String modelId, String index, int searchSize, @Nullable TaskId parentTaskId) {
SearchRequest searchRequest = buildSearchBuilder(client, modelId, index, searchSize).request();
if (parentTaskId != null) {
searchRequest.setParentTask(parentTaskId);
}
return searchRequest;
}
public static TrainedModelDefinitionDoc parseModelDefinitionDocLenientlyFromSource(
BytesReference source,
String modelId,
NamedXContentRegistry xContentRegistry
) throws IOException {
try (
XContentParser parser = XContentHelper.createParserNotCompressed(
LoggingDeprecationHandler.XCONTENT_PARSER_CONFIG.withRegistry(xContentRegistry),
source,
XContentType.JSON
)
) {
return TrainedModelDefinitionDoc.fromXContent(parser, true).build();
} catch (IOException e) {
logger.error(() -> "[" + modelId + "] failed to parse model definition", e);
throw e;
}
}
}
| so |
java | apache__maven | impl/maven-core/src/main/java/org/apache/maven/execution/scope/internal/MojoExecutionScope.java | {
"start": 1329,
"end": 3455
} | class ____ extends org.apache.maven.impl.di.MojoExecutionScope
implements Scope, MojoExecutionListener {
public <T> void seed(Class<T> clazz, Provider<T> value) {
getScopeState().seed(clazz, value::get);
}
@Override
public <T> Provider<T> scope(final Key<T> key, Provider<T> unscoped) {
Object qualifier = key.getAnnotation() instanceof Named n ? n.value() : key.getAnnotation();
org.apache.maven.di.Key<T> k =
org.apache.maven.di.Key.ofType(key.getTypeLiteral().getType(), qualifier);
return scope(k, unscoped::get)::get;
}
public static <T> Provider<T> seededKeyProvider(Class<? extends T> clazz) {
return MojoExecutionScope.<T>seededKeySupplier(clazz)::get;
}
@Override
public void beforeMojoExecution(MojoExecutionEvent event) throws MojoExecutionException {
for (WeakMojoExecutionListener provided : getProvidedListeners()) {
provided.beforeMojoExecution(event);
}
}
@Override
public void afterMojoExecutionSuccess(MojoExecutionEvent event) throws MojoExecutionException {
for (WeakMojoExecutionListener provided : getProvidedListeners()) {
provided.afterMojoExecutionSuccess(event);
}
}
@Override
public void afterExecutionFailure(MojoExecutionEvent event) {
for (WeakMojoExecutionListener provided : getProvidedListeners()) {
provided.afterExecutionFailure(event);
}
}
private Collection<WeakMojoExecutionListener> getProvidedListeners() {
// the same instance can be provided multiple times under different Key's
// deduplicate instances to avoid redundant beforeXXX/afterXXX callbacks
IdentityHashMap<WeakMojoExecutionListener, Object> listeners = new IdentityHashMap<>();
for (Object provided : getScopeState().provided()) {
if (provided instanceof WeakMojoExecutionListener weakMojoExecutionListener) {
listeners.put(weakMojoExecutionListener, null);
}
}
return listeners.keySet();
}
}
| MojoExecutionScope |
java | spring-projects__spring-security | oauth2/oauth2-resource-server/src/main/java/org/springframework/security/oauth2/server/resource/introspection/OAuth2IntrospectionException.java | {
"start": 848,
"end": 1170
} | class ____ extends RuntimeException {
@Serial
private static final long serialVersionUID = -7327790383594166793L;
public OAuth2IntrospectionException(String message) {
super(message);
}
public OAuth2IntrospectionException(String message, Throwable cause) {
super(message, cause);
}
}
| OAuth2IntrospectionException |
java | apache__camel | components/camel-aws/camel-aws2-step-functions/src/main/java/org/apache/camel/component/aws2/stepfunctions/client/StepFunctions2InternalClient.java | {
"start": 1022,
"end": 1248
} | interface ____ {
/**
* Returns an StepFunctions client after a factory method determines which one to return.
*
* @return SfnClient SfnClient
*/
SfnClient getSfnClient();
}
| StepFunctions2InternalClient |
java | FasterXML__jackson-databind | src/test/java/tools/jackson/databind/deser/filter/IgnoreUnknownPropertyUsingPropertyBasedTest.java | {
"start": 594,
"end": 1104
} | class ____ {
int a, b;
@JsonCreator
public IgnoreUnknownAnySetter(@JsonProperty("a") int a, @JsonProperty("b") int b) {
this.a = a;
this.b = b;
}
Map<String, Object> props = new HashMap<>();
@JsonAnySetter
public void addProperty(String key, Object value) {
props.put(key, value);
}
@JsonAnyGetter
public Map<String, Object> getProperties() {
return props;
}
}
@JsonIgnoreProperties(ignoreUnknown = true)
static | IgnoreUnknownAnySetter |
java | elastic__elasticsearch | x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/SerializationTestUtils.java | {
"start": 2183,
"end": 4639
} | class ____ {
public static void assertSerialization(PhysicalPlan plan) {
assertSerialization(plan, EsqlTestUtils.TEST_CFG);
}
public static void assertSerialization(PhysicalPlan plan, Configuration configuration) {
var deserPlan = serializeDeserialize(
plan,
PlanStreamOutput::writeNamedWriteable,
in -> in.readNamedWriteable(PhysicalPlan.class),
configuration
);
EqualsHashCodeTestUtils.checkEqualsAndHashCode(plan, unused -> deserPlan);
}
public static void assertSerialization(LogicalPlan plan) {
var deserPlan = serializeDeserialize(plan, PlanStreamOutput::writeNamedWriteable, in -> in.readNamedWriteable(LogicalPlan.class));
EqualsHashCodeTestUtils.checkEqualsAndHashCode(plan, unused -> deserPlan);
}
public static void assertSerialization(Expression expression) {
assertSerialization(expression, EsqlTestUtils.TEST_CFG);
}
public static void assertSerialization(Expression expression, Configuration configuration) {
Expression deserExpression = serializeDeserialize(
expression,
PlanStreamOutput::writeNamedWriteable,
in -> in.readNamedWriteable(Expression.class),
configuration
);
EqualsHashCodeTestUtils.checkEqualsAndHashCode(expression, unused -> deserExpression);
}
public static <T> T serializeDeserialize(T orig, Serializer<T> serializer, Deserializer<T> deserializer) {
return serializeDeserialize(orig, serializer, deserializer, EsqlTestUtils.TEST_CFG);
}
public static <T> T serializeDeserialize(T orig, Serializer<T> serializer, Deserializer<T> deserializer, Configuration config) {
try (BytesStreamOutput out = new BytesStreamOutput()) {
PlanStreamOutput planStreamOutput = new PlanStreamOutput(out, config);
serializer.write(planStreamOutput, orig);
StreamInput in = new NamedWriteableAwareStreamInput(
ByteBufferStreamInput.wrap(BytesReference.toBytes(out.bytes())),
writableRegistry()
);
PlanStreamInput planStreamInput = new PlanStreamInput(in, in.namedWriteableRegistry(), config, new TestNameIdMapper());
return deserializer.read(planStreamInput);
} catch (IOException e) {
throw new UncheckedIOException(e);
}
}
public | SerializationTestUtils |
java | apache__hadoop | hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azure/ITestNativeAzureFileSystemContractLive.java | {
"start": 1433,
"end": 3072
} | class ____ extends
FileSystemContractBaseTest {
private AzureBlobStorageTestAccount testAccount;
private Path basePath;
@RegisterExtension
private TestName methodName = new TestName();
private void nameThread() {
Thread.currentThread().setName("JUnit-" + methodName.getMethodName());
}
@BeforeEach
public void setUp() throws Exception {
nameThread();
testAccount = AzureBlobStorageTestAccount.create();
if (testAccount != null) {
fs = testAccount.getFileSystem();
}
assumeThat(fs).isNotNull();
basePath = fs.makeQualified(
AzureTestUtils.createTestPath(
new Path("NativeAzureFileSystemContractLive")));
}
@Override
public void tearDown() throws Exception {
super.tearDown();
testAccount = AzureTestUtils.cleanup(testAccount);
fs = null;
}
@Override
public Path getTestBaseDir() {
return basePath;
}
protected int getGlobalTimeout() {
return AzureTestConstants.AZURE_TEST_TIMEOUT;
}
/**
* The following tests are failing on Azure and the Azure
* file system code needs to be modified to make them pass.
* A separate work item has been opened for this.
*/
@Disabled
@Test
public void testMoveFileUnderParent() throws Throwable {
}
@Disabled
@Test
public void testRenameFileToSelf() throws Throwable {
}
@Disabled
@Test
public void testRenameChildDirForbidden() throws Exception {
}
@Disabled
@Test
public void testMoveDirUnderParent() throws Throwable {
}
@Disabled
@Test
public void testRenameDirToSelf() throws Throwable {
}
}
| ITestNativeAzureFileSystemContractLive |
java | apache__camel | dsl/camel-componentdsl/src/generated/java/org/apache/camel/builder/component/dsl/FtpsComponentBuilderFactory.java | {
"start": 1406,
"end": 1871
} | interface ____ {
/**
* FTPS (camel-ftp)
* Upload and download files to/from FTP servers supporting the FTPS
* protocol.
*
* Category: file
* Since: 2.2
* Maven coordinates: org.apache.camel:camel-ftp
*
* @return the dsl builder
*/
static FtpsComponentBuilder ftps() {
return new FtpsComponentBuilderImpl();
}
/**
* Builder for the FTPS component.
*/
| FtpsComponentBuilderFactory |
java | apache__rocketmq | client/src/main/java/org/apache/rocketmq/client/hook/CheckForbiddenContext.java | {
"start": 1072,
"end": 3437
} | class ____ {
private String nameSrvAddr;
private String group;
private Message message;
private MessageQueue mq;
private String brokerAddr;
private CommunicationMode communicationMode;
private SendResult sendResult;
private Exception exception;
private Object arg;
private boolean unitMode = false;
public String getGroup() {
return group;
}
public void setGroup(String group) {
this.group = group;
}
public Message getMessage() {
return message;
}
public void setMessage(Message message) {
this.message = message;
}
public MessageQueue getMq() {
return mq;
}
public void setMq(MessageQueue mq) {
this.mq = mq;
}
public String getBrokerAddr() {
return brokerAddr;
}
public void setBrokerAddr(String brokerAddr) {
this.brokerAddr = brokerAddr;
}
public CommunicationMode getCommunicationMode() {
return communicationMode;
}
public void setCommunicationMode(CommunicationMode communicationMode) {
this.communicationMode = communicationMode;
}
public SendResult getSendResult() {
return sendResult;
}
public void setSendResult(SendResult sendResult) {
this.sendResult = sendResult;
}
public Exception getException() {
return exception;
}
public void setException(Exception exception) {
this.exception = exception;
}
public Object getArg() {
return arg;
}
public void setArg(Object arg) {
this.arg = arg;
}
public boolean isUnitMode() {
return unitMode;
}
public void setUnitMode(boolean isUnitMode) {
this.unitMode = isUnitMode;
}
public String getNameSrvAddr() {
return nameSrvAddr;
}
public void setNameSrvAddr(String nameSrvAddr) {
this.nameSrvAddr = nameSrvAddr;
}
@Override
public String toString() {
return "SendMessageContext [nameSrvAddr=" + nameSrvAddr + ", group=" + group + ", message=" + message
+ ", mq=" + mq + ", brokerAddr=" + brokerAddr + ", communicationMode=" + communicationMode
+ ", sendResult=" + sendResult + ", exception=" + exception + ", unitMode=" + unitMode
+ ", arg=" + arg + "]";
}
}
| CheckForbiddenContext |
java | apache__flink | flink-core/src/test/java/org/apache/flink/api/common/typeutils/CompositeSerializerTest.java | {
"start": 8641,
"end": 10357
} | class ____
extends CompositeTypeSerializerSnapshot<List<Object>, TestListCompositeSerializer> {
private boolean isImmutableTargetType;
/** Constructor for read instantiation. */
public TestListCompositeSerializerSnapshot() {
this.isImmutableTargetType = false;
}
/** Constructor to create the snapshot for writing. */
public TestListCompositeSerializerSnapshot(
TestListCompositeSerializer serializerInstance, boolean isImmutableTargetType) {
super(serializerInstance);
this.isImmutableTargetType = isImmutableTargetType;
}
@Override
protected int getCurrentOuterSnapshotVersion() {
return 0;
}
@Override
protected void writeOuterSnapshot(DataOutputView out) throws IOException {
out.writeBoolean(isImmutableTargetType);
}
@Override
protected void readOuterSnapshot(
int readOuterSnapshotVersion, DataInputView in, ClassLoader userCodeClassLoader)
throws IOException {
this.isImmutableTargetType = in.readBoolean();
}
@Override
protected TypeSerializer<?>[] getNestedSerializers(
TestListCompositeSerializer outerSerializer) {
return outerSerializer.fieldSerializers;
}
@Override
protected TestListCompositeSerializer createOuterSerializerWithNestedSerializers(
TypeSerializer<?>[] nestedSerializers) {
return new TestListCompositeSerializer(isImmutableTargetType, nestedSerializers);
}
}
private static | TestListCompositeSerializerSnapshot |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/watcher/ResourceWatcherService.java | {
"start": 1728,
"end": 5506
} | enum ____ {
/**
* Defaults to 5 seconds
*/
HIGH(TimeValue.timeValueSeconds(5)),
/**
* Defaults to 30 seconds
*/
MEDIUM(TimeValue.timeValueSeconds(30)),
/**
* Defaults to 60 seconds
*/
LOW(TimeValue.timeValueSeconds(60));
final TimeValue interval;
Frequency(TimeValue interval) {
this.interval = interval;
}
}
public static final Setting<Boolean> ENABLED = Setting.boolSetting("resource.reload.enabled", true, Property.NodeScope);
public static final Setting<TimeValue> RELOAD_INTERVAL_HIGH = Setting.timeSetting(
"resource.reload.interval.high",
Frequency.HIGH.interval,
Property.NodeScope
);
public static final Setting<TimeValue> RELOAD_INTERVAL_MEDIUM = Setting.timeSetting(
"resource.reload.interval.medium",
Setting.timeSetting("resource.reload.interval", Frequency.MEDIUM.interval),
Property.NodeScope
);
public static final Setting<TimeValue> RELOAD_INTERVAL_LOW = Setting.timeSetting(
"resource.reload.interval.low",
Frequency.LOW.interval,
Property.NodeScope
);
private final boolean enabled;
final ResourceMonitor lowMonitor;
final ResourceMonitor mediumMonitor;
final ResourceMonitor highMonitor;
private final Cancellable lowFuture;
private final Cancellable mediumFuture;
private final Cancellable highFuture;
public ResourceWatcherService(Settings settings, ThreadPool threadPool) {
this.enabled = ENABLED.get(settings);
TimeValue interval = RELOAD_INTERVAL_LOW.get(settings);
lowMonitor = new ResourceMonitor(interval, Frequency.LOW);
interval = RELOAD_INTERVAL_MEDIUM.get(settings);
mediumMonitor = new ResourceMonitor(interval, Frequency.MEDIUM);
interval = RELOAD_INTERVAL_HIGH.get(settings);
highMonitor = new ResourceMonitor(interval, Frequency.HIGH);
if (enabled) {
final var executor = threadPool.generic();
lowFuture = threadPool.scheduleWithFixedDelay(lowMonitor, lowMonitor.interval, executor);
mediumFuture = threadPool.scheduleWithFixedDelay(mediumMonitor, mediumMonitor.interval, executor);
highFuture = threadPool.scheduleWithFixedDelay(highMonitor, highMonitor.interval, executor);
} else {
lowFuture = null;
mediumFuture = null;
highFuture = null;
}
}
@Override
public void close() {
if (enabled) {
lowFuture.cancel();
mediumFuture.cancel();
highFuture.cancel();
}
}
/**
* Register new resource watcher that will be checked in default {@link Frequency#MEDIUM MEDIUM} frequency
*/
public <W extends ResourceWatcher> WatcherHandle<W> add(W watcher) throws IOException {
return add(watcher, Frequency.MEDIUM);
}
/**
* Register new resource watcher that will be checked in the given frequency
*/
public <W extends ResourceWatcher> WatcherHandle<W> add(W watcher, Frequency frequency) throws IOException {
watcher.init();
return switch (frequency) {
case LOW -> lowMonitor.add(watcher);
case MEDIUM -> mediumMonitor.add(watcher);
case HIGH -> highMonitor.add(watcher);
};
}
public void notifyNow(Frequency frequency) {
switch (frequency) {
case LOW -> lowMonitor.run();
case MEDIUM -> mediumMonitor.run();
case HIGH -> highMonitor.run();
default -> throw new IllegalArgumentException("Unknown frequency [" + frequency + "]");
}
}
static | Frequency |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/MultiVariableDeclarationTest.java | {
"start": 1374,
"end": 1770
} | class ____ {
int a;
// BUG: Diagnostic contains:
int x = 1, y = 2;
}
""")
.doTest();
}
@Test
public void positive() {
BugCheckerRefactoringTestHelper.newInstance(MultiVariableDeclaration.class, getClass())
.addInputLines(
"in/A.java",
"""
package a;
public | A |
java | google__error-prone | core/src/main/java/com/google/errorprone/bugpatterns/inject/dagger/Util.java | {
"start": 6418,
"end": 6615
} | class ____ an abstract class.
*
* <ul>
* <li>Removes {@code final} if it was there.
* <li>Adds {@code abstract} if it wasn't there.
* <li>Adds a private empty constructor if the | to |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/ImmutableSetForContainsTest.java | {
"start": 14333,
"end": 15131
} | class ____ {
private static final ImmutableList<Class<?>> CLS_LIST =
ImmutableList.of(Long.class, Double.class);
private static final ImmutableList<Object> OBJ_LIST =
ImmutableList.of(new String(""), new Object());
private void myFunc() {
CLS_LIST.stream().forEach(System.out::println);
OBJ_LIST.forEach(System.out::println);
}
}
""")
.expectUnchanged()
.doTest();
}
@Test
public void immutableListGetInVarArg_doesNothing() {
refactoringHelper
.addInputLines(
"Test.java",
"""
import com.google.common.collect.ImmutableList;
import java.util.ArrayList;
| Test |
java | quarkusio__quarkus | independent-projects/tools/devtools-common/src/main/java/io/quarkus/devtools/commands/handlers/RemoveExtensionsCommandHandler.java | {
"start": 929,
"end": 1073
} | class ____ thread-safe. It extracts extensions to be removed from the project from an instance of
* {@link QuarkusCommandInvocation}.
*/
public | is |
java | redisson__redisson | redisson/src/main/java/org/redisson/api/RBitSetReactive.java | {
"start": 715,
"end": 788
} | interface ____ BitSet object
*
* @author Nikita Koksharov
*
*/
public | for |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/AnnotationPositionTest.java | {
"start": 4371,
"end": 4765
} | interface ____ {
/** Actually Javadoc. */
@NonTypeUse
/** Javadoc! */
public void foo();
}
""")
.expectUnchanged()
.doTest(TEXT_MATCH);
}
@Test
public void interspersedJavadoc_withComment() {
refactoringHelper
.addInputLines(
"Test.java",
"""
| Test |
java | quarkusio__quarkus | extensions/amazon-lambda/common-runtime/src/main/java/io/quarkus/amazon/lambda/runtime/AmazonLambdaMapperRecorder.java | {
"start": 545,
"end": 1634
} | class ____ {
private static final Logger log = Logger.getLogger(AmazonLambdaMapperRecorder.class);
public static ObjectMapper objectMapper;
public static ObjectReader cognitoIdReader;
public static ObjectReader clientCtxReader;
public void initObjectMapper() {
objectMapper = getObjectMapper()
.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false)
.configure(MapperFeature.ACCEPT_CASE_INSENSITIVE_PROPERTIES, true)
.registerModule(new JodaModule())
.registerModule(new DateModule());
}
public void initContextReaders() {
cognitoIdReader = objectMapper.readerFor(CognitoIdentity.class);
clientCtxReader = objectMapper.readerFor(ClientContextImpl.class);
}
private ObjectMapper getObjectMapper() {
InstanceHandle<ObjectMapper> instance = Arc.container().instance(ObjectMapper.class);
if (instance.isAvailable()) {
return instance.get().copy();
}
return new ObjectMapper();
}
}
| AmazonLambdaMapperRecorder |
java | apache__hadoop | hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/rm/TestRMContainerAllocator.java | {
"start": 40852,
"end": 42024
} | class ____ extends MockRM {
private static long fakeClusterTimeStamp = System.currentTimeMillis();
public MyResourceManager(Configuration conf) {
super(conf);
}
public MyResourceManager(Configuration conf, RMStateStore store) {
super(conf, store);
}
@Override
public void serviceStart() throws Exception {
super.serviceStart();
// Ensure that the application attempt IDs for all the tests are the same
// The application attempt IDs will be used as the login user names
MyResourceManager.setClusterTimeStamp(fakeClusterTimeStamp);
}
@Override
protected EventHandler<SchedulerEvent> createSchedulerEventDispatcher() {
// Dispatch inline for test sanity
return new EventHandler<SchedulerEvent>() {
@Override
public void handle(SchedulerEvent event) {
scheduler.handle(event);
}
};
}
@Override
protected ResourceScheduler createScheduler() {
return new MyFifoScheduler(this.getRMContext());
}
MyFifoScheduler getMyFifoScheduler() {
return (MyFifoScheduler) scheduler;
}
}
private static | MyResourceManager |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.